Compare commits

..

29 Commits

Author SHA1 Message Date
16d54c339f fixing multithreaded cts
All checks were successful
Build / build (push) Successful in 55s
Test / build_and_test (push) Successful in 47m16s
2026-04-18 15:35:06 +02:00
1378812a12 wat ?
Some checks failed
Build / build (push) Successful in 59s
Test / build_and_test (push) Has been cancelled
2026-04-18 15:31:59 +02:00
ac5e14f647 replacing env vars by compile options 2026-04-18 15:30:59 +02:00
8f68ce4bf7 re-adding env vars
Some checks failed
Build / build (push) Failing after 24s
Test / build_and_test (push) Successful in 11m5s
2026-04-18 14:14:22 +02:00
65c51b8213 fixing ci
Some checks failed
Build / build (push) Successful in 56s
Test / build_and_test (push) Failing after 3m41s
2026-04-18 03:18:41 +02:00
e97ee8b23d almost finished update to Zig 0.16.0
Some checks failed
Build / build (push) Successful in 2m18s
Test / build_and_test (push) Failing after 6m9s
2026-04-18 02:26:29 +02:00
d5a520e261 ci slip
Some checks failed
Build / build (push) Failing after 48s
Test / build_and_test (push) Failing after 1m19s
2026-04-16 23:37:57 +02:00
5be875c07e ci skip; start of update to zig 0.16
All checks were successful
Build / build (push) Has been skipped
Test / build_and_test (push) Has been skipped
2026-04-16 22:15:07 +02:00
ee0ffbe09d adding base blit
All checks were successful
Test / build_and_test (push) Successful in 55m18s
Build / build (push) Successful in 1m54s
2026-04-14 02:46:25 +02:00
95e8edabe0 adding image to image copy
All checks were successful
Build / build (push) Successful in 2m15s
Test / build_and_test (push) Successful in 54m45s
2026-04-13 16:36:10 +02:00
d54c28dcaa adding buffer-image copies
All checks were successful
Build / build (push) Successful in 2m11s
Test / build_and_test (push) Successful in 48m53s
2026-04-12 22:34:58 +02:00
3717fb3790 adding unsupported descriptor kinds
All checks were successful
Build / build (push) Successful in 1m57s
Test / build_and_test (push) Successful in 47m3s
2026-04-04 23:43:45 +02:00
6f6f2e6ab2 switching to smp_allocator
All checks were successful
Build / build (push) Successful in 2m5s
Test / build_and_test (push) Successful in 1h14m49s
2026-04-04 03:59:54 +02:00
c0e71d501a bumping spirv interpreter
All checks were successful
Build / build (push) Successful in 2m27s
Test / build_and_test (push) Successful in 58m6s
2026-04-02 03:42:47 +02:00
981e75fb58 yes
Some checks failed
Build / build (push) Successful in 2m2s
Test / build_and_test (push) Has been cancelled
2026-03-31 14:47:46 +02:00
e9d078247b bumping spirv interpreter
All checks were successful
Build / build (push) Successful in 1m39s
Test / build_and_test (push) Successful in 52m38s
2026-03-31 03:28:53 +02:00
95c6af284e adding specialization management to compute pipelines
All checks were successful
Build / build (push) Successful in 1m53s
Test / build_and_test (push) Successful in 56m23s
2026-03-30 04:48:05 +02:00
c0e2451f5f fixing zdt source
All checks were successful
Build / build (push) Successful in 1m57s
Test / build_and_test (push) Successful in 57m57s
2026-03-30 01:20:26 +02:00
d5a639aa89 fixing zdt source
Some checks failed
Build / build (push) Waiting to run
Test / build_and_test (push) Failing after 2m46s
2026-03-30 01:15:34 +02:00
9e72ba7501 bumping spirv interpreter
Some checks failed
Build / build (push) Successful in 1m48s
Test / build_and_test (push) Has been cancelled
2026-03-30 01:00:48 +02:00
6cbbf53bcd bumping spirv interpreter
All checks were successful
Build / build (push) Successful in 1m33s
Test / build_and_test (push) Successful in 1h19m23s
2026-03-24 04:34:49 +01:00
48a229cf8e bumping spirv interpreter
All checks were successful
Build / build (push) Successful in 1m36s
Test / build_and_test (push) Successful in 57m36s
2026-03-23 05:00:36 +01:00
153d014057 bumping spirv interpreter
All checks were successful
Build / build (push) Successful in 1m31s
Test / build_and_test (push) Successful in 51m44s
2026-03-22 04:48:20 +01:00
f30b279a83 bumping spirv interpreter
All checks were successful
Build / build (push) Successful in 1m40s
Test / build_and_test (push) Successful in 1h0m50s
2026-03-21 22:31:06 +01:00
67353d9b1c adding spirv interpreter results table dump
All checks were successful
Build / build (push) Successful in 1m39s
Test / build_and_test (push) Successful in 59m51s
2026-03-21 05:19:45 +01:00
7ba64de411 fixing command buffer reset on begin
Some checks failed
Build / build (push) Successful in 1m57s
Test / build_and_test (push) Failing after 39m36s
2026-03-20 13:15:47 +01:00
d417f5d3bd fixing compuite pipline test
All checks were successful
Build / build (push) Successful in 2m39s
Test / build_and_test (push) Successful in 48m54s
2026-03-19 02:22:05 +01:00
c7c0165e6b switching to release fast test
All checks were successful
Build / build (push) Successful in 2m18s
Test / build_and_test (push) Successful in 53m30s
2026-03-18 16:14:02 +01:00
ee49c86ebb adding secondary command buffers
Some checks failed
Build / build (push) Successful in 8m27s
Test / build_and_test (push) Failing after 25m19s
2026-03-18 03:22:46 +01:00
42 changed files with 1274 additions and 1033 deletions

View File

@@ -10,7 +10,7 @@ jobs:
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- uses: mlugg/setup-zig@v2 - uses: https://codeberg.org/mlugg/setup-zig@v2
- uses: actions/setup-node@v6 - uses: actions/setup-node@v6
with: with:
node-version: 24 node-version: 24
@@ -55,7 +55,7 @@ jobs:
which deqp-runner && deqp-runner --version || echo "deqp-runner not found" which deqp-runner && deqp-runner --version || echo "deqp-runner not found"
- name: Run Vulkan CTS - name: Run Vulkan CTS
run: zig build cts-soft -- --mustpass-list=master -j3 run: zig build cts-soft --release=fast -- --mustpass-list=master -j3
continue-on-error: true continue-on-error: true
- name: Verify tests - name: Verify tests

View File

@@ -17,7 +17,7 @@ jobs:
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- uses: mlugg/setup-zig@v2 - uses: https://codeberg.org/mlugg/setup-zig@v2
- uses: actions/setup-node@v6 - uses: actions/setup-node@v6
with: with:
node-version: 24 node-version: 24

View File

@@ -13,7 +13,7 @@ jobs:
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- uses: mlugg/setup-zig@v2 - uses: https://codeberg.org/mlugg/setup-zig@v2
- uses: actions/setup-node@v6 - uses: actions/setup-node@v6
with: with:
node-version: 24 node-version: 24
@@ -61,7 +61,7 @@ jobs:
which deqp-runner && deqp-runner --version || echo "deqp-runner not found" which deqp-runner && deqp-runner --version || echo "deqp-runner not found"
- name: Run Vulkan CTS - name: Run Vulkan CTS
run: zig build cts-soft -- -j3 run: zig build cts-soft --release=fast -- -j3
continue-on-error: true continue-on-error: true
- name: Verify tests - name: Verify tests

3
.gitignore vendored
View File

@@ -1,6 +1,7 @@
.cache/ .cache/
.zig-cache/ .zig-cache/
zig-out/ zig-out/
zig-pkg/
cts/ cts/
cts_report/ cts_report/
scripts/__pycache__/ scripts/__pycache__/
@@ -14,3 +15,5 @@ scripts/__pycache__/
*.html *.html
*.pyc *.pyc
*.spv *.spv
*_table_dump.txt
vgcore.*

View File

@@ -10,7 +10,7 @@ It was forged for my own learning and amusement alone. Pray, do not wield it in
## Purpose ## Purpose
To understand Vulkan — not as a humble API mere mortals call upon, but as a labyrinthine system where one may craft a driver by hand. To understand Vulkan - not as a humble API mere mortals call upon, but as a labyrinthine system where one may craft a driver by hand.
It does not seek to produce a performant or production-worthy driver. \ It does not seek to produce a performant or production-worthy driver. \
*The gods are merciful, but not that merciful.* *The gods are merciful, but not that merciful.*
@@ -22,15 +22,9 @@ zig build
``` ```
Then ensure thy Vulkan loader is pointed toward the ICD manifest. Then ensure thy Vulkan loader is pointed toward the ICD manifest.
The precise ritual varies by system — consult the tomes of your operating system, or wander the web's endless mausoleum of documentation. The precise ritual varies by system - consult the tomes of your operating system, or wander the web's endless mausoleum of documentation.
Use at your own risk. If thy machine shudders, weeps, or attempts to flee — know that it was warned. Use at your own risk. If thy machine shudders, weeps, or attempts to flee - know that it was warned.
\
\
Thou may also conjure forth a tome of compile commands by doing thus:
```
zig build cdb
```
## Vulkan 1.0 specification ## Vulkan 1.0 specification
@@ -61,12 +55,12 @@ vkCmdClearAttachments | ⚙️ wip
vkCmdClearColorImage | ⚙️ WIP vkCmdClearColorImage | ⚙️ WIP
vkCmdClearDepthStencilImage | ⚙️ WIP vkCmdClearDepthStencilImage | ⚙️ WIP
vkCmdCopyBuffer | âś… Implemented vkCmdCopyBuffer | âś… Implemented
vkCmdCopyBufferToImage | ⚙️ WIP vkCmdCopyBufferToImage | ✅ Implemented
vkCmdCopyImage | ⚙️ WIP vkCmdCopyImage | ✅ Implemented
vkCmdCopyImageToBuffer | ⚙️ WIP vkCmdCopyImageToBuffer | ✅ Implemented
vkCmdCopyQueryPoolResults | ⚙️ WIP vkCmdCopyQueryPoolResults | ⚙️ WIP
vkCmdDispatch | âś… Implemented vkCmdDispatch | âś… Implemented
vkCmdDispatchIndirect | ⚙️ WIP vkCmdDispatchIndirect | ✅ Implemented
vkCmdDraw | ⚙️ WIP vkCmdDraw | ⚙️ WIP
vkCmdDrawIndexed | ⚙️ WIP vkCmdDrawIndexed | ⚙️ WIP
vkCmdDrawIndexedIndirect | ⚙️ WIP vkCmdDrawIndexedIndirect | ⚙️ WIP
@@ -109,7 +103,7 @@ vkCreateImage | âś… Implemented
vkCreateImageView | âś… Implemented vkCreateImageView | âś… Implemented
vkCreateInstance | âś… Implemented vkCreateInstance | âś… Implemented
vkCreatePipelineCache | ⚙️ WIP vkCreatePipelineCache | ⚙️ WIP
vkCreatePipelineLayout | ⚙️ WIP vkCreatePipelineLayout | ✅ Implemented
vkCreateQueryPool | ⚙️ WIP vkCreateQueryPool | ⚙️ WIP
vkCreateRenderPass | ⚙️ WIP vkCreateRenderPass | ⚙️ WIP
vkCreateSampler | ⚙️ WIP vkCreateSampler | ⚙️ WIP
@@ -129,7 +123,7 @@ vkDestroyImageView | âś… Implemented
vkDestroyInstance | âś… Implemented vkDestroyInstance | âś… Implemented
vkDestroyPipeline | âś… Implemented vkDestroyPipeline | âś… Implemented
vkDestroyPipelineCache | ⚙️ WIP vkDestroyPipelineCache | ⚙️ WIP
vkDestroyPipelineLayout | ⚙️ WIP vkDestroyPipelineLayout | ✅ Implemented
vkDestroyQueryPool | ⚙️ WIP vkDestroyQueryPool | ⚙️ WIP
vkDestroyRenderPass | ⚙️ WIP vkDestroyRenderPass | ⚙️ WIP
vkDestroySampler | ⚙️ WIP vkDestroySampler | ⚙️ WIP
@@ -157,8 +151,8 @@ vkGetImageSparseMemoryRequirements | ⚙️ WIP
vkGetImageSubresourceLayout | ⚙️ WIP vkGetImageSubresourceLayout | ⚙️ WIP
vkGetInstanceProcAddr | âś… Implemented vkGetInstanceProcAddr | âś… Implemented
vkGetPhysicalDeviceFeatures | âś… Implemented vkGetPhysicalDeviceFeatures | âś… Implemented
vkGetPhysicalDeviceFormatProperties | ⚙️ WIP vkGetPhysicalDeviceFormatProperties | ✅ Implemented
vkGetPhysicalDeviceImageFormatProperties | ⚙️ WIP vkGetPhysicalDeviceImageFormatProperties | ✅ Implemented
vkGetPhysicalDeviceMemoryProperties | âś… Implemented vkGetPhysicalDeviceMemoryProperties | âś… Implemented
vkGetPhysicalDeviceProperties | âś… Implemented vkGetPhysicalDeviceProperties | âś… Implemented
vkGetPhysicalDeviceQueueFamilyProperties | âś… Implemented vkGetPhysicalDeviceQueueFamilyProperties | âś… Implemented
@@ -173,8 +167,8 @@ vkQueueBindSparse | ⚙️ WIP
vkQueueSubmit | âś… Implemented vkQueueSubmit | âś… Implemented
vkQueueWaitIdle | âś… Implemented vkQueueWaitIdle | âś… Implemented
vkResetCommandBuffer | âś… Implemented vkResetCommandBuffer | âś… Implemented
vkResetCommandPool | ⚙️ WIP vkResetCommandPool | ✅ Implemented
vkResetDescriptorPool | ⚙️ WIP vkResetDescriptorPool | ✅ Implemented
vkResetEvent | ⚙️ WIP vkResetEvent | ⚙️ WIP
vkResetFences | âś… Implemented vkResetFences | âś… Implemented
vkSetEvent | ⚙️ WIP vkSetEvent | ⚙️ WIP

180
build.zig
View File

@@ -1,13 +1,12 @@
const std = @import("std"); const std = @import("std");
const Step = std.Build.Step; const Step = std.Build.Step;
const zcc = @import("compile_commands");
const builtin = @import("builtin"); const builtin = @import("builtin");
const ImplementationDesc = struct { const ImplementationDesc = struct {
name: []const u8, name: []const u8,
root_source_file: []const u8, root_source_file: []const u8,
vulkan_version: std.SemanticVersion, vulkan_version: std.SemanticVersion,
custom: ?*const fn (*std.Build, *std.Build.Step.Compile) anyerror!void = null, custom: ?*const fn (*std.Build, *std.Build.Step.Compile, *std.Build.Step.Options) anyerror!void = null,
}; };
const implementations = [_]ImplementationDesc{ const implementations = [_]ImplementationDesc{
@@ -19,6 +18,12 @@ const implementations = [_]ImplementationDesc{
}, },
}; };
const RunningMode = enum {
normal,
gdb,
valgrind,
};
pub fn build(b: *std.Build) !void { pub fn build(b: *std.Build) !void {
const target = b.standardTargetOptions(.{}); const target = b.standardTargetOptions(.{});
const optimize = b.standardOptimizeOption(.{}); const optimize = b.standardOptimizeOption(.{});
@@ -27,11 +32,8 @@ pub fn build(b: *std.Build) !void {
.root_source_file = b.path("src/vulkan/lib.zig"), .root_source_file = b.path("src/vulkan/lib.zig"),
.target = target, .target = target,
.optimize = optimize, .optimize = optimize,
.link_libc = true,
}); });
const zdt = b.dependency("zdt", .{}).module("zdt");
const zigrc = b.dependency("zigrc", .{}).module("zigrc");
const vulkan_headers = b.dependency("vulkan_headers", .{}); const vulkan_headers = b.dependency("vulkan_headers", .{});
const vulkan_utility_libraries = b.dependency("vulkan_utility_libraries", .{}); const vulkan_utility_libraries = b.dependency("vulkan_utility_libraries", .{});
@@ -39,19 +41,21 @@ pub fn build(b: *std.Build) !void {
.registry = vulkan_headers.path("registry/vk.xml"), .registry = vulkan_headers.path("registry/vk.xml"),
}).module("vulkan-zig"); }).module("vulkan-zig");
base_mod.addImport("zdt", zdt); const logs_option = b.option(bool, "logs", "Driver logs") orelse false;
base_mod.addImport("zigrc", zigrc);
const options = b.addOptions();
options.addOption(bool, "logs", logs_option);
base_mod.addImport("vulkan", vulkan); base_mod.addImport("vulkan", vulkan);
base_mod.addSystemIncludePath(vulkan_headers.path("include")); base_mod.addSystemIncludePath(vulkan_headers.path("include"));
base_mod.addSystemIncludePath(vulkan_utility_libraries.path("include")); base_mod.addSystemIncludePath(vulkan_utility_libraries.path("include"));
for (implementations) |impl| { const use_llvm = b.option(bool, "use-llvm", "LLVM build") orelse (b.release_mode != .off);
var targets = std.ArrayList(*std.Build.Step.Compile){};
for (implementations) |impl| {
const lib_mod = b.createModule(.{ const lib_mod = b.createModule(.{
.root_source_file = b.path(impl.root_source_file), .root_source_file = b.path(impl.root_source_file),
.target = target, .target = target,
.link_libc = true,
.optimize = optimize, .optimize = optimize,
.imports = &.{ .imports = &.{
.{ .name = "base", .module = base_mod }, .{ .name = "base", .module = base_mod },
@@ -65,11 +69,11 @@ pub fn build(b: *std.Build) !void {
.name = b.fmt("vulkan_{s}", .{impl.name}), .name = b.fmt("vulkan_{s}", .{impl.name}),
.root_module = lib_mod, .root_module = lib_mod,
.linkage = .dynamic, .linkage = .dynamic,
.use_llvm = true, // Fixes some random bugs happenning with custom backend. Investigations needed .use_llvm = use_llvm,
}); });
if (impl.custom) |custom| { if (impl.custom) |custom| {
custom(b, lib) catch continue; custom(b, lib, options) catch continue;
} }
const icd_file = b.addWriteFile( const icd_file = b.addWriteFile(
@@ -98,21 +102,29 @@ pub fn build(b: *std.Build) !void {
const test_step = b.step(b.fmt("test-{s}", .{impl.name}), b.fmt("Run libvulkan_{s} tests", .{impl.name})); const test_step = b.step(b.fmt("test-{s}", .{impl.name}), b.fmt("Run libvulkan_{s} tests", .{impl.name}));
test_step.dependOn(&run_tests.step); test_step.dependOn(&run_tests.step);
const c_test = addCTest(b, target, optimize, vulkan_headers, &impl, lib) catch continue; (try addCTS(b, target, &impl, lib, .normal)).dependOn(&lib_install.step);
(try addCTS(b, target, &impl, lib, .gdb)).dependOn(&lib_install.step);
try targets.append(b.allocator, c_test); (try addCTS(b, target, &impl, lib, .valgrind)).dependOn(&lib_install.step);
try targets.append(b.allocator, lib);
_ = zcc.createStep(b, "cdb", try targets.toOwnedSlice(b.allocator));
(try addCTestRunner(b, &impl, c_test, false)).dependOn(&lib_install.step);
(try addCTestRunner(b, &impl, c_test, true)).dependOn(&lib_install.step);
(try addCTS(b, target, &impl, lib, false)).dependOn(&lib_install.step);
(try addCTS(b, target, &impl, lib, true)).dependOn(&lib_install.step);
(try addMultithreadedCTS(b, target, &impl, lib)).dependOn(&lib_install.step); (try addMultithreadedCTS(b, target, &impl, lib)).dependOn(&lib_install.step);
const impl_autodoc_test = b.addObject(.{
.name = "lib",
.root_module = lib_mod,
});
const impl_install_docs = b.addInstallDirectory(.{
.source_dir = impl_autodoc_test.getEmittedDocs(),
.install_dir = .prefix,
.install_subdir = b.fmt("docs-{s}", .{impl.name}),
});
const impl_docs_step = b.step(b.fmt("docs-{s}", .{impl.name}), b.fmt("Build and install the documentation for lib_vulkan_{s}", .{impl.name}));
impl_docs_step.dependOn(&impl_install_docs.step);
} }
base_mod.addOptions("config", options);
const autodoc_test = b.addObject(.{ const autodoc_test = b.addObject(.{
.name = "lib", .name = "lib",
.root_module = base_mod, .root_module = base_mod,
@@ -128,13 +140,10 @@ pub fn build(b: *std.Build) !void {
docs_step.dependOn(&install_docs.step); docs_step.dependOn(&install_docs.step);
} }
fn customSoft(b: *std.Build, lib: *std.Build.Step.Compile) !void { fn customSoft(b: *std.Build, lib: *std.Build.Step.Compile, options: *std.Build.Step.Options) !void {
const cpuinfo = b.lazyDependency("cpuinfo", .{}) orelse return error.UnresolvedDependency; const cpuinfo = b.lazyDependency("cpuinfo", .{}) orelse return error.UnresolvedDependency;
lib.addSystemIncludePath(cpuinfo.path("include")); lib.root_module.addSystemIncludePath(cpuinfo.path("include"));
lib.linkLibrary(cpuinfo.artifact("cpuinfo")); lib.root_module.linkLibrary(cpuinfo.artifact("cpuinfo"));
const interface = b.lazyDependency("interface", .{}) orelse return error.UnresolvedDependency;
lib.root_module.addImport("interface", interface.module("interface"));
const spv = b.dependency("SPIRV_Interpreter", .{ const spv = b.dependency("SPIRV_Interpreter", .{
.@"no-example" = true, .@"no-example" = true,
@@ -143,57 +152,20 @@ fn customSoft(b: *std.Build, lib: *std.Build.Step.Compile) !void {
}).module("spv"); }).module("spv");
lib.root_module.addImport("spv", spv); lib.root_module.addImport("spv", spv);
const debug_allocator_option = b.option(bool, "debug-allocator", "debug device allocator") orelse false; const debug_allocator_option = b.option(bool, "debug-allocator", "Debug device allocator") orelse false;
const shaders_simd_option = b.option(bool, "shader-simd", "Shaders SIMD acceleration") orelse true;
const single_threaded_compute_option = b.option(bool, "single-threaded-compute", "Single threaded compute shaders execution") orelse true;
const compute_dump_early_results_table_option = b.option(u32, "compute-dump-early-results-table", "Dump compute shaders results table before invocation");
const compute_dump_final_results_table_option = b.option(u32, "compute-dump-final-results-table", "Dump compute shaders results table after invocation");
const options = b.addOptions();
options.addOption(bool, "debug_allocator", debug_allocator_option); options.addOption(bool, "debug_allocator", debug_allocator_option);
lib.root_module.addOptions("config", options); options.addOption(bool, "shaders_simd", shaders_simd_option);
options.addOption(bool, "single_threaded_compute", single_threaded_compute_option);
options.addOption(?u32, "compute_dump_early_results_table", compute_dump_early_results_table_option);
options.addOption(?u32, "compute_dump_final_results_table", compute_dump_final_results_table_option);
} }
fn addCTest(b: *std.Build, target: std.Build.ResolvedTarget, optimize: std.builtin.OptimizeMode, vulkan_headers: *std.Build.Dependency, impl: *const ImplementationDesc, impl_lib: *std.Build.Step.Compile) !*std.Build.Step.Compile { fn addCTS(b: *std.Build, target: std.Build.ResolvedTarget, impl: *const ImplementationDesc, impl_lib: *std.Build.Step.Compile, comptime mode: RunningMode) !*std.Build.Step {
const volk = b.lazyDependency("volk", .{}) orelse return error.DepNotFound;
const kvf = b.lazyDependency("kvf", .{}) orelse return error.DepNotFound;
const stb = b.lazyDependency("stb", .{}) orelse return error.DepNotFound;
const exe = b.addExecutable(.{
.name = b.fmt("c_test_vulkan_{s}", .{impl.name}),
.root_module = b.createModule(.{
.target = target,
.optimize = optimize,
.link_libc = true,
}),
});
exe.root_module.addSystemIncludePath(volk.path(""));
exe.root_module.addSystemIncludePath(kvf.path(""));
exe.root_module.addSystemIncludePath(stb.path(""));
exe.root_module.addSystemIncludePath(vulkan_headers.path("include"));
exe.root_module.addCSourceFile(.{
.file = b.path("test/c/main.c"),
.flags = &.{b.fmt("-DLIBVK=\"{s}\"", .{impl_lib.name})},
});
const install = b.addInstallArtifact(exe, .{});
install.step.dependOn(&impl_lib.step);
return exe;
}
fn addCTestRunner(b: *std.Build, impl: *const ImplementationDesc, exe: *std.Build.Step.Compile, comptime gdb: bool) !*std.Build.Step {
const run = b.addRunArtifact(exe);
if (gdb) {
try run.argv.insert(b.allocator, 0, .{ .bytes = b.fmt("gdb", .{}) }); // Hacky
}
run.step.dependOn(&exe.step);
const run_step = b.step(b.fmt("test-c-{s}{s}", .{ impl.name, if (gdb) "-gdb" else "" }), b.fmt("Run libvulkan_{s} C test{s}", .{ impl.name, if (gdb) " within GDB" else "" }));
run_step.dependOn(&run.step);
return &run.step;
}
fn addCTS(b: *std.Build, target: std.Build.ResolvedTarget, impl: *const ImplementationDesc, impl_lib: *std.Build.Step.Compile, comptime gdb: bool) !*std.Build.Step {
const cts = b.dependency("cts_bin", .{}); const cts = b.dependency("cts_bin", .{});
const cts_exe_name = cts.path(b.fmt("deqp-vk-{s}", .{ const cts_exe_name = cts.path(b.fmt("deqp-vk-{s}", .{
@@ -212,12 +184,23 @@ fn addCTS(b: *std.Build, target: std.Build.ResolvedTarget, impl: *const Implemen
const cts_exe_path = try cts_exe_name.getPath3(b, null).toString(b.allocator); const cts_exe_path = try cts_exe_name.getPath3(b, null).toString(b.allocator);
const run = b.addSystemCommand(&[_][]const u8{if (gdb) "gdb" else cts_exe_path}); const run = b.addSystemCommand(&[_][]const u8{switch (mode) {
.normal => cts_exe_path,
.gdb => "gdb",
.valgrind => "valgrind",
}});
run.step.dependOn(&impl_lib.step); run.step.dependOn(&impl_lib.step);
if (gdb) { switch (mode) {
.gdb => {
run.addArg("--args"); run.addArg("--args");
run.addArg(cts_exe_path); run.addArg(cts_exe_path);
},
.valgrind => {
run.addArg("--track-origins=yes");
run.addArg(cts_exe_path);
},
else => {},
} }
run.addArg(b.fmt("--deqp-archive-dir={s}", .{try cts.path("").getPath3(b, null).toString(b.allocator)})); run.addArg(b.fmt("--deqp-archive-dir={s}", .{try cts.path("").getPath3(b, null).toString(b.allocator)}));
@@ -237,7 +220,24 @@ fn addCTS(b: *std.Build, target: std.Build.ResolvedTarget, impl: *const Implemen
run.addArg(b.fmt("--deqp-caselist-file={s}", .{mustpass})); run.addArg(b.fmt("--deqp-caselist-file={s}", .{mustpass}));
} }
const run_step = b.step(b.fmt("raw-cts-{s}{s}", .{ impl.name, if (gdb) "-gdb" else "" }), b.fmt("Run Vulkan conformance tests for libvulkan_{s}{s}", .{ impl.name, if (gdb) " within GDB" else "" })); const run_step = b.step(
b.fmt("raw-cts-{s}{s}", .{
impl.name,
switch (mode) {
.normal => "",
.gdb => "-gdb",
.valgrind => "-valgrind",
},
}),
b.fmt("Run Vulkan conformance tests for libvulkan_{s}{s}", .{
impl.name,
switch (mode) {
.normal => "",
.gdb => " within GDB",
.valgrind => " within Valgrind",
},
}),
);
run_step.dependOn(&run.step); run_step.dependOn(&run.step);
return &run.step; return &run.step;
@@ -246,18 +246,6 @@ fn addCTS(b: *std.Build, target: std.Build.ResolvedTarget, impl: *const Implemen
fn addMultithreadedCTS(b: *std.Build, target: std.Build.ResolvedTarget, impl: *const ImplementationDesc, impl_lib: *std.Build.Step.Compile) !*std.Build.Step { fn addMultithreadedCTS(b: *std.Build, target: std.Build.ResolvedTarget, impl: *const ImplementationDesc, impl_lib: *std.Build.Step.Compile) !*std.Build.Step {
const cts = b.dependency("cts_bin", .{}); const cts = b.dependency("cts_bin", .{});
// Some systems may need a manual path management to get to packages (e.g. Github Actions)
const cache_path = blk: {
if (std.process.getEnvVarOwned(b.allocator, "ZIG_GLOBAL_CACHE_DIR")) |cache_path| {
break :blk b.fmt("{s}/../", .{cache_path});
} else |err| switch (err) {
error.EnvironmentVariableNotFound => {
break :blk "";
},
else => unreachable,
}
};
const cts_exe_name = cts.path(b.fmt("deqp-vk-{s}", .{ const cts_exe_name = cts.path(b.fmt("deqp-vk-{s}", .{
switch (if (target.query.os_tag) |tag| tag else builtin.target.os.tag) { switch (if (target.query.os_tag) |tag| tag else builtin.target.os.tag) {
.linux => "linux.x86_64", .linux => "linux.x86_64",
@@ -294,19 +282,21 @@ fn addMultithreadedCTS(b: *std.Build, target: std.Build.ResolvedTarget, impl: *c
run.addArg("run"); run.addArg("run");
run.addArg("--deqp"); run.addArg("--deqp");
run.addArg(b.fmt("{s}{s}", .{ cache_path, cts_exe_path })); run.addArg(cts_exe_path);
run.addArg("--caselist"); run.addArg("--caselist");
run.addArg(b.fmt("{s}{s}", .{ cache_path, mustpass_path })); run.addArg(mustpass_path);
run.addArg("--output"); run.addArg("--output");
run.addArg("./cts"); run.addArg("./cts");
run.addArg("--timeout");
run.addArg("300");
if (jobs_count) |count| { if (jobs_count) |count| {
run.addArg(b.fmt("-j{d}", .{count})); run.addArg(b.fmt("-j{d}", .{count}));
} }
run.addArg("--"); run.addArg("--");
run.addArg(b.fmt("--deqp-archive-dir={s}{s}", .{ cache_path, try cts.path("").getPath3(b, null).toString(b.allocator) })); run.addArg(b.fmt("--deqp-archive-dir={s}", .{try cts.path("").getPath3(b, null).toString(b.allocator)}));
run.addArg(b.fmt("--deqp-vk-library-path={s}", .{b.getInstallPath(.lib, impl_lib.out_lib_filename)})); run.addArg(b.fmt("--deqp-vk-library-path={s}", .{b.getInstallPath(.lib, impl_lib.out_lib_filename)}));
const run_step = b.step(b.fmt("cts-{s}", .{impl.name}), b.fmt("Run Vulkan conformance tests in a multithreaded environment for libvulkan_{s}", .{impl.name})); const run_step = b.step(b.fmt("cts-{s}", .{impl.name}), b.fmt("Run Vulkan conformance tests for libvulkan_{s} in a multithreaded environment", .{impl.name}));
run_step.dependOn(&run.step); run_step.dependOn(&run.step);
return &run.step; return &run.step;

View File

@@ -2,65 +2,33 @@
.name = .VulkanDriver, .name = .VulkanDriver,
.version = "0.0.1", .version = "0.0.1",
.fingerprint = 0x52cb73649f1107de, .fingerprint = 0x52cb73649f1107de,
.minimum_zig_version = "0.15.2", .minimum_zig_version = "0.16.0",
.dependencies = .{ .dependencies = .{
.compile_commands = .{
.url = "git+https://github.com/the-argus/zig-compile-commands#f74e2d13e43fafab3a71e19557a0e1cfbf0f2e1b",
.hash = "zig_compile_commands-0.0.1-OZg5-a3CAACM-h32Kjb1obTMqrKGs9YoDhorVZ8-LGle",
},
.vulkan_headers = .{ .vulkan_headers = .{
.url = "git+https://github.com/KhronosGroup/Vulkan-Headers?ref=v1.4.330#ee3b5caaa7e372715873c7b9c390ee1c3ca5db25", .url = "git+https://github.com/KhronosGroup/Vulkan-Headers?ref=v1.4.330#ee3b5caaa7e372715873c7b9c390ee1c3ca5db25",
.hash = "N-V-__8AAFXYAQKsK51AAGXB9HziPDFjS_DVUq6_QHVxHrBM", .hash = "N-V-__8AAFXYAQKsK51AAGXB9HziPDFjS_DVUq6_QHVxHrBM",
}, },
.vulkan_zig = .{ .vulkan_zig = .{
.url = "git+https://github.com/catmeow72/vulkan-zig/#8961518db28f88d2cf09ea68e146923de2cfa7f0", .url = "git+https://github.com/Snektron/vulkan-zig#be16fc99f7f794cba90334978c6c5aadeda8094f",
.hash = "vulkan-0.0.0-r7Ytx6hBAwD8X_TN32qlkzul4riK6vFvjtK9fZfRvALg", .hash = "vulkan-0.0.0-r7Ytx-V9AwCwHxS2TaZ-KJCp_3axidu3ASZUg3B512-k",
}, },
.vulkan_utility_libraries = .{ .vulkan_utility_libraries = .{
.url = "git+https://github.com/KhronosGroup/Vulkan-Utility-Libraries.git#ba452bad58bb4d4c64d7fbd872bf69f70141510e", .url = "git+https://github.com/KhronosGroup/Vulkan-Utility-Libraries.git#ba452bad58bb4d4c64d7fbd872bf69f70141510e",
.hash = "N-V-__8AAE42fwC1FFw26LNZ8AaSuGMdgG4vfYkfV_227sET", .hash = "N-V-__8AAE42fwC1FFw26LNZ8AaSuGMdgG4vfYkfV_227sET",
}, },
.zdt = .{
.url = "git+https://github.com/FObersteiner/zdt/?ref=v0.8.1#8b551a0a3e5ae64a32b5bad0e6a93119787b43af",
.hash = "zdt-0.8.1-xr0_vAxUDwCJRDh9pcAS_mdZBIsvcGTtN-K8JJSWY4I6",
},
.cts_bin = .{ .cts_bin = .{
.url = "git+https://git.kbz8.me/kbz_8/Vulkan-CTS-bin#19ce2da05f8176348064a9fc6688847e5f76a46e", .url = "git+https://git.kbz8.me/kbz_8/Vulkan-CTS-bin#19ce2da05f8176348064a9fc6688847e5f76a46e",
.hash = "N-V-__8AAHDV0xtS93nAGaYd7YWxBLnvHDEplwIpC29izSGa", .hash = "N-V-__8AAHDV0xtS93nAGaYd7YWxBLnvHDEplwIpC29izSGa",
}, },
.zigrc = .{
.url = "https://github.com/Aandreba/zigrc/archive/refs/tags/1.1.0.tar.gz",
.hash = "zigrc-1.0.0-lENlWzvQAACulrbkL9PVhWjFsWSkYhi7AmfSbCM-2Xlh",
},
.cpuinfo = .{ .cpuinfo = .{
.url = "git+https://github.com/Kbz-8/cpuinfo#4883954cfcec3f6c9ca9c4aaddfc26107e08726f", .url = "git+https://github.com/Kbz-8/cpuinfo.git#c9bea4f6c166a495ee0ce117821f9627d4aed118",
.hash = "cpuinfo-0.0.1-RLgIQTLRMgF4dLo8AJ-HvnpFsJe6jmXCJjMWWjil6RF1", .hash = "cpuinfo-0.0.1-RLgIQYrTMgGqfQMOd1nAa2EuglXOh5gR9bNzwMzQTemt",
.lazy = true,
},
.volk = .{
.url = "git+https://github.com/zeux/volk/#8f53cc717f50f142db4736f401d0b61956cd78f9",
.hash = "N-V-__8AAPn9BwCBHnaxOC_rffCpFI7QRfi5qBCLvov9EYK3",
.lazy = true,
},
.kvf = .{
.url = "git+https://git.kbz8.me/kbz_8/KVF#98b845f876bea94f7bf1b9d30588cf617bf93452",
.hash = "N-V-__8AAEGKAgC2cGDnxmAIFKkaICxS_ogfVYWH83Re29zN",
.lazy = true,
},
.stb = .{
.url = "git+https://github.com/nothings/stb#f1c79c02822848a9bed4315b12c8c8f3761e1296",
.hash = "N-V-__8AABQ7TgCnPlp8MP4YA8znrjd6E-ZjpF1rvrS8J_2I",
.lazy = true,
},
.interface = .{
.url = "git+https://github.com/nilslice/zig-interface#8c0fe8fa9fd0702eee43f50cb75dce1cc5a7e1f4",
.hash = "interface-0.0.2-GFlWJ1mcAQARS-V4xJ7qDt5_cutxOHSEz6H9yiK-Sw0A",
.lazy = true, .lazy = true,
}, },
.SPIRV_Interpreter = .{ .SPIRV_Interpreter = .{
.url = "git+https://git.kbz8.me/kbz_8/SPIRV-Interpreter#e8a08d78851dd07e72de7b40821cd2af10f38866", .url = "git+https://git.kbz8.me/kbz_8/SPIRV-Interpreter#664ea9b92bf84bc97ec4a062c171562bf6628263",
.hash = "SPIRV_Interpreter-0.0.1-ajmpn9DJAwAaQ_vqdY_c-GX__VX9YQZ5viIBxsgPUlBk", .hash = "SPIRV_Interpreter-0.0.1-ajmpn1qKBACshq_ncUUF-zXJzpdNLRzIAPcWRQL57W8l",
}, },
}, },

View File

@@ -18,9 +18,14 @@ const ExecutionDevice = @import("device/Device.zig");
const Self = @This(); const Self = @This();
pub const Interface = base.CommandBuffer; pub const Interface = base.CommandBuffer;
const Command = InterfaceFactory(.{ const Command = struct {
.execute = fn (*ExecutionDevice) VkError!void, const VTable = struct {
}, null); execute: *const fn (*anyopaque, *ExecutionDevice) VkError!void,
};
ptr: *anyopaque,
vtable: *const VTable,
};
interface: Interface, interface: Interface,
@@ -41,6 +46,7 @@ pub fn create(device: *base.Device, allocator: std.mem.Allocator, info: *const v
.begin = begin, .begin = begin,
.bindDescriptorSets = bindDescriptorSets, .bindDescriptorSets = bindDescriptorSets,
.bindPipeline = bindPipeline, .bindPipeline = bindPipeline,
.blitImage = blitImage,
.clearColorImage = clearColorImage, .clearColorImage = clearColorImage,
.copyBuffer = copyBuffer, .copyBuffer = copyBuffer,
.copyBufferToImage = copyBufferToImage, .copyBufferToImage = copyBufferToImage,
@@ -49,6 +55,7 @@ pub fn create(device: *base.Device, allocator: std.mem.Allocator, info: *const v
.dispatch = dispatch, .dispatch = dispatch,
.dispatchIndirect = dispatchIndirect, .dispatchIndirect = dispatchIndirect,
.end = end, .end = end,
.executeCommands = executeCommands,
.fillBuffer = fillBuffer, .fillBuffer = fillBuffer,
.pipelineBarrier = pipelineBarrier, .pipelineBarrier = pipelineBarrier,
.reset = reset, .reset = reset,
@@ -69,6 +76,7 @@ pub fn create(device: *base.Device, allocator: std.mem.Allocator, info: *const v
pub fn destroy(interface: *Interface, allocator: std.mem.Allocator) void { pub fn destroy(interface: *Interface, allocator: std.mem.Allocator) void {
const self: *Self = @alignCast(@fieldParentPtr("interface", interface)); const self: *Self = @alignCast(@fieldParentPtr("interface", interface));
allocator.destroy(self); allocator.destroy(self);
_ = self.command_allocator.reset(.free_all);
} }
pub fn execute(self: *Self, device: *ExecutionDevice) void { pub fn execute(self: *Self, device: *ExecutionDevice) void {
@@ -76,10 +84,10 @@ pub fn execute(self: *Self, device: *ExecutionDevice) void {
defer self.interface.finish() catch {}; defer self.interface.finish() catch {};
for (self.commands.items) |command| { for (self.commands.items) |command| {
command.vtable.execute(command.ptr, device) catch |err| { command.vtable.execute(@ptrCast(command.ptr), device) catch |err| {
base.errors.errorLoggerContext(err, "the software execution device"); base.errors.errorLoggerContext(err, "the software execution device");
if (@errorReturnTrace()) |trace| { if (@errorReturnTrace()) |trace| {
std.debug.dumpStackTrace(trace.*); std.debug.dumpErrorReturnTrace(trace);
} }
return; // Should we return or continue ? Maybe device lost ? return; // Should we return or continue ? Maybe device lost ?
}; };
@@ -98,8 +106,7 @@ pub fn end(interface: *Interface) VkError!void {
pub fn reset(interface: *Interface, _: vk.CommandBufferResetFlags) VkError!void { pub fn reset(interface: *Interface, _: vk.CommandBufferResetFlags) VkError!void {
const self: *Self = @alignCast(@fieldParentPtr("interface", interface)); const self: *Self = @alignCast(@fieldParentPtr("interface", interface));
const allocator = self.command_allocator.allocator(); self.commands.clearAndFree(self.command_allocator.allocator());
self.commands.clearAndFree(allocator);
_ = self.command_allocator.reset(.free_all); _ = self.command_allocator.reset(.free_all);
} }
@@ -117,7 +124,8 @@ pub fn bindDescriptorSets(interface: *Interface, bind_point: vk.PipelineBindPoin
sets: [base.VULKAN_MAX_DESCRIPTOR_SETS]?*base.DescriptorSet, sets: [base.VULKAN_MAX_DESCRIPTOR_SETS]?*base.DescriptorSet,
dynamic_offsets: []const u32, dynamic_offsets: []const u32,
pub fn execute(impl: *const Impl, device: *ExecutionDevice) VkError!void { pub fn execute(context: *anyopaque, device: *ExecutionDevice) VkError!void {
const impl: *Impl = @ptrCast(@alignCast(context));
for (impl.first_set.., impl.sets[0..]) |i, set| { for (impl.first_set.., impl.sets[0..]) |i, set| {
if (set == null) if (set == null)
break; break;
@@ -134,7 +142,7 @@ pub fn bindDescriptorSets(interface: *Interface, bind_point: vk.PipelineBindPoin
.sets = sets, .sets = sets,
.dynamic_offsets = dynamic_offsets, .dynamic_offsets = dynamic_offsets,
}; };
self.commands.append(allocator, Command.from(cmd)) catch return VkError.OutOfHostMemory; self.commands.append(allocator, .{ .ptr = cmd, .vtable = &.{ .execute = CommandImpl.execute } }) catch return VkError.OutOfHostMemory;
} }
pub fn bindPipeline(interface: *Interface, bind_point: vk.PipelineBindPoint, pipeline: *base.Pipeline) VkError!void { pub fn bindPipeline(interface: *Interface, bind_point: vk.PipelineBindPoint, pipeline: *base.Pipeline) VkError!void {
@@ -147,7 +155,8 @@ pub fn bindPipeline(interface: *Interface, bind_point: vk.PipelineBindPoint, pip
bind_point: vk.PipelineBindPoint, bind_point: vk.PipelineBindPoint,
pipeline: *SoftPipeline, pipeline: *SoftPipeline,
pub fn execute(impl: *const Impl, device: *ExecutionDevice) VkError!void { pub fn execute(context: *anyopaque, device: *ExecutionDevice) VkError!void {
const impl: *Impl = @ptrCast(@alignCast(context));
device.pipeline_states[@intCast(@intFromEnum(impl.bind_point))].pipeline = impl.pipeline; device.pipeline_states[@intCast(@intFromEnum(impl.bind_point))].pipeline = impl.pipeline;
} }
}; };
@@ -158,10 +167,41 @@ pub fn bindPipeline(interface: *Interface, bind_point: vk.PipelineBindPoint, pip
.bind_point = bind_point, .bind_point = bind_point,
.pipeline = @alignCast(@fieldParentPtr("interface", pipeline)), .pipeline = @alignCast(@fieldParentPtr("interface", pipeline)),
}; };
self.commands.append(allocator, Command.from(cmd)) catch return VkError.OutOfHostMemory; self.commands.append(allocator, .{ .ptr = cmd, .vtable = &.{ .execute = CommandImpl.execute } }) catch return VkError.OutOfHostMemory;
} }
pub fn clearColorImage(interface: *Interface, image: *base.Image, layout: vk.ImageLayout, color: *const vk.ClearColorValue, range: vk.ImageSubresourceRange) VkError!void { pub fn blitImage(interface: *Interface, src: *base.Image, _: vk.ImageLayout, dst: *base.Image, _: vk.ImageLayout, regions: []const vk.ImageBlit, filter: vk.Filter) VkError!void {
const self: *Self = @alignCast(@fieldParentPtr("interface", interface));
const allocator = self.command_allocator.allocator();
const CommandImpl = struct {
const Impl = @This();
src: *const SoftImage,
dst: *SoftImage,
regions: []const vk.ImageBlit,
filter: vk.Filter,
pub fn execute(context: *anyopaque, device: *ExecutionDevice) VkError!void {
const impl: *Impl = @ptrCast(@alignCast(context));
for (impl.regions[0..]) |region| {
try device.blitter.blitRegion(impl.src, impl.dst, region, impl.filter);
}
}
};
const cmd = allocator.create(CommandImpl) catch return VkError.OutOfHostMemory;
errdefer allocator.destroy(cmd);
cmd.* = .{
.src = @alignCast(@fieldParentPtr("interface", src)),
.dst = @alignCast(@fieldParentPtr("interface", dst)),
.regions = allocator.dupe(vk.ImageBlit, regions) catch return VkError.OutOfHostMemory, // Will be freed on cmdbuf reset or destroy
.filter = filter,
};
self.commands.append(allocator, .{ .ptr = cmd, .vtable = &.{ .execute = CommandImpl.execute } }) catch return VkError.OutOfHostMemory;
}
pub fn clearColorImage(interface: *Interface, image: *base.Image, _: vk.ImageLayout, color: *const vk.ClearColorValue, range: vk.ImageSubresourceRange) VkError!void {
const self: *Self = @alignCast(@fieldParentPtr("interface", interface)); const self: *Self = @alignCast(@fieldParentPtr("interface", interface));
const allocator = self.command_allocator.allocator(); const allocator = self.command_allocator.allocator();
@@ -169,12 +209,13 @@ pub fn clearColorImage(interface: *Interface, image: *base.Image, layout: vk.Ima
const Impl = @This(); const Impl = @This();
image: *SoftImage, image: *SoftImage,
layout: vk.ImageLayout,
clear_color: vk.ClearColorValue, clear_color: vk.ClearColorValue,
range: vk.ImageSubresourceRange, range: vk.ImageSubresourceRange,
pub fn execute(impl: *const Impl, _: *ExecutionDevice) VkError!void { pub fn execute(context: *anyopaque, device: *ExecutionDevice) VkError!void {
impl.image.clearRange(impl.clear_color, impl.range); const impl: *Impl = @ptrCast(@alignCast(context));
const clear_format = try impl.image.getClearFormat();
try device.blitter.clear(.{ .color = impl.clear_color }, clear_format, impl.image, impl.image.interface.format, impl.range, null);
} }
}; };
@@ -182,11 +223,10 @@ pub fn clearColorImage(interface: *Interface, image: *base.Image, layout: vk.Ima
errdefer allocator.destroy(cmd); errdefer allocator.destroy(cmd);
cmd.* = .{ cmd.* = .{
.image = @alignCast(@fieldParentPtr("interface", image)), .image = @alignCast(@fieldParentPtr("interface", image)),
.layout = layout,
.clear_color = color.*, .clear_color = color.*,
.range = range, .range = range,
}; };
self.commands.append(allocator, Command.from(cmd)) catch return VkError.OutOfHostMemory; self.commands.append(allocator, .{ .ptr = cmd, .vtable = &.{ .execute = CommandImpl.execute } }) catch return VkError.OutOfHostMemory;
} }
pub fn copyBuffer(interface: *Interface, src: *base.Buffer, dst: *base.Buffer, regions: []const vk.BufferCopy) VkError!void { pub fn copyBuffer(interface: *Interface, src: *base.Buffer, dst: *base.Buffer, regions: []const vk.BufferCopy) VkError!void {
@@ -200,7 +240,8 @@ pub fn copyBuffer(interface: *Interface, src: *base.Buffer, dst: *base.Buffer, r
dst: *SoftBuffer, dst: *SoftBuffer,
regions: []const vk.BufferCopy, regions: []const vk.BufferCopy,
pub fn execute(impl: *const Impl, _: *ExecutionDevice) VkError!void { pub fn execute(context: *anyopaque, _: *ExecutionDevice) VkError!void {
const impl: *Impl = @ptrCast(@alignCast(context));
try impl.src.copyBuffer(impl.dst, impl.regions); try impl.src.copyBuffer(impl.dst, impl.regions);
} }
}; };
@@ -212,7 +253,7 @@ pub fn copyBuffer(interface: *Interface, src: *base.Buffer, dst: *base.Buffer, r
.dst = @alignCast(@fieldParentPtr("interface", dst)), .dst = @alignCast(@fieldParentPtr("interface", dst)),
.regions = allocator.dupe(vk.BufferCopy, regions) catch return VkError.OutOfHostMemory, // Will be freed on cmdbuf reset or destroy .regions = allocator.dupe(vk.BufferCopy, regions) catch return VkError.OutOfHostMemory, // Will be freed on cmdbuf reset or destroy
}; };
self.commands.append(allocator, Command.from(cmd)) catch return VkError.OutOfHostMemory; self.commands.append(allocator, .{ .ptr = cmd, .vtable = &.{ .execute = CommandImpl.execute } }) catch return VkError.OutOfHostMemory;
} }
pub fn copyBufferToImage(interface: *Interface, src: *base.Buffer, dst: *base.Image, dst_layout: vk.ImageLayout, regions: []const vk.BufferImageCopy) VkError!void { pub fn copyBufferToImage(interface: *Interface, src: *base.Buffer, dst: *base.Image, dst_layout: vk.ImageLayout, regions: []const vk.BufferImageCopy) VkError!void {
@@ -227,7 +268,8 @@ pub fn copyBufferToImage(interface: *Interface, src: *base.Buffer, dst: *base.Im
dst_layout: vk.ImageLayout, dst_layout: vk.ImageLayout,
regions: []const vk.BufferImageCopy, regions: []const vk.BufferImageCopy,
pub fn execute(impl: *const Impl, _: *ExecutionDevice) VkError!void { pub fn execute(context: *anyopaque, _: *ExecutionDevice) VkError!void {
const impl: *Impl = @ptrCast(@alignCast(context));
for (impl.regions[0..]) |region| { for (impl.regions[0..]) |region| {
try impl.dst.copyFromBuffer(impl.src, region); try impl.dst.copyFromBuffer(impl.src, region);
} }
@@ -242,10 +284,10 @@ pub fn copyBufferToImage(interface: *Interface, src: *base.Buffer, dst: *base.Im
.dst = @alignCast(@fieldParentPtr("interface", dst)), .dst = @alignCast(@fieldParentPtr("interface", dst)),
.regions = allocator.dupe(vk.BufferImageCopy, regions) catch return VkError.OutOfHostMemory, // Will be freed on cmdbuf reset or destroy .regions = allocator.dupe(vk.BufferImageCopy, regions) catch return VkError.OutOfHostMemory, // Will be freed on cmdbuf reset or destroy
}; };
self.commands.append(allocator, Command.from(cmd)) catch return VkError.OutOfHostMemory; self.commands.append(allocator, .{ .ptr = cmd, .vtable = &.{ .execute = CommandImpl.execute } }) catch return VkError.OutOfHostMemory;
} }
pub fn copyImage(interface: *Interface, src: *base.Image, src_layout: vk.ImageLayout, dst: *base.Image, dst_layout: vk.ImageLayout, regions: []const vk.ImageCopy) VkError!void { pub fn copyImage(interface: *Interface, src: *base.Image, _: vk.ImageLayout, dst: *base.Image, _: vk.ImageLayout, regions: []const vk.ImageCopy) VkError!void {
const self: *Self = @alignCast(@fieldParentPtr("interface", interface)); const self: *Self = @alignCast(@fieldParentPtr("interface", interface));
const allocator = self.command_allocator.allocator(); const allocator = self.command_allocator.allocator();
@@ -253,13 +295,14 @@ pub fn copyImage(interface: *Interface, src: *base.Image, src_layout: vk.ImageLa
const Impl = @This(); const Impl = @This();
src: *const SoftImage, src: *const SoftImage,
src_layout: vk.ImageLayout,
dst: *SoftImage, dst: *SoftImage,
dst_layout: vk.ImageLayout,
regions: []const vk.ImageCopy, regions: []const vk.ImageCopy,
pub fn execute(impl: *const Impl, _: *ExecutionDevice) VkError!void { pub fn execute(context: *anyopaque, _: *ExecutionDevice) VkError!void {
try impl.src.copyImage(impl.src_layout, impl.dst, impl.dst_layout, impl.regions); const impl: *Impl = @ptrCast(@alignCast(context));
for (impl.regions[0..]) |region| {
try impl.src.copyToImage(impl.dst, region);
}
} }
}; };
@@ -267,12 +310,10 @@ pub fn copyImage(interface: *Interface, src: *base.Image, src_layout: vk.ImageLa
errdefer allocator.destroy(cmd); errdefer allocator.destroy(cmd);
cmd.* = .{ cmd.* = .{
.src = @alignCast(@fieldParentPtr("interface", src)), .src = @alignCast(@fieldParentPtr("interface", src)),
.src_layout = src_layout,
.dst = @alignCast(@fieldParentPtr("interface", dst)), .dst = @alignCast(@fieldParentPtr("interface", dst)),
.dst_layout = dst_layout,
.regions = allocator.dupe(vk.ImageCopy, regions) catch return VkError.OutOfHostMemory, // Will be freed on cmdbuf reset or destroy .regions = allocator.dupe(vk.ImageCopy, regions) catch return VkError.OutOfHostMemory, // Will be freed on cmdbuf reset or destroy
}; };
self.commands.append(allocator, Command.from(cmd)) catch return VkError.OutOfHostMemory; self.commands.append(allocator, .{ .ptr = cmd, .vtable = &.{ .execute = CommandImpl.execute } }) catch return VkError.OutOfHostMemory;
} }
pub fn copyImageToBuffer(interface: *Interface, src: *base.Image, src_layout: vk.ImageLayout, dst: *base.Buffer, regions: []const vk.BufferImageCopy) VkError!void { pub fn copyImageToBuffer(interface: *Interface, src: *base.Image, src_layout: vk.ImageLayout, dst: *base.Buffer, regions: []const vk.BufferImageCopy) VkError!void {
@@ -287,7 +328,8 @@ pub fn copyImageToBuffer(interface: *Interface, src: *base.Image, src_layout: vk
dst: *SoftBuffer, dst: *SoftBuffer,
regions: []const vk.BufferImageCopy, regions: []const vk.BufferImageCopy,
pub fn execute(impl: *const Impl, _: *ExecutionDevice) VkError!void { pub fn execute(context: *anyopaque, _: *ExecutionDevice) VkError!void {
const impl: *Impl = @ptrCast(@alignCast(context));
for (impl.regions[0..]) |region| { for (impl.regions[0..]) |region| {
try impl.src.copyToBuffer(impl.dst, region); try impl.src.copyToBuffer(impl.dst, region);
} }
@@ -302,7 +344,7 @@ pub fn copyImageToBuffer(interface: *Interface, src: *base.Image, src_layout: vk
.dst = @alignCast(@fieldParentPtr("interface", dst)), .dst = @alignCast(@fieldParentPtr("interface", dst)),
.regions = allocator.dupe(vk.BufferImageCopy, regions) catch return VkError.OutOfHostMemory, // Will be freed on cmdbuf reset or destroy .regions = allocator.dupe(vk.BufferImageCopy, regions) catch return VkError.OutOfHostMemory, // Will be freed on cmdbuf reset or destroy
}; };
self.commands.append(allocator, Command.from(cmd)) catch return VkError.OutOfHostMemory; self.commands.append(allocator, .{ .ptr = cmd, .vtable = &.{ .execute = CommandImpl.execute } }) catch return VkError.OutOfHostMemory;
} }
pub fn dispatch(interface: *Interface, group_count_x: u32, group_count_y: u32, group_count_z: u32) VkError!void { pub fn dispatch(interface: *Interface, group_count_x: u32, group_count_y: u32, group_count_z: u32) VkError!void {
@@ -316,7 +358,8 @@ pub fn dispatch(interface: *Interface, group_count_x: u32, group_count_y: u32, g
group_count_y: u32, group_count_y: u32,
group_count_z: u32, group_count_z: u32,
pub fn execute(impl: *const Impl, device: *ExecutionDevice) VkError!void { pub fn execute(context: *anyopaque, device: *ExecutionDevice) VkError!void {
const impl: *Impl = @ptrCast(@alignCast(context));
try device.compute_routines.dispatch(impl.group_count_x, impl.group_count_y, impl.group_count_z); try device.compute_routines.dispatch(impl.group_count_x, impl.group_count_y, impl.group_count_z);
} }
}; };
@@ -328,7 +371,7 @@ pub fn dispatch(interface: *Interface, group_count_x: u32, group_count_y: u32, g
.group_count_y = group_count_y, .group_count_y = group_count_y,
.group_count_z = group_count_z, .group_count_z = group_count_z,
}; };
self.commands.append(allocator, Command.from(cmd)) catch return VkError.OutOfHostMemory; self.commands.append(allocator, .{ .ptr = cmd, .vtable = &.{ .execute = CommandImpl.execute } }) catch return VkError.OutOfHostMemory;
} }
pub fn dispatchIndirect(interface: *Interface, buffer: *base.Buffer, offset: vk.DeviceSize) VkError!void { pub fn dispatchIndirect(interface: *Interface, buffer: *base.Buffer, offset: vk.DeviceSize) VkError!void {
@@ -341,13 +384,11 @@ pub fn dispatchIndirect(interface: *Interface, buffer: *base.Buffer, offset: vk.
buffer: *SoftBuffer, buffer: *SoftBuffer,
offset: vk.DeviceSize, offset: vk.DeviceSize,
pub fn execute(impl: *const Impl, device: *ExecutionDevice) VkError!void { pub fn execute(context: *anyopaque, device: *ExecutionDevice) VkError!void {
const impl: *Impl = @ptrCast(@alignCast(context));
const size = 3 * @sizeOf(u32); const size = 3 * @sizeOf(u32);
const memory = if (impl.buffer.interface.memory) |memory| memory else return VkError.InvalidDeviceMemoryDrv; const memory = if (impl.buffer.interface.memory) |memory| memory else return VkError.InvalidDeviceMemoryDrv;
const map: []u32 = @as([*]u32, @ptrCast(@alignCast(try memory.map(impl.offset, size))))[0..3]; const map: []u32 = @as([*]u32, @ptrCast(@alignCast(try memory.map(impl.offset, size))))[0..3];
std.debug.print("{any}\n", .{map});
try device.compute_routines.dispatch(map[0], map[1], map[2]); try device.compute_routines.dispatch(map[0], map[1], map[2]);
} }
}; };
@@ -358,7 +399,30 @@ pub fn dispatchIndirect(interface: *Interface, buffer: *base.Buffer, offset: vk.
.buffer = @alignCast(@fieldParentPtr("interface", buffer)), .buffer = @alignCast(@fieldParentPtr("interface", buffer)),
.offset = offset, .offset = offset,
}; };
self.commands.append(allocator, Command.from(cmd)) catch return VkError.OutOfHostMemory; self.commands.append(allocator, .{ .ptr = cmd, .vtable = &.{ .execute = CommandImpl.execute } }) catch return VkError.OutOfHostMemory;
}
pub fn executeCommands(interface: *Interface, commands: *Interface) VkError!void {
const self: *Self = @alignCast(@fieldParentPtr("interface", interface));
const allocator = self.command_allocator.allocator();
const CommandImpl = struct {
const Impl = @This();
cmd: *Self,
pub fn execute(context: *anyopaque, device: *ExecutionDevice) VkError!void {
const impl: *Impl = @ptrCast(@alignCast(context));
impl.cmd.execute(device);
}
};
const cmd = allocator.create(CommandImpl) catch return VkError.OutOfHostMemory;
errdefer allocator.destroy(cmd);
cmd.* = .{
.cmd = @alignCast(@fieldParentPtr("interface", commands)),
};
self.commands.append(allocator, .{ .ptr = cmd, .vtable = &.{ .execute = CommandImpl.execute } }) catch return VkError.OutOfHostMemory;
} }
pub fn fillBuffer(interface: *Interface, buffer: *base.Buffer, offset: vk.DeviceSize, size: vk.DeviceSize, data: u32) VkError!void { pub fn fillBuffer(interface: *Interface, buffer: *base.Buffer, offset: vk.DeviceSize, size: vk.DeviceSize, data: u32) VkError!void {
@@ -373,7 +437,8 @@ pub fn fillBuffer(interface: *Interface, buffer: *base.Buffer, offset: vk.Device
size: vk.DeviceSize, size: vk.DeviceSize,
data: u32, data: u32,
pub fn execute(impl: *const Impl, _: *ExecutionDevice) VkError!void { pub fn execute(context: *anyopaque, _: *ExecutionDevice) VkError!void {
const impl: *Impl = @ptrCast(@alignCast(context));
try impl.buffer.fillBuffer(impl.offset, impl.size, impl.data); try impl.buffer.fillBuffer(impl.offset, impl.size, impl.data);
} }
}; };
@@ -386,7 +451,7 @@ pub fn fillBuffer(interface: *Interface, buffer: *base.Buffer, offset: vk.Device
.size = size, .size = size,
.data = data, .data = data,
}; };
self.commands.append(allocator, Command.from(cmd)) catch return VkError.OutOfHostMemory; self.commands.append(allocator, .{ .ptr = cmd, .vtable = &.{ .execute = CommandImpl.execute } }) catch return VkError.OutOfHostMemory;
} }
pub fn pipelineBarrier(interface: *Interface, src_stage: vk.PipelineStageFlags, dst_stage: vk.PipelineStageFlags, dependency: vk.DependencyFlags, memory_barriers: []const vk.MemoryBarrier, buffer_barriers: []const vk.BufferMemoryBarrier, image_barriers: []const vk.ImageMemoryBarrier) VkError!void { pub fn pipelineBarrier(interface: *Interface, src_stage: vk.PipelineStageFlags, dst_stage: vk.PipelineStageFlags, dependency: vk.DependencyFlags, memory_barriers: []const vk.MemoryBarrier, buffer_barriers: []const vk.BufferMemoryBarrier, image_barriers: []const vk.ImageMemoryBarrier) VkError!void {
@@ -396,15 +461,15 @@ pub fn pipelineBarrier(interface: *Interface, src_stage: vk.PipelineStageFlags,
const CommandImpl = struct { const CommandImpl = struct {
const Impl = @This(); const Impl = @This();
pub fn execute(_: *const Impl, _: *ExecutionDevice) VkError!void { pub fn execute(_: *anyopaque, _: *ExecutionDevice) VkError!void {
// TODO: implement synchronization for rasterizations stages // TODO: implement synchronization for rasterization stages
} }
}; };
const cmd = allocator.create(CommandImpl) catch return VkError.OutOfHostMemory; const cmd = allocator.create(CommandImpl) catch return VkError.OutOfHostMemory;
errdefer allocator.destroy(cmd); errdefer allocator.destroy(cmd);
cmd.* = .{}; cmd.* = .{};
self.commands.append(allocator, Command.from(cmd)) catch return VkError.OutOfHostMemory; self.commands.append(allocator, .{ .ptr = cmd, .vtable = &.{ .execute = CommandImpl.execute } }) catch return VkError.OutOfHostMemory;
_ = src_stage; _ = src_stage;
_ = dst_stage; _ = dst_stage;

View File

@@ -14,6 +14,8 @@ pub const Interface = base.DescriptorPool;
interface: Interface, interface: Interface,
list: std.ArrayList(*SoftDescriptorSet),
pub fn create(device: *base.Device, allocator: std.mem.Allocator, info: *const vk.DescriptorPoolCreateInfo) VkError!*Self { pub fn create(device: *base.Device, allocator: std.mem.Allocator, info: *const vk.DescriptorPoolCreateInfo) VkError!*Self {
const self = allocator.create(Self) catch return VkError.OutOfHostMemory; const self = allocator.create(Self) catch return VkError.OutOfHostMemory;
errdefer allocator.destroy(self); errdefer allocator.destroy(self);
@@ -24,27 +26,48 @@ pub fn create(device: *base.Device, allocator: std.mem.Allocator, info: *const v
.allocateDescriptorSet = allocateDescriptorSet, .allocateDescriptorSet = allocateDescriptorSet,
.destroy = destroy, .destroy = destroy,
.freeDescriptorSet = freeDescriptorSet, .freeDescriptorSet = freeDescriptorSet,
.reset = reset,
}; };
self.* = .{ self.* = .{
.interface = interface, .interface = interface,
.list = std.ArrayList(*SoftDescriptorSet).initCapacity(allocator, info.max_sets) catch return VkError.OutOfHostMemory,
}; };
return self; return self;
} }
pub fn allocateDescriptorSet(interface: *Interface, layout: *base.DescriptorSetLayout) VkError!*base.DescriptorSet { pub fn allocateDescriptorSet(interface: *Interface, layout: *base.DescriptorSetLayout) VkError!*base.DescriptorSet {
const self: *Self = @alignCast(@fieldParentPtr("interface", interface));
const allocator = VulkanAllocator.init(null, .object).allocator(); const allocator = VulkanAllocator.init(null, .object).allocator();
const set = try SoftDescriptorSet.create(interface.owner, allocator, layout); const set = try SoftDescriptorSet.create(interface.owner, allocator, layout);
self.list.appendAssumeCapacity(set);
return &set.interface; return &set.interface;
} }
pub fn destroy(interface: *Interface, allocator: std.mem.Allocator) void { pub fn destroy(interface: *Interface, allocator: std.mem.Allocator) void {
const self: *Self = @alignCast(@fieldParentPtr("interface", interface)); const self: *Self = @alignCast(@fieldParentPtr("interface", interface));
self.list.deinit(allocator);
allocator.destroy(self); allocator.destroy(self);
} }
pub fn freeDescriptorSet(interface: *Interface, set: *base.DescriptorSet) VkError!void { pub fn freeDescriptorSet(interface: *Interface, set: *base.DescriptorSet) VkError!void {
_ = interface; const self: *Self = @alignCast(@fieldParentPtr("interface", interface));
const soft_set: *SoftDescriptorSet = @alignCast(@fieldParentPtr("interface", set));
if (std.mem.indexOfScalar(*SoftDescriptorSet, self.list.items, soft_set)) |pos| {
_ = self.list.orderedRemove(pos);
}
const allocator = VulkanAllocator.init(null, .object).allocator(); const allocator = VulkanAllocator.init(null, .object).allocator();
allocator.destroy(set); allocator.destroy(soft_set);
}
pub fn reset(interface: *Interface, _: vk.DescriptorPoolResetFlags) VkError!void {
const self: *Self = @alignCast(@fieldParentPtr("interface", interface));
const allocator = VulkanAllocator.init(null, .object).allocator();
for (self.list.items) |set| {
allocator.destroy(set);
}
self.list.clearRetainingCapacity();
} }

View File

@@ -5,8 +5,12 @@ const base = @import("base");
const VkError = base.VkError; const VkError = base.VkError;
const Device = base.Device; const Device = base.Device;
const Buffer = base.Buffer; const Buffer = base.Buffer;
const ImageView = base.ImageView;
const SoftBuffer = @import("SoftBuffer.zig"); const SoftBuffer = @import("SoftBuffer.zig");
const SoftImage = @import("SoftImage.zig");
const SoftImageView = @import("SoftImageView.zig");
const SoftSampler = @import("SoftSampler.zig");
const NonDispatchable = base.NonDispatchable; const NonDispatchable = base.NonDispatchable;
@@ -19,9 +23,20 @@ const DescriptorBuffer = struct {
size: vk.DeviceSize, size: vk.DeviceSize,
}; };
const DescriptorTexture = struct {
sampler: ?*SoftSampler,
view: ?*SoftImageView,
};
const DescriptorImage = struct {
object: ?*SoftImage,
};
const Descriptor = union(enum) { const Descriptor = union(enum) {
buffer: []DescriptorBuffer, buffer: []DescriptorBuffer,
image: struct {}, texture: []DescriptorTexture,
image: []DescriptorImage,
unsupported: struct {},
}; };
interface: Interface, interface: Interface,
@@ -48,6 +63,7 @@ pub fn create(device: *base.Device, allocator: std.mem.Allocator, layout: *base.
for (layout.bindings) |binding| { for (layout.bindings) |binding| {
const struct_size: usize = switch (binding.descriptor_type) { const struct_size: usize = switch (binding.descriptor_type) {
.storage_buffer, .storage_buffer_dynamic => @sizeOf(DescriptorBuffer), .storage_buffer, .storage_buffer_dynamic => @sizeOf(DescriptorBuffer),
.storage_image, .input_attachment => @sizeOf(DescriptorImage),
else => 0, else => 0,
}; };
@@ -68,6 +84,9 @@ pub fn create(device: *base.Device, allocator: std.mem.Allocator, layout: *base.
.storage_buffer, .storage_buffer_dynamic => descriptor.* = .{ .storage_buffer, .storage_buffer_dynamic => descriptor.* = .{
.buffer = local_allocator.alloc(DescriptorBuffer, binding.array_size) catch return VkError.OutOfHostMemory, .buffer = local_allocator.alloc(DescriptorBuffer, binding.array_size) catch return VkError.OutOfHostMemory,
}, },
.storage_image, .input_attachment => descriptor.* = .{
.image = local_allocator.alloc(DescriptorImage, binding.array_size) catch return VkError.OutOfHostMemory,
},
else => {}, else => {},
} }
} }
@@ -113,6 +132,19 @@ pub fn write(interface: *Interface, write_data: vk.WriteDescriptorSet) VkError!v
} }
} }
}, },
else => base.unsupported("descriptor type {s} for writting", .{@tagName(write_data.descriptor_type)}), .storage_image, .input_attachment => {
for (write_data.p_image_info, 0..write_data.descriptor_count) |image_info, i| {
const desc = &self.descriptors[write_data.dst_binding].image[i];
desc.* = .{ .object = null };
if (image_info.image_view != .null_handle) {
const image_view = try NonDispatchable(ImageView).fromHandleObject(image_info.image_view);
desc.object = @as(*SoftImage, @alignCast(@fieldParentPtr("interface", image_view.image)));
}
}
},
else => {
self.descriptors[write_data.dst_binding] = .{ .unsupported = .{} };
base.unsupported("descriptor type {s} for writting", .{@tagName(write_data.descriptor_type)});
},
} }
} }

View File

@@ -2,10 +2,9 @@ const std = @import("std");
const vk = @import("vulkan"); const vk = @import("vulkan");
const base = @import("base"); const base = @import("base");
const builtin = @import("builtin"); const builtin = @import("builtin");
const config = @import("config"); const config = base.config;
const SoftQueue = @import("SoftQueue.zig"); const SoftQueue = @import("SoftQueue.zig");
const Blitter = @import("device/Blitter.zig");
pub const SoftBinarySemaphore = @import("SoftBinarySemaphore.zig"); pub const SoftBinarySemaphore = @import("SoftBinarySemaphore.zig");
pub const SoftBuffer = @import("SoftBuffer.zig"); pub const SoftBuffer = @import("SoftBuffer.zig");
@@ -19,6 +18,7 @@ pub const SoftEvent = @import("SoftEvent.zig");
pub const SoftFence = @import("SoftFence.zig"); pub const SoftFence = @import("SoftFence.zig");
pub const SoftFramebuffer = @import("SoftFramebuffer.zig"); pub const SoftFramebuffer = @import("SoftFramebuffer.zig");
pub const SoftImage = @import("SoftImage.zig"); pub const SoftImage = @import("SoftImage.zig");
pub const SoftInstance = @import("SoftInstance.zig");
pub const SoftImageView = @import("SoftImageView.zig"); pub const SoftImageView = @import("SoftImageView.zig");
pub const SoftPipeline = @import("SoftPipeline.zig"); pub const SoftPipeline = @import("SoftPipeline.zig");
pub const SoftPipelineCache = @import("SoftPipelineCache.zig"); pub const SoftPipelineCache = @import("SoftPipelineCache.zig");
@@ -35,16 +35,20 @@ pub const Interface = base.Device;
const SpawnError = std.Thread.SpawnError; const SpawnError = std.Thread.SpawnError;
interface: Interface, const DeviceAllocator = struct {
device_allocator: if (config.debug_allocator) std.heap.DebugAllocator(.{}) else std.heap.ThreadSafeAllocator, pub inline fn allocator(_: @This()) std.mem.Allocator {
workers: std.Thread.Pool, return std.heap.smp_allocator;
blitter: Blitter, }
};
pub fn create(physical_device: *base.PhysicalDevice, allocator: std.mem.Allocator, info: *const vk.DeviceCreateInfo) VkError!*Self { interface: Interface,
device_allocator: if (config.debug_allocator) std.heap.DebugAllocator(.{}) else DeviceAllocator,
pub fn create(instance: *base.Instance, physical_device: *base.PhysicalDevice, allocator: std.mem.Allocator, info: *const vk.DeviceCreateInfo) VkError!*Self {
const self = allocator.create(Self) catch return VkError.OutOfHostMemory; const self = allocator.create(Self) catch return VkError.OutOfHostMemory;
errdefer allocator.destroy(self); errdefer allocator.destroy(self);
var interface = try Interface.init(allocator, physical_device, info); var interface = try Interface.init(allocator, instance, physical_device, info);
interface.vtable = &.{ interface.vtable = &.{
.createQueue = SoftQueue.create, .createQueue = SoftQueue.create,
@@ -77,14 +81,7 @@ pub fn create(physical_device: *base.PhysicalDevice, allocator: std.mem.Allocato
self.* = .{ self.* = .{
.interface = interface, .interface = interface,
.device_allocator = if (config.debug_allocator) .init else .{ .child_allocator = std.heap.c_allocator }, // TODO: better device allocator .device_allocator = if (config.debug_allocator) .init else .{},
.workers = undefined,
.blitter = .init,
};
self.workers.init(.{ .allocator = self.device_allocator.allocator() }) catch |err| return switch (err) {
SpawnError.OutOfMemory, SpawnError.LockedMemoryLimitExceeded => VkError.OutOfDeviceMemory,
else => VkError.Unknown,
}; };
try self.interface.createQueues(allocator, info); try self.interface.createQueues(allocator, info);
@@ -93,16 +90,14 @@ pub fn create(physical_device: *base.PhysicalDevice, allocator: std.mem.Allocato
pub fn destroy(interface: *Interface, allocator: std.mem.Allocator) VkError!void { pub fn destroy(interface: *Interface, allocator: std.mem.Allocator) VkError!void {
const self: *Self = @alignCast(@fieldParentPtr("interface", interface)); const self: *Self = @alignCast(@fieldParentPtr("interface", interface));
self.workers.deinit();
if (config.debug_allocator) { if (config.debug_allocator) {
// All device memory allocations should've been freed by now // All device memory allocations should've been freed by now
if (!self.device_allocator.detectLeaks()) { if (!self.device_allocator.detectLeaks()) {
std.log.scoped(.vkDestroyDevice).debug("No device memory leaks detected", .{}); std.log.scoped(.vkDestroyDevice).debug("No device memory leaks detected", .{});
} }
}
allocator.destroy(self); allocator.destroy(self);
}
} }
pub fn allocateMemory(interface: *Interface, allocator: std.mem.Allocator, info: *const vk.MemoryAllocateInfo) VkError!*base.DeviceMemory { pub fn allocateMemory(interface: *Interface, allocator: std.mem.Allocator, info: *const vk.MemoryAllocateInfo) VkError!*base.DeviceMemory {

View File

@@ -9,8 +9,8 @@ const Self = @This();
pub const Interface = base.Event; pub const Interface = base.Event;
interface: Interface, interface: Interface,
mutex: std.Thread.Mutex, mutex: std.Io.Mutex,
condition: std.Thread.Condition, condition: std.Io.Condition,
is_signaled: bool, is_signaled: bool,
pub fn create(device: *base.Device, allocator: std.mem.Allocator, info: *const vk.EventCreateInfo) VkError!*Self { pub fn create(device: *base.Device, allocator: std.mem.Allocator, info: *const vk.EventCreateInfo) VkError!*Self {
@@ -29,8 +29,8 @@ pub fn create(device: *base.Device, allocator: std.mem.Allocator, info: *const v
self.* = .{ self.* = .{
.interface = interface, .interface = interface,
.mutex = std.Thread.Mutex{}, .mutex = .init,
.condition = std.Thread.Condition{}, .condition = .init,
.is_signaled = false, .is_signaled = false,
}; };
return self; return self;
@@ -43,9 +43,10 @@ pub fn destroy(interface: *Interface, allocator: std.mem.Allocator) void {
pub fn getStatus(interface: *Interface) VkError!void { pub fn getStatus(interface: *Interface) VkError!void {
const self: *Self = @alignCast(@fieldParentPtr("interface", interface)); const self: *Self = @alignCast(@fieldParentPtr("interface", interface));
const io = interface.owner.io();
self.mutex.lock(); self.mutex.lock(io) catch return VkError.DeviceLost;
defer self.mutex.unlock(); defer self.mutex.unlock(io);
if (!self.is_signaled) { if (!self.is_signaled) {
return VkError.EventReset; return VkError.EventReset;
@@ -54,35 +55,41 @@ pub fn getStatus(interface: *Interface) VkError!void {
pub fn reset(interface: *Interface) VkError!void { pub fn reset(interface: *Interface) VkError!void {
const self: *Self = @alignCast(@fieldParentPtr("interface", interface)); const self: *Self = @alignCast(@fieldParentPtr("interface", interface));
const io = interface.owner.io();
self.mutex.lock(); self.mutex.lock(io) catch return VkError.DeviceLost;
defer self.mutex.unlock(); defer self.mutex.unlock(io);
self.is_signaled = false; self.is_signaled = false;
} }
pub fn signal(interface: *Interface) VkError!void { pub fn signal(interface: *Interface) VkError!void {
const self: *Self = @alignCast(@fieldParentPtr("interface", interface)); const self: *Self = @alignCast(@fieldParentPtr("interface", interface));
const io = interface.owner.io();
self.mutex.lock(); self.mutex.lock(io) catch return VkError.DeviceLost;
defer self.mutex.unlock(); defer self.mutex.unlock(io);
self.is_signaled = true; self.is_signaled = true;
self.condition.broadcast(); self.condition.broadcast(io);
} }
pub fn wait(interface: *Interface, timeout: u64) VkError!void { pub fn wait(interface: *Interface, timeout: u64) VkError!void {
const self: *Self = @alignCast(@fieldParentPtr("interface", interface)); const self: *Self = @alignCast(@fieldParentPtr("interface", interface));
const io = interface.owner.io();
self.mutex.lock(); self.mutex.lock(io) catch return VkError.DeviceLost;
defer self.mutex.unlock(); defer self.mutex.unlock(io);
if (self.is_signaled) return; if (self.is_signaled) return;
if (timeout == 0) return VkError.Timeout; if (timeout == 0) return VkError.Timeout;
if (timeout == std.math.maxInt(@TypeOf(timeout))) { if (timeout != std.math.maxInt(@TypeOf(timeout))) {
self.condition.wait(&self.mutex); const duration: std.Io.Clock.Duration = .{
} else { .raw = .fromNanoseconds(@intCast(timeout)),
self.condition.timedWait(&self.mutex, timeout) catch return VkError.Timeout; .clock = .cpu_process,
};
duration.sleep(io) catch return VkError.DeviceLost;
} }
self.condition.wait(io, &self.mutex) catch return VkError.DeviceLost;
} }

View File

@@ -9,8 +9,8 @@ const Self = @This();
pub const Interface = base.Fence; pub const Interface = base.Fence;
interface: Interface, interface: Interface,
mutex: std.Thread.Mutex, mutex: std.Io.Mutex,
condition: std.Thread.Condition, condition: std.Io.Condition,
is_signaled: bool, is_signaled: bool,
pub fn create(device: *Device, allocator: std.mem.Allocator, info: *const vk.FenceCreateInfo) VkError!*Self { pub fn create(device: *Device, allocator: std.mem.Allocator, info: *const vk.FenceCreateInfo) VkError!*Self {
@@ -29,8 +29,8 @@ pub fn create(device: *Device, allocator: std.mem.Allocator, info: *const vk.Fen
self.* = .{ self.* = .{
.interface = interface, .interface = interface,
.mutex = std.Thread.Mutex{}, .mutex = .init,
.condition = std.Thread.Condition{}, .condition = .init,
.is_signaled = info.flags.signaled_bit, .is_signaled = info.flags.signaled_bit,
}; };
return self; return self;
@@ -55,26 +55,31 @@ pub fn reset(interface: *Interface) VkError!void {
pub fn signal(interface: *Interface) VkError!void { pub fn signal(interface: *Interface) VkError!void {
const self: *Self = @alignCast(@fieldParentPtr("interface", interface)); const self: *Self = @alignCast(@fieldParentPtr("interface", interface));
const io = interface.owner.io();
self.mutex.lock(); self.mutex.lock(io) catch return VkError.DeviceLost;
defer self.mutex.unlock(); defer self.mutex.unlock(io);
self.is_signaled = true; self.is_signaled = true;
self.condition.broadcast(); self.condition.broadcast(io);
} }
pub fn wait(interface: *Interface, timeout: u64) VkError!void { pub fn wait(interface: *Interface, timeout: u64) VkError!void {
const self: *Self = @alignCast(@fieldParentPtr("interface", interface)); const self: *Self = @alignCast(@fieldParentPtr("interface", interface));
const io = interface.owner.io();
self.mutex.lock(); self.mutex.lock(io) catch return VkError.DeviceLost;
defer self.mutex.unlock(); defer self.mutex.unlock(io);
if (self.is_signaled) return; if (self.is_signaled) return;
if (timeout == 0) return VkError.Timeout; if (timeout == 0) return VkError.Timeout;
if (timeout == std.math.maxInt(@TypeOf(timeout))) { if (timeout != std.math.maxInt(@TypeOf(timeout))) {
self.condition.wait(&self.mutex); const duration: std.Io.Clock.Duration = .{
} else { .raw = .fromNanoseconds(@intCast(timeout)),
self.condition.timedWait(&self.mutex, timeout) catch return VkError.Timeout; .clock = .cpu_process,
};
duration.sleep(io) catch return VkError.DeviceLost;
} }
self.condition.wait(io, &self.mutex) catch return VkError.DeviceLost;
} }

View File

@@ -24,6 +24,7 @@ pub fn create(device: *base.Device, allocator: std.mem.Allocator, info: *const v
interface.vtable = &.{ interface.vtable = &.{
.destroy = destroy, .destroy = destroy,
.getMemoryRequirements = getMemoryRequirements, .getMemoryRequirements = getMemoryRequirements,
.getTotalSizeForAspect = getTotalSizeForAspect,
}; };
self.* = .{ self.* = .{
@@ -37,46 +38,172 @@ pub fn destroy(interface: *Interface, allocator: std.mem.Allocator) void {
allocator.destroy(self); allocator.destroy(self);
} }
pub fn getMemoryRequirements(interface: *Interface, requirements: *vk.MemoryRequirements) void { pub fn getMemoryRequirements(_: *Interface, requirements: *vk.MemoryRequirements) VkError!void {
_ = interface;
requirements.alignment = lib.MEMORY_REQUIREMENTS_IMAGE_ALIGNMENT; requirements.alignment = lib.MEMORY_REQUIREMENTS_IMAGE_ALIGNMENT;
} }
inline fn clear(self: *Self, pixel: vk.ClearValue, format: vk.Format, view_format: vk.Format, range: vk.ImageSubresourceRange, area: ?vk.Rect2D) void { pub fn getClearFormat(self: *Self) VkError!vk.Format {
const soft_device: *SoftDevice = @alignCast(@fieldParentPtr("interface", self.interface.owner)); return if (base.vku.vkuFormatIsSINT(@intCast(@intFromEnum(self.interface.format))))
soft_device.blitter.clear(pixel, format, self, view_format, range, area);
}
pub fn clearRange(self: *Self, color: vk.ClearColorValue, range: vk.ImageSubresourceRange) void {
std.debug.assert(range.aspect_mask == vk.ImageAspectFlags{ .color_bit = true });
const clear_format: vk.Format = if (base.vku.vkuFormatIsSINT(@intCast(@intFromEnum(self.interface.format))))
.r32g32b32a32_sint .r32g32b32a32_sint
else if (base.vku.vkuFormatIsUINT(@intCast(@intFromEnum(self.interface.format)))) else if (base.vku.vkuFormatIsUINT(@intCast(@intFromEnum(self.interface.format))))
.r32g32b32a32_uint .r32g32b32a32_uint
else else
.r32g32b32a32_sfloat; .r32g32b32a32_sfloat;
self.clear(.{ .color = color }, clear_format, self.interface.format, range, null);
} }
pub fn copyImage(self: *const Self, self_layout: vk.ImageLayout, dst: *Self, dst_layout: vk.ImageLayout, regions: []const vk.ImageCopy) VkError!void { /// Based on SwiftShader vk::Image::copyTo
_ = self; pub fn copyToImage(self: *const Self, dst: *Self, region: vk.ImageCopy) VkError!void {
_ = self_layout; const combined_depth_stencil_aspect: vk.ImageAspectFlags = .{
_ = dst; .depth_bit = true,
_ = dst_layout; .stencil_bit = true,
_ = regions; };
std.log.scoped(.commandExecutor).warn("FIXME: implement image to image copy", .{});
if (region.src_subresource.aspect_mask == combined_depth_stencil_aspect and
region.dst_subresource.aspect_mask == combined_depth_stencil_aspect)
{
var single_aspect_region = region;
single_aspect_region.src_subresource.aspect_mask = .{ .depth_bit = true };
single_aspect_region.dst_subresource.aspect_mask = .{ .depth_bit = true };
try self.copyToImageSingleAspect(dst, single_aspect_region);
single_aspect_region.src_subresource.aspect_mask = .{ .stencil_bit = true };
single_aspect_region.dst_subresource.aspect_mask = .{ .stencil_bit = true };
try self.copyToImageSingleAspect(dst, single_aspect_region);
} else {
try self.copyToImageSingleAspect(dst, region);
}
}
/// Based on SwiftShader vk::Image::copySingleAspectTo
pub fn copyToImageSingleAspect(self: *const Self, dst: *Self, region: vk.ImageCopy) VkError!void {
if (!(region.src_subresource.aspect_mask == vk.ImageAspectFlags{ .color_bit = true } or
region.src_subresource.aspect_mask == vk.ImageAspectFlags{ .depth_bit = true } or
region.src_subresource.aspect_mask == vk.ImageAspectFlags{ .stencil_bit = true }))
{
base.unsupported("src subresource aspectMask {f}", .{region.src_subresource.aspect_mask});
return VkError.ValidationFailed;
}
if (!(region.dst_subresource.aspect_mask == vk.ImageAspectFlags{ .color_bit = true } or
region.dst_subresource.aspect_mask == vk.ImageAspectFlags{ .depth_bit = true } or
region.dst_subresource.aspect_mask == vk.ImageAspectFlags{ .stencil_bit = true }))
{
base.unsupported("dst subresource aspectMask {f}", .{region.dst_subresource.aspect_mask});
return VkError.ValidationFailed;
}
const src_format = self.interface.formatFromAspect(region.src_subresource.aspect_mask);
const bytes_per_block = base.format.texelSize(src_format);
const src_extent = self.getMipLevelExtent(region.src_subresource.mip_level);
const dst_extent = dst.getMipLevelExtent(region.src_subresource.mip_level);
const one_is_3D = (self.interface.image_type == .@"3d") != (dst.interface.image_type == .@"3d");
const both_are_3D = (self.interface.image_type == .@"3d") and (dst.interface.image_type == .@"3d");
const src_row_pitch_bytes = self.getRowPitchMemSizeForMipLevel(region.src_subresource.aspect_mask, region.src_subresource.mip_level);
const src_depth_pitch_bytes = self.getSliceMemSizeForMipLevel(region.src_subresource.aspect_mask, region.src_subresource.mip_level);
const dst_row_pitch_bytes = dst.getRowPitchMemSizeForMipLevel(region.dst_subresource.aspect_mask, region.dst_subresource.mip_level);
const dst_depth_pitch_bytes = dst.getSliceMemSizeForMipLevel(region.dst_subresource.aspect_mask, region.dst_subresource.mip_level);
const src_array_pitch = self.getLayerSize(region.src_subresource.aspect_mask);
const dst_array_pitch = dst.getLayerSize(region.dst_subresource.aspect_mask);
const src_layer_pitch = if (self.interface.image_type == .@"3d") src_depth_pitch_bytes else src_array_pitch;
const dst_layer_pitch = if (dst.interface.image_type == .@"3d") dst_depth_pitch_bytes else dst_array_pitch;
// If one image is 3D, extent.depth must match the layer count. If both images are 2D,
// depth is 1 but the source and destination subresource layer count must match.
const layer_count = if (one_is_3D) region.extent.depth else region.src_subresource.layer_count;
// Copies between 2D and 3D images are treated as layers, so only use depth as the slice count when both images are 3D.
const slice_count = if (both_are_3D) region.extent.depth else self.interface.samples.toInt();
const is_single_slice = (slice_count == 1);
const is_single_row = (region.extent.height == 1) and is_single_slice;
// In order to copy multiple rows using a single memcpy call, we
// have to make sure that we need to copy the entire row and that
// both source and destination rows have the same size in bytes
const is_entire_row = (region.extent.width == src_extent.width) and (region.extent.width == dst_extent.width);
// In order to copy multiple slices using a single memcpy call, we
// have to make sure that we need to copy the entire slice and that
// both source and destination slices have the same size in bytes
const is_entire_slice = is_entire_row and
(region.extent.height == src_extent.height) and
(region.extent.height == dst_extent.height) and
(src_depth_pitch_bytes == dst_depth_pitch_bytes);
const src_texel_offset = try self.getTexelMemoryOffset(region.src_offset, .{
.aspect_mask = region.src_subresource.aspect_mask,
.mip_level = region.src_subresource.mip_level,
.array_layer = region.src_subresource.base_array_layer,
});
const src_size = try self.interface.getTotalSizeForAspect(region.src_subresource.aspect_mask) - src_texel_offset;
const src_memory = if (self.interface.memory) |memory| memory else return VkError.InvalidDeviceMemoryDrv;
var src_map: []u8 = @as([*]u8, @ptrCast(try src_memory.map(self.interface.memory_offset + src_texel_offset, src_size)))[0..src_size];
const dst_texel_offset = try self.getTexelMemoryOffset(region.dst_offset, .{
.aspect_mask = region.dst_subresource.aspect_mask,
.mip_level = region.dst_subresource.mip_level,
.array_layer = region.dst_subresource.base_array_layer,
});
const dst_size = try dst.interface.getTotalSizeForAspect(region.dst_subresource.aspect_mask) - dst_texel_offset;
const dst_memory = if (dst.interface.memory) |memory| memory else return VkError.InvalidDeviceMemoryDrv;
var dst_map: []u8 = @as([*]u8, @ptrCast(try dst_memory.map(self.interface.memory_offset + dst_texel_offset, dst_size)))[0..dst_size];
for (0..layer_count) |_| {
if (is_single_row) {
const copy_size = region.extent.width * bytes_per_block;
@memcpy(dst_map[0..copy_size], src_map[0..copy_size]);
} else if (is_entire_row and is_single_slice) {
const copy_size = region.extent.height * src_row_pitch_bytes;
@memcpy(dst_map[0..copy_size], src_map[0..copy_size]);
} else if (is_entire_slice) {
const copy_size = slice_count * src_depth_pitch_bytes;
@memcpy(dst_map[0..copy_size], src_map[0..copy_size]);
} else if (is_entire_row) {
const slice_size = region.extent.height * src_row_pitch_bytes;
var src_slice_memory = src_map[0..];
var dst_slice_memory = dst_map[0..];
for (0..slice_count) |_| {
@memcpy(dst_slice_memory[0..slice_size], src_slice_memory[0..slice_size]);
src_slice_memory = src_slice_memory[src_depth_pitch_bytes..];
dst_slice_memory = dst_slice_memory[dst_depth_pitch_bytes..];
}
} else {
const row_size = region.extent.width * bytes_per_block;
var src_slice_memory = src_map[0..];
var dst_slice_memory = dst_map[0..];
for (0..slice_count) |_| {
var src_row_memory = src_slice_memory[0..];
var dst_row_memory = dst_slice_memory[0..];
for (0..region.extent.height) |_| {
@memcpy(dst_row_memory[0..row_size], src_row_memory[0..row_size]);
src_row_memory = src_row_memory[src_row_pitch_bytes..];
dst_row_memory = dst_row_memory[dst_row_pitch_bytes..];
}
}
}
src_map = src_map[src_layer_pitch..];
dst_map = dst_map[dst_layer_pitch..];
}
} }
pub fn copyToBuffer(self: *const Self, dst: *SoftBuffer, region: vk.BufferImageCopy) VkError!void { pub fn copyToBuffer(self: *const Self, dst: *SoftBuffer, region: vk.BufferImageCopy) VkError!void {
const dst_size = dst.interface.size - region.buffer_offset; const dst_size = dst.interface.size - region.buffer_offset;
const dst_offset = dst.interface.offset + region.buffer_offset;
const dst_memory = if (dst.interface.memory) |memory| memory else return VkError.InvalidDeviceMemoryDrv; const dst_memory = if (dst.interface.memory) |memory| memory else return VkError.InvalidDeviceMemoryDrv;
const dst_map: []u8 = @as([*]u8, @ptrCast(try dst_memory.map(region.buffer_offset, dst_size)))[0..dst_size]; const dst_map: []u8 = @as([*]u8, @ptrCast(try dst_memory.map(dst_offset, dst_size)))[0..dst_size];
try self.copy( try self.copy(
null, null,
dst_map, dst_map,
@intCast(region.buffer_row_length),
@intCast(region.buffer_image_height),
region.image_subresource, region.image_subresource,
region.image_offset, region.image_offset,
region.image_extent, region.image_extent,
@@ -85,35 +212,196 @@ pub fn copyToBuffer(self: *const Self, dst: *SoftBuffer, region: vk.BufferImageC
pub fn copyFromBuffer(self: *const Self, src: *const SoftBuffer, region: vk.BufferImageCopy) VkError!void { pub fn copyFromBuffer(self: *const Self, src: *const SoftBuffer, region: vk.BufferImageCopy) VkError!void {
const src_size = src.interface.size - region.buffer_offset; const src_size = src.interface.size - region.buffer_offset;
const src_offset = src.interface.offset + region.buffer_offset;
const src_memory = if (src.interface.memory) |memory| memory else return VkError.InvalidDeviceMemoryDrv; const src_memory = if (src.interface.memory) |memory| memory else return VkError.InvalidDeviceMemoryDrv;
const src_map: []u8 = @as([*]u8, @ptrCast(try src_memory.map(region.buffer_offset, src_size)))[0..src_size]; const src_map: []u8 = @as([*]u8, @ptrCast(try src_memory.map(src_offset, src_size)))[0..src_size];
try self.copy( try self.copy(
src_map, src_map,
null, null,
@intCast(region.buffer_row_length),
@intCast(region.buffer_image_height),
region.image_subresource, region.image_subresource,
region.image_offset, region.image_offset,
region.image_extent, region.image_extent,
); );
} }
/// Based on SwiftShader vk::Image::copy
pub fn copy( pub fn copy(
self: *const Self, self: *const Self,
src_memory: ?[]const u8, base_src_memory: ?[]const u8,
dst_memory: ?[]u8, base_dst_memory: ?[]u8,
row_len: usize,
image_height: usize,
image_subresource: vk.ImageSubresourceLayers, image_subresource: vk.ImageSubresourceLayers,
image_copy_offset: vk.Offset3D, image_offset: vk.Offset3D,
image_copy_extent: vk.Extent3D, image_extent: vk.Extent3D,
) VkError!void { ) VkError!void {
_ = self; std.debug.assert((base_src_memory == null) != (base_dst_memory == null));
_ = src_memory;
_ = dst_memory; const is_source: bool = base_src_memory != null;
_ = row_len;
_ = image_height; if (image_subresource.aspect_mask.subtract(.{
_ = image_subresource; .color_bit = true,
_ = image_copy_offset; .depth_bit = true,
_ = image_copy_extent; .stencil_bit = true,
}).toInt() != 0) {
base.unsupported("aspectMask {f}", .{image_subresource.aspect_mask});
return VkError.ValidationFailed;
}
const format = self.interface.formatFromAspect(image_subresource.aspect_mask);
// TODO: handle extent of compressed formats
if (image_extent.width == 0 or image_extent.height == 0 or image_extent.depth == 0) {
return;
}
const bytes_per_block = base.format.texelSize(format);
const memory_row_pitch_bytes = image_extent.width * bytes_per_block;
const memory_slice_pitch_bytes = image_extent.height * memory_row_pitch_bytes;
const image_texel_offset = try self.getTexelMemoryOffset(image_offset, .{
.aspect_mask = image_subresource.aspect_mask,
.mip_level = image_subresource.mip_level,
.array_layer = image_subresource.base_array_layer,
});
const image_size = try self.interface.getTotalSizeForAspect(image_subresource.aspect_mask) - image_texel_offset;
const image_memory = if (self.interface.memory) |memory| memory else return VkError.InvalidDeviceMemoryDrv;
const image_map: []u8 = @as([*]u8, @ptrCast(try image_memory.map(self.interface.memory_offset + image_texel_offset, image_size)))[0..image_size];
var src_memory = if (is_source) base_src_memory orelse return VkError.InvalidDeviceMemoryDrv else image_map;
var dst_memory = if (is_source) image_map else base_dst_memory orelse return VkError.InvalidDeviceMemoryDrv;
const src_slice_pitch_bytes = if (is_source) memory_slice_pitch_bytes else self.getSliceMemSizeForMipLevel(image_subresource.aspect_mask, image_subresource.mip_level);
const dst_slice_pitch_bytes = if (is_source) self.getSliceMemSizeForMipLevel(image_subresource.aspect_mask, image_subresource.mip_level) else memory_slice_pitch_bytes;
const src_row_pitch_bytes = if (is_source) memory_row_pitch_bytes else self.getRowPitchMemSizeForMipLevel(image_subresource.aspect_mask, image_subresource.mip_level);
const dst_row_pitch_bytes = if (is_source) self.getRowPitchMemSizeForMipLevel(image_subresource.aspect_mask, image_subresource.mip_level) else memory_row_pitch_bytes;
const src_layer_size = if (is_source) memory_slice_pitch_bytes else self.getLayerSize(image_subresource.aspect_mask);
const dst_layer_size = if (is_source) self.getLayerSize(image_subresource.aspect_mask) else memory_slice_pitch_bytes;
const layer_count = if (image_subresource.layer_count == vk.REMAINING_ARRAY_LAYERS) self.interface.array_layers - image_subresource.base_array_layer else image_subresource.layer_count;
const copy_size = image_extent.width * bytes_per_block;
for (0..layer_count) |_| {
var src_layer_memory = src_memory[0..];
var dst_layer_memory = dst_memory[0..];
for (0..image_extent.depth) |_| {
var src_slice_memory = src_layer_memory[0..];
var dst_slice_memory = dst_layer_memory[0..];
for (0..image_extent.height) |_| {
@memcpy(dst_slice_memory[0..copy_size], src_slice_memory[0..copy_size]);
src_slice_memory = src_slice_memory[src_row_pitch_bytes..];
dst_slice_memory = dst_slice_memory[dst_row_pitch_bytes..];
}
src_layer_memory = src_layer_memory[src_slice_pitch_bytes..];
dst_layer_memory = dst_layer_memory[dst_slice_pitch_bytes..];
}
src_memory = src_memory[src_layer_size..];
dst_memory = dst_memory[dst_layer_size..];
}
}
pub fn getTexelMemoryOffsetInSubresource(self: *const Self, offset: vk.Offset3D, subresource: vk.ImageSubresource) usize {
return @as(usize, @intCast(offset.z)) * self.getSliceMemSizeForMipLevel(subresource.aspect_mask, subresource.mip_level) +
@as(usize, @intCast(offset.y)) * self.getRowPitchMemSizeForMipLevel(subresource.aspect_mask, subresource.mip_level) +
@as(usize, @intCast(offset.x)) * base.format.texelSize(base.format.fromAspect(self.interface.format, subresource.aspect_mask));
}
pub fn getTexelMemoryOffset(self: *const Self, offset: vk.Offset3D, subresource: vk.ImageSubresource) VkError!usize {
return self.getTexelMemoryOffsetInSubresource(offset, subresource) + try self.getSubresourceOffset(subresource.aspect_mask, subresource.mip_level, subresource.array_layer);
}
fn getSubresourceOffset(self: *const Self, aspect_mask: vk.ImageAspectFlags, mip_level: u32, layer: u32) VkError!usize {
var offset = try self.getAspectOffset(aspect_mask);
for (0..mip_level) |mip| {
offset += self.getMultiSampledLevelSize(aspect_mask, @intCast(mip));
}
const is_3D = (self.interface.image_type == .@"3d") and self.interface.flags.@"2d_array_compatible_bit";
const layer_offset = if (is_3D)
self.getSliceMemSizeForMipLevel(aspect_mask, mip_level)
else
self.getLayerSize(aspect_mask);
return offset + layer * layer_offset;
}
fn getAspectOffset(self: *const Self, aspect_mask: vk.ImageAspectFlags) VkError!usize {
return switch (self.interface.format) {
.d16_unorm_s8_uint,
.d24_unorm_s8_uint,
.d32_sfloat_s8_uint,
=> if (aspect_mask.stencil_bit)
try self.interface.getTotalSizeForAspect(.{ .depth_bit = true })
else
0,
else => 0,
};
}
fn getTotalSizeForAspect(interface: *const Interface, aspect_mask: vk.ImageAspectFlags) VkError!usize {
const self: *const Self = @alignCast(@fieldParentPtr("interface", interface));
if (aspect_mask.subtract(.{
.color_bit = true,
.depth_bit = true,
.stencil_bit = true,
}).toInt() != 0) {
base.unsupported("aspectMask {f}", .{aspect_mask});
return VkError.ValidationFailed;
}
var size: usize = 0;
if (aspect_mask.color_bit)
size += self.getLayerSize(.{ .color_bit = true });
if (aspect_mask.depth_bit)
size += self.getLayerSize(.{ .depth_bit = true });
if (aspect_mask.stencil_bit)
size += self.getLayerSize(.{ .stencil_bit = true });
return size * self.interface.array_layers;
}
pub fn getLayerSize(self: *const Self, aspect_mask: vk.ImageAspectFlags) usize {
var size: usize = 0;
for (0..self.interface.mip_levels) |mip_level| {
size += self.getMultiSampledLevelSize(aspect_mask, @intCast(mip_level));
}
return size;
}
pub inline fn getMultiSampledLevelSize(self: *const Self, aspect_mask: vk.ImageAspectFlags, mip_level: u32) usize {
return self.getMipLevelSize(aspect_mask, mip_level) * self.interface.samples.toInt();
}
pub inline fn getMipLevelSize(self: *const Self, aspect_mask: vk.ImageAspectFlags, mip_level: u32) usize {
return self.getSliceMemSizeForMipLevel(aspect_mask, mip_level) * self.getMipLevelExtent(mip_level).depth;
}
pub fn getMipLevelExtent(self: *const Self, mip_level: u32) vk.Extent3D {
var extent: vk.Extent3D = .{
.width = self.interface.extent.width >> @intCast(mip_level),
.height = self.interface.extent.height >> @intCast(mip_level),
.depth = self.interface.extent.depth >> @intCast(mip_level),
};
if (extent.width == 0) extent.width = 1;
if (extent.height == 0) extent.height = 1;
if (extent.depth == 0) extent.depth = 1;
return extent;
}
pub fn getSliceMemSizeForMipLevel(self: *const Self, aspect_mask: vk.ImageAspectFlags, mip_level: u32) usize {
const mip_extent = self.getMipLevelExtent(mip_level);
const format = self.interface.formatFromAspect(aspect_mask);
return base.format.sliceMemSize(format, mip_extent.width, mip_extent.height);
}
pub fn getRowPitchMemSizeForMipLevel(self: *const Self, aspect_mask: vk.ImageAspectFlags, mip_level: u32) usize {
const mip_extent = self.getMipLevelExtent(mip_level);
const format = self.interface.formatFromAspect(aspect_mask);
return base.format.pitchMemSize(format, mip_extent.width);
} }

View File

@@ -11,6 +11,8 @@ const Self = @This();
pub const Interface = base.Instance; pub const Interface = base.Instance;
interface: Interface, interface: Interface,
threaded: std.Io.Threaded,
allocator: std.mem.Allocator,
fn castExtension(comptime ext: vk.ApiInfo) vk.ExtensionProperties { fn castExtension(comptime ext: vk.ApiInfo) vk.ExtensionProperties {
var props: vk.ExtensionProperties = .{ var props: vk.ExtensionProperties = .{
@@ -29,6 +31,9 @@ pub fn create(allocator: std.mem.Allocator, infos: *const vk.InstanceCreateInfo)
const self = allocator.create(Self) catch return VkError.OutOfHostMemory; const self = allocator.create(Self) catch return VkError.OutOfHostMemory;
errdefer allocator.destroy(self); errdefer allocator.destroy(self);
self.allocator = std.heap.smp_allocator;
self.threaded = std.Io.Threaded.init(self.allocator, .{});
self.interface = try base.Instance.init(allocator, infos); self.interface = try base.Instance.init(allocator, infos);
self.interface.dispatch_table = &.{ self.interface.dispatch_table = &.{
.destroy = destroy, .destroy = destroy,
@@ -36,12 +41,14 @@ pub fn create(allocator: std.mem.Allocator, infos: *const vk.InstanceCreateInfo)
self.interface.vtable = &.{ self.interface.vtable = &.{
.requestPhysicalDevices = requestPhysicalDevices, .requestPhysicalDevices = requestPhysicalDevices,
.releasePhysicalDevices = releasePhysicalDevices, .releasePhysicalDevices = releasePhysicalDevices,
.io = io,
}; };
return &self.interface; return &self.interface;
} }
fn destroy(interface: *Interface, allocator: std.mem.Allocator) VkError!void { fn destroy(interface: *Interface, allocator: std.mem.Allocator) VkError!void {
const self: *Self = @alignCast(@fieldParentPtr("interface", interface)); const self: *Self = @alignCast(@fieldParentPtr("interface", interface));
self.threaded.deinit();
allocator.destroy(self); allocator.destroy(self);
} }
@@ -60,3 +67,8 @@ fn releasePhysicalDevices(interface: *Interface, allocator: std.mem.Allocator) V
interface.physical_devices.deinit(allocator); interface.physical_devices.deinit(allocator);
interface.physical_devices = .empty; interface.physical_devices = .empty;
} }
fn io(interface: *Interface) std.Io {
const self: *Self = @alignCast(@fieldParentPtr("interface", interface));
return self.threaded.io();
}

View File

@@ -17,7 +17,7 @@ var device_name = [_]u8{0} ** vk.MAX_PHYSICAL_DEVICE_NAME_SIZE;
interface: Interface, interface: Interface,
pub fn create(allocator: std.mem.Allocator, instance: *const base.Instance) VkError!*Self { pub fn create(allocator: std.mem.Allocator, instance: *base.Instance) VkError!*Self {
const command_allocator = VulkanAllocator.from(allocator).cloneWithScope(.command).allocator(); const command_allocator = VulkanAllocator.from(allocator).cloneWithScope(.command).allocator();
const self = allocator.create(Self) catch return VkError.OutOfHostMemory; const self = allocator.create(Self) catch return VkError.OutOfHostMemory;
@@ -224,7 +224,7 @@ pub fn destroy(interface: *Interface, allocator: std.mem.Allocator) VkError!void
} }
pub fn createDevice(interface: *Interface, allocator: std.mem.Allocator, infos: *const vk.DeviceCreateInfo) VkError!*base.Device { pub fn createDevice(interface: *Interface, allocator: std.mem.Allocator, infos: *const vk.DeviceCreateInfo) VkError!*base.Device {
const device = try SoftDevice.create(interface, allocator, infos); const device = try SoftDevice.create(interface.instance, interface, allocator, infos);
return &device.interface; return &device.interface;
} }
@@ -515,7 +515,7 @@ pub fn getFormatProperties(interface: *Interface, format: vk.Format) VkError!vk.
else => {}, else => {},
} }
if (base.Image.formatSupportsColorAttachemendBlend(format)) { if (base.format.supportsColorAttachemendBlend(format)) {
properties.optimal_tiling_features.color_attachment_blend_bit = true; properties.optimal_tiling_features.color_attachment_blend_bit = true;
} }

View File

@@ -10,6 +10,7 @@ const NonDispatchable = base.NonDispatchable;
const ShaderModule = base.ShaderModule; const ShaderModule = base.ShaderModule;
const SoftDevice = @import("SoftDevice.zig"); const SoftDevice = @import("SoftDevice.zig");
const SoftInstance = @import("SoftInstance.zig");
const SoftShaderModule = @import("SoftShaderModule.zig"); const SoftShaderModule = @import("SoftShaderModule.zig");
const Self = @This(); const Self = @This();
@@ -49,27 +50,54 @@ pub fn createCompute(device: *base.Device, allocator: std.mem.Allocator, cache:
const device_allocator = soft_device.device_allocator.allocator(); const device_allocator = soft_device.device_allocator.allocator();
const instance: *SoftInstance = @alignCast(@fieldParentPtr("interface", device.instance));
const runtimes_count = switch (instance.threaded.async_limit) {
.nothing => 1,
.unlimited => std.Thread.getCpuCount() catch 1, // If we cannot get the CPU count, fallback on single runtime
else => |count| blk: {
const cpu_count: usize = std.Thread.getCpuCount() catch break :blk @intFromEnum(count);
break :blk if (@intFromEnum(count) >= cpu_count) cpu_count else @intFromEnum(count);
},
};
self.* = .{ self.* = .{
.interface = interface, .interface = interface,
.stages = std.EnumMap(Stages, Shader).init(.{ .stages = std.EnumMap(Stages, Shader).init(.{
.compute = .{ .compute = blk: {
.module = blk: { var shader: Shader = undefined;
soft_module.ref(); soft_module.ref();
break :blk soft_module; shader.module = soft_module;
},
.runtimes = blk: { const runtimes = device_allocator.alloc(spv.Runtime, runtimes_count) catch return VkError.OutOfHostMemory;
const runtimes = device_allocator.alloc(spv.Runtime, soft_device.workers.getIdCount()) catch return VkError.OutOfHostMemory; errdefer {
errdefer device_allocator.free(runtimes); for (runtimes) |*runtime| {
runtime.deinit(device_allocator);
}
device_allocator.free(runtimes);
}
for (runtimes) |*runtime| { for (runtimes) |*runtime| {
runtime.* = spv.Runtime.init(device_allocator, &soft_module.module) catch |err| { runtime.* = spv.Runtime.init(device_allocator, &soft_module.module) catch |err| {
std.log.scoped(.SpvRuntimeInit).err("SPIR-V Runtime failed to initialize, {s}", .{@errorName(err)}); std.log.scoped(.SpvRuntimeInit).err("SPIR-V Runtime failed to initialize, {s}", .{@errorName(err)});
return VkError.Unknown; return VkError.Unknown;
}; };
if (info.stage.p_specialization_info) |specialization| {
if (specialization.p_map_entries) |map| {
const data: []const u8 = @as([*]const u8, @ptrCast(@alignCast(specialization.p_data)))[0..specialization.data_size];
for (map[0..], 0..specialization.map_entry_count) |entry, _| {
runtime.addSpecializationInfo(device_allocator, .{
.id = @intCast(entry.constant_id),
.offset = @intCast(entry.offset),
.size = @intCast(entry.size),
}, data) catch return VkError.OutOfHostMemory;
} }
break :blk runtimes; }
}, }
.entry = allocator.dupe(u8, std.mem.span(info.stage.p_name)) catch return VkError.OutOfHostMemory, }
shader.runtimes = runtimes;
shader.entry = device_allocator.dupe(u8, std.mem.span(info.stage.p_name)) catch return VkError.OutOfHostMemory;
break :blk shader;
}, },
}), }),
}; };
@@ -86,9 +114,17 @@ pub fn createGraphics(device: *base.Device, allocator: std.mem.Allocator, cache:
.destroy = destroy, .destroy = destroy,
}; };
const soft_device: *SoftDevice = @alignCast(@fieldParentPtr("interface", device)); const instance: *SoftInstance = @alignCast(@fieldParentPtr("interface", device.instance));
const runtimes_count = switch (instance.threaded.async_limit) {
.nothing => 1,
.unlimited => std.Thread.getCpuCount() catch 1, // If we cannot get the CPU count, fallback on single runtime
else => |count| blk: {
const cpu_count: usize = std.Thread.getCpuCount() catch break :blk @intFromEnum(count);
break :blk if (@intFromEnum(count) >= cpu_count) cpu_count else @intFromEnum(count);
},
};
const runtimes = allocator.alloc(spv.Runtime, soft_device.workers.getIdCount()) catch return VkError.OutOfHostMemory; const runtimes = allocator.alloc(spv.Runtime, runtimes_count) catch return VkError.OutOfHostMemory;
errdefer allocator.free(runtimes); errdefer allocator.free(runtimes);
//for (runtimes) |*runtime| { //for (runtimes) |*runtime| {
@@ -112,12 +148,12 @@ pub fn destroy(interface: *Interface, allocator: std.mem.Allocator) void {
var it = self.stages.iterator(); var it = self.stages.iterator();
while (it.next()) |stage| { while (it.next()) |stage| {
stage.value.module.unref(allocator);
for (stage.value.runtimes) |*runtime| { for (stage.value.runtimes) |*runtime| {
runtime.deinit(device_allocator); runtime.deinit(device_allocator);
} }
device_allocator.free(stage.value.runtimes); device_allocator.free(stage.value.runtimes);
allocator.free(stage.value.entry); device_allocator.free(stage.value.entry);
stage.value.module.unref(allocator);
} }
allocator.destroy(self); allocator.destroy(self);
} }

View File

@@ -17,7 +17,7 @@ const Self = @This();
pub const Interface = base.Queue; pub const Interface = base.Queue;
interface: Interface, interface: Interface,
lock: std.Thread.RwLock, lock: std.Io.RwLock,
pub fn create(allocator: std.mem.Allocator, device: *base.Device, index: u32, family_index: u32, flags: vk.DeviceQueueCreateFlags) VkError!*Interface { pub fn create(allocator: std.mem.Allocator, device: *base.Device, index: u32, family_index: u32, flags: vk.DeviceQueueCreateFlags) VkError!*Interface {
const self = allocator.create(Self) catch return VkError.OutOfHostMemory; const self = allocator.create(Self) catch return VkError.OutOfHostMemory;
@@ -33,7 +33,7 @@ pub fn create(allocator: std.mem.Allocator, device: *base.Device, index: u32, fa
self.* = .{ self.* = .{
.interface = interface, .interface = interface,
.lock = .{}, .lock = .init,
}; };
return &self.interface; return &self.interface;
} }
@@ -56,10 +56,11 @@ pub fn submit(interface: *Interface, infos: []Interface.SubmitInfo, p_fence: ?*b
const soft_device: *SoftDevice = @alignCast(@fieldParentPtr("interface", interface.owner)); const soft_device: *SoftDevice = @alignCast(@fieldParentPtr("interface", interface.owner));
const allocator = soft_device.device_allocator.allocator(); const allocator = soft_device.device_allocator.allocator();
const io = soft_device.interface.io();
// Lock here to avoid acquiring it in `waitIdle` before runners start // Lock here to avoid acquiring it in `waitIdle` before runners start
self.lock.lockShared(); self.lock.lockShared(io) catch return VkError.DeviceLost;
defer self.lock.unlockShared(); defer self.lock.unlockShared(io);
for (infos) |info| { for (infos) |info| {
// Cloning info to keep them alive until command execution ends // Cloning info to keep them alive until command execution ends
@@ -68,19 +69,23 @@ pub fn submit(interface: *Interface, infos: []Interface.SubmitInfo, p_fence: ?*b
}; };
const runners_counter = allocator.create(RefCounter) catch return VkError.OutOfDeviceMemory; const runners_counter = allocator.create(RefCounter) catch return VkError.OutOfDeviceMemory;
runners_counter.* = .init; runners_counter.* = .init;
soft_device.workers.spawn(Self.taskRunner, .{ self, cloned_info, p_fence, runners_counter }) catch return VkError.Unknown; _ = soft_device.interface.io().async(Self.taskRunner, .{ self, cloned_info, p_fence, runners_counter });
} }
} }
pub fn waitIdle(interface: *Interface) VkError!void { pub fn waitIdle(interface: *Interface) VkError!void {
const self: *Self = @alignCast(@fieldParentPtr("interface", interface)); const self: *Self = @alignCast(@fieldParentPtr("interface", interface));
self.lock.lock(); const io = interface.owner.io();
defer self.lock.unlock();
self.lock.lock(io) catch return VkError.DeviceLost;
defer self.lock.unlock(io);
} }
fn taskRunner(self: *Self, info: Interface.SubmitInfo, p_fence: ?*base.Fence, runners_counter: *RefCounter) void { fn taskRunner(self: *Self, info: Interface.SubmitInfo, p_fence: ?*base.Fence, runners_counter: *RefCounter) void {
self.lock.lockShared(); const io = self.interface.owner.io();
defer self.lock.unlockShared();
self.lock.lockShared(io) catch return;
defer self.lock.unlockShared(io);
runners_counter.ref(); runners_counter.ref();
defer { defer {

View File

@@ -32,13 +32,13 @@ pub fn create(device: *base.Device, allocator: std.mem.Allocator, info: *const v
self.* = .{ self.* = .{
.interface = interface, .interface = interface,
.module = spv.Module.init(allocator, code, .{ .module = spv.Module.init(allocator, code, .{
.use_simd_vectors_specializations = !std.process.hasEnvVarConstant(lib.NO_SHADER_SIMD_ENV_NAME), .use_simd_vectors_specializations = base.config.shaders_simd,
}) catch |err| switch (err) { }) catch |err| switch (err) {
spv.Module.ModuleError.OutOfMemory => return VkError.OutOfHostMemory, spv.Module.ModuleError.OutOfMemory => return VkError.OutOfHostMemory,
else => { else => {
std.log.scoped(.@"SPIR-V module").err("module creation catched a '{s}'", .{@errorName(err)}); std.log.scoped(.@"SPIR-V module").err("module creation catched a '{s}'", .{@errorName(err)});
if (@errorReturnTrace()) |trace| { if (@errorReturnTrace()) |trace| {
std.debug.dumpStackTrace(trace.*); std.debug.dumpErrorReturnTrace(trace);
} }
return VkError.ValidationFailed; return VkError.ValidationFailed;
}, },

View File

@@ -4,19 +4,17 @@ const std = @import("std");
const vk = @import("vulkan"); const vk = @import("vulkan");
const base = @import("base"); const base = @import("base");
const VkError = base.VkError;
pub const SoftImage = @import("../SoftImage.zig"); pub const SoftImage = @import("../SoftImage.zig");
pub const SoftImageView = @import("../SoftImageView.zig"); pub const SoftImageView = @import("../SoftImageView.zig");
const Self = @This(); const Self = @This();
blit_mutex: std.Thread.Mutex, pub const init: Self = .{};
pub const init: Self = .{ pub fn clear(self: *Self, pixel: vk.ClearValue, format: vk.Format, dest: *SoftImage, view_format: vk.Format, range: vk.ImageSubresourceRange, area: ?vk.Rect2D) VkError!void {
.blit_mutex = .{}, const dst_format = base.format.fromAspect(view_format, range.aspect_mask);
};
pub fn clear(self: *Self, pixel: vk.ClearValue, format: vk.Format, dest: *SoftImage, view_format: vk.Format, range: vk.ImageSubresourceRange, area: ?vk.Rect2D) void {
const dst_format = base.Image.formatFromAspect(view_format, range.aspect_mask);
if (dst_format == .undefined) { if (dst_format == .undefined) {
return; return;
} }
@@ -40,13 +38,13 @@ pub fn clear(self: *Self, pixel: vk.ClearValue, format: vk.Format, dest: *SoftIm
} }
} }
if (self.fastClear(clamped_pixel, format, dest, dst_format, range, area)) { if (try self.fastClear(clamped_pixel, format, dest, dst_format, range, area)) {
return; return;
} }
base.logger.fixme("implement slow clear", .{}); base.logger.fixme("implement slow clear", .{});
} }
fn fastClear(self: *Self, clear_value: vk.ClearValue, clear_format: vk.Format, dest: *SoftImage, view_format: vk.Format, range: vk.ImageSubresourceRange, render_area: ?vk.Rect2D) bool { fn fastClear(self: *Self, clear_value: vk.ClearValue, clear_format: vk.Format, dest: *SoftImage, view_format: vk.Format, range: vk.ImageSubresourceRange, render_area: ?vk.Rect2D) VkError!bool {
_ = self; _ = self;
_ = render_area; _ = render_area;
_ = range; _ = range;
@@ -90,7 +88,7 @@ fn fastClear(self: *Self, clear_value: vk.ClearValue, clear_format: vk.Format, d
} }
if (dest.interface.memory) |memory| { if (dest.interface.memory) |memory| {
const image_size = dest.interface.getTotalSize(); const image_size = try dest.interface.getTotalSize();
const memory_map = memory.map(dest.interface.memory_offset, image_size) catch return false; const memory_map = memory.map(dest.interface.memory_offset, image_size) catch return false;
defer memory.unmap(); defer memory.unmap();
@@ -102,3 +100,126 @@ fn fastClear(self: *Self, clear_value: vk.ClearValue, clear_format: vk.Format, d
} }
return false; return false;
} }
pub fn blitRegion(_: *Self, src: *const SoftImage, dst: *SoftImage, region: vk.ImageBlit, filter: vk.Filter) VkError!void {
var dst_offset_0 = region.dst_offsets[0];
var dst_offset_1 = region.dst_offsets[1];
var src_offset_0 = region.src_offsets[0];
var src_offset_1 = region.src_offsets[1];
if (dst_offset_0.x > dst_offset_1.x) {
std.mem.swap(i32, &src_offset_0.x, &src_offset_1.x);
std.mem.swap(i32, &dst_offset_0.x, &dst_offset_1.x);
}
if (dst_offset_0.y > dst_offset_1.y) {
std.mem.swap(i32, &src_offset_0.y, &src_offset_1.y);
std.mem.swap(i32, &dst_offset_0.y, &dst_offset_1.y);
}
if (dst_offset_0.z > dst_offset_1.z) {
std.mem.swap(i32, &src_offset_0.z, &src_offset_1.z);
std.mem.swap(i32, &dst_offset_0.z, &dst_offset_1.z);
}
const src_extent = src.getMipLevelExtent(region.src_subresource.mip_level);
_ = src_extent;
const width_ratio = @as(f32, @floatFromInt(src_offset_1.x - src_offset_0.x)) / @as(f32, @floatFromInt(dst_offset_1.x - dst_offset_0.x));
const height_ratio = @as(f32, @floatFromInt(src_offset_1.y - src_offset_0.y)) / @as(f32, @floatFromInt(dst_offset_1.y - dst_offset_0.y));
const depth_ratio = @as(f32, @floatFromInt(src_offset_1.z - src_offset_0.z)) / @as(f32, @floatFromInt(dst_offset_1.z - dst_offset_0.z));
const x0 = @as(f32, @floatFromInt(src_offset_0.x)) + (0.5 - @as(f32, @floatFromInt(dst_offset_0.x))) * width_ratio;
const y0 = @as(f32, @floatFromInt(src_offset_0.y)) + (0.5 - @as(f32, @floatFromInt(dst_offset_0.y))) * height_ratio;
const z0 = @as(f32, @floatFromInt(src_offset_0.z)) + (0.5 - @as(f32, @floatFromInt(dst_offset_0.z))) * depth_ratio;
_ = x0;
_ = y0;
_ = z0;
const src_format = base.format.fromAspect(src.interface.format, region.src_subresource.aspect_mask);
const dst_format = base.format.fromAspect(dst.interface.format, region.dst_subresource.aspect_mask);
const apply_filter = (filter != .nearest);
const allow_srgb_conversion = apply_filter or base.format.isSrgb(src_format) != base.format.isSrgb(dst_format);
_ = allow_srgb_conversion;
}
// State state(srcFormat, dstFormat, src->getSampleCount(), dst->getSampleCount(),
// Options{ doFilter, allowSRGBConversion });
// state.clampToEdge = (region.srcOffsets[0].x < 0) ||
// (region.srcOffsets[0].y < 0) ||
// (static_cast<uint32_t>(region.srcOffsets[1].x) > srcExtent.width) ||
// (static_cast<uint32_t>(region.srcOffsets[1].y) > srcExtent.height) ||
// (doFilter && ((x0 < 0.5f) || (y0 < 0.5f)));
// state.filter3D = (region.srcOffsets[1].z - region.srcOffsets[0].z) !=
// (region.dstOffsets[1].z - region.dstOffsets[0].z);
//
// auto blitRoutine = getBlitRoutine(state);
// if(!blitRoutine)
// {
// return;
// }
//
// BlitData data = {
// nullptr, // source
// nullptr, // dest
// assert_cast<uint32_t>(src->rowPitchBytes(srcAspect, region.srcSubresource.mipLevel)), // sPitchB
// assert_cast<uint32_t>(dst->rowPitchBytes(dstAspect, region.dstSubresource.mipLevel)), // dPitchB
// assert_cast<uint32_t>(src->slicePitchBytes(srcAspect, region.srcSubresource.mipLevel)), // sSliceB
// assert_cast<uint32_t>(dst->slicePitchBytes(dstAspect, region.dstSubresource.mipLevel)), // dSliceB
//
// x0,
// y0,
// z0,
// widthRatio,
// heightRatio,
// depthRatio,
//
// region.dstOffsets[0].x, // x0d
// region.dstOffsets[1].x, // x1d
// region.dstOffsets[0].y, // y0d
// region.dstOffsets[1].y, // y1d
// region.dstOffsets[0].z, // z0d
// region.dstOffsets[1].z, // z1d
//
// static_cast<int>(srcExtent.width), // sWidth
// static_cast<int>(srcExtent.height), // sHeight
// static_cast<int>(srcExtent.depth), // sDepth
//
// false, // filter3D
// };
//
// VkImageSubresource srcSubres = {
// region.srcSubresource.aspectMask,
// region.srcSubresource.mipLevel,
// region.srcSubresource.baseArrayLayer
// };
//
// VkImageSubresource dstSubres = {
// region.dstSubresource.aspectMask,
// region.dstSubresource.mipLevel,
// region.dstSubresource.baseArrayLayer
// };
//
// VkImageSubresourceRange dstSubresRange = {
// region.dstSubresource.aspectMask,
// region.dstSubresource.mipLevel,
// 1, // levelCount
// region.dstSubresource.baseArrayLayer,
// region.dstSubresource.layerCount
// };
//
// uint32_t lastLayer = src->getLastLayerIndex(dstSubresRange);
//
// for(; dstSubres.arrayLayer <= lastLayer; srcSubres.arrayLayer++, dstSubres.arrayLayer++)
// {
// data.source = src->getTexelPointer({ 0, 0, 0 }, srcSubres);
// data.dest = dst->getTexelPointer({ 0, 0, 0 }, dstSubres);
//
// ASSERT(data.source < src->end());
// ASSERT(data.dest < dst->end());
//
// blitRoutine(&data);
// }

View File

@@ -29,11 +29,19 @@ device: *SoftDevice,
state: *PipelineState, state: *PipelineState,
batch_size: usize, batch_size: usize,
invocation_index: std.atomic.Value(usize),
early_dump: ?u32,
final_dump: ?u32,
pub fn init(device: *SoftDevice, state: *PipelineState) Self { pub fn init(device: *SoftDevice, state: *PipelineState) Self {
return .{ return .{
.device = device, .device = device,
.state = state, .state = state,
.batch_size = 0, .batch_size = 0,
.invocation_index = .init(0),
.early_dump = base.config.compute_dump_early_results_table,
.final_dump = base.config.compute_dump_final_results_table,
}; };
} }
@@ -51,9 +59,11 @@ pub fn dispatch(self: *Self, group_count_x: u32, group_count_y: u32, group_count
const invocations_per_workgroup = spv_module.local_size_x * spv_module.local_size_y * spv_module.local_size_z; const invocations_per_workgroup = spv_module.local_size_x * spv_module.local_size_y * spv_module.local_size_z;
var wg: std.Thread.WaitGroup = .{}; self.invocation_index.store(0, .monotonic);
var wg: std.Io.Group = .init;
for (0..@min(self.batch_size, group_count)) |batch_id| { for (0..@min(self.batch_size, group_count)) |batch_id| {
if (std.process.hasEnvVarConstant(lib.SINGLE_THREAD_COMPUTE_EXECUTION_ENV_NAME)) { if (base.config.single_threaded_compute) {
@branchHint(.cold); // Should only be reached for debugging @branchHint(.cold); // Should only be reached for debugging
runWrapper( runWrapper(
@@ -69,7 +79,7 @@ pub fn dispatch(self: *Self, group_count_x: u32, group_count_y: u32, group_count
}, },
); );
} else { } else {
self.device.workers.spawnWg(&wg, runWrapper, .{ wg.async(self.device.interface.io(), runWrapper, .{
RunData{ RunData{
.self = self, .self = self,
.batch_id = batch_id, .batch_id = batch_id,
@@ -83,20 +93,21 @@ pub fn dispatch(self: *Self, group_count_x: u32, group_count_y: u32, group_count
}); });
} }
} }
self.device.workers.waitAndWork(&wg); wg.await(self.device.interface.io()) catch return VkError.DeviceLost;
} }
fn runWrapper(data: RunData) void { fn runWrapper(data: RunData) void {
@call(.always_inline, run, .{data}) catch |err| { @call(.always_inline, run, .{data}) catch |err| {
std.log.scoped(.@"SPIR-V runtime").err("SPIR-V runtime catched a '{s}'", .{@errorName(err)}); std.log.scoped(.@"SPIR-V runtime").err("SPIR-V runtime catched a '{s}'", .{@errorName(err)});
if (@errorReturnTrace()) |trace| { if (@errorReturnTrace()) |trace| {
std.debug.dumpStackTrace(trace.*); std.debug.dumpErrorReturnTrace(trace);
} }
}; };
} }
inline fn run(data: RunData) !void { inline fn run(data: RunData) !void {
const allocator = data.self.device.device_allocator.allocator(); const allocator = data.self.device.device_allocator.allocator();
const io = data.self.device.interface.io();
const shader = data.pipeline.stages.getPtrAssertContains(.compute); const shader = data.pipeline.stages.getPtrAssertContains(.compute);
const rt = &shader.runtimes[data.batch_id]; const rt = &shader.runtimes[data.batch_id];
@@ -128,12 +139,19 @@ inline fn run(data: RunData) !void {
}); });
for (0..data.invocations_per_workgroup) |i| { for (0..data.invocations_per_workgroup) |i| {
const invocation_index = data.self.invocation_index.fetchAdd(1, .monotonic);
try setupSubgroupBuiltins(data.self, rt, .{ try setupSubgroupBuiltins(data.self, rt, .{
@as(u32, @intCast(group_x)), @as(u32, @intCast(group_x)),
@as(u32, @intCast(group_y)), @as(u32, @intCast(group_y)),
@as(u32, @intCast(group_z)), @as(u32, @intCast(group_z)),
}, i); }, i);
if (data.self.early_dump != null and data.self.early_dump.? == invocation_index) {
@branchHint(.cold);
try dumpResultsTable(allocator, io, rt, true);
}
rt.callEntryPoint(allocator, entry) catch |err| switch (err) { rt.callEntryPoint(allocator, entry) catch |err| switch (err) {
// Some errors can be ignored // Some errors can be ignored
SpvRuntimeError.OutOfBounds, SpvRuntimeError.OutOfBounds,
@@ -141,11 +159,30 @@ inline fn run(data: RunData) !void {
=> {}, => {},
else => return err, else => return err,
}; };
if (data.self.final_dump != null and data.self.final_dump.? == invocation_index) {
@branchHint(.cold);
try dumpResultsTable(allocator, io, rt, false);
}
try rt.flushDescriptorSets(allocator); try rt.flushDescriptorSets(allocator);
} }
} }
} }
inline fn dumpResultsTable(allocator: std.mem.Allocator, io: std.Io, rt: *spv.Runtime, is_early: bool) !void {
@branchHint(.cold);
const file = try std.Io.Dir.cwd().createFile(
io,
std.fmt.comptimePrint("{s}_compute_result_table_dump.txt", .{if (is_early) "early" else "final"}),
.{ .truncate = true },
);
defer file.close(io);
var buffer = [_]u8{0} ** 1024;
var writer = file.writer(io, buffer[0..]);
try rt.dumpResultsTable(allocator, &writer.interface);
}
fn writeDescriptorSets(self: *Self, rt: *spv.Runtime) !void { fn writeDescriptorSets(self: *Self, rt: *spv.Runtime) !void {
sets: for (self.state.sets[0..], 0..) |set, set_index| { sets: for (self.state.sets[0..], 0..) |set, set_index| {
if (set == null) if (set == null)
@@ -165,6 +202,18 @@ fn writeDescriptorSets(self: *Self, rt: *spv.Runtime) !void {
); );
} }
}, },
.image => |image_data_array| for (image_data_array, 0..) |image_data, descriptor_index| {
if (image_data.object) |image| {
const memory = if (image.interface.memory) |memory| memory else continue :bindings;
const map: []u8 = @as([*]u8, @ptrCast(try memory.map(image.interface.memory_offset, try image.interface.getTotalSize())))[0..try image.interface.getTotalSize()];
try rt.writeDescriptorSet(
map,
@as(u32, @intCast(set_index)),
@as(u32, @intCast(binding_index)),
@as(u32, @intCast(descriptor_index)),
);
}
},
else => {}, else => {},
} }
} }

View File

@@ -6,6 +6,7 @@ const SoftDescriptorSet = @import("../SoftDescriptorSet.zig");
const SoftDevice = @import("../SoftDevice.zig"); const SoftDevice = @import("../SoftDevice.zig");
const SoftPipeline = @import("../SoftPipeline.zig"); const SoftPipeline = @import("../SoftPipeline.zig");
const Blitter = @import("Blitter.zig");
const ComputeRoutines = @import("ComputeRoutines.zig"); const ComputeRoutines = @import("ComputeRoutines.zig");
const PipelineState = @import("PipelineState.zig"); const PipelineState = @import("PipelineState.zig");
@@ -13,6 +14,8 @@ const VkError = base.VkError;
const Self = @This(); const Self = @This();
blitter: Blitter,
compute_routines: ComputeRoutines, compute_routines: ComputeRoutines,
/// .graphics = 0 /// .graphics = 0
@@ -20,6 +23,7 @@ compute_routines: ComputeRoutines,
pipeline_states: [2]PipelineState, pipeline_states: [2]PipelineState,
pub const init: Self = .{ pub const init: Self = .{
.blitter = .init,
.compute_routines = undefined, .compute_routines = undefined,
.pipeline_states = undefined, .pipeline_states = undefined,
}; };

View File

@@ -40,9 +40,6 @@ pub const VULKAN_VERSION = vk.makeApiVersion(0, 1, 0, 0);
pub const DRIVER_VERSION = vk.makeApiVersion(0, 0, 0, 1); pub const DRIVER_VERSION = vk.makeApiVersion(0, 0, 0, 1);
pub const DEVICE_ID = 0x600DCAFE; pub const DEVICE_ID = 0x600DCAFE;
pub const NO_SHADER_SIMD_ENV_NAME = "STROLL_SOFT_NO_SIMD";
pub const SINGLE_THREAD_COMPUTE_EXECUTION_ENV_NAME = "STROLL_SINGLE_THREAD_COMPUTE_EXECUTION";
/// Generic system memory. /// Generic system memory.
pub const MEMORY_TYPE_GENERIC_BIT = 0; pub const MEMORY_TYPE_GENERIC_BIT = 0;

View File

@@ -31,7 +31,7 @@ pool: *CommandPool,
state: State, state: State,
begin_info: ?vk.CommandBufferBeginInfo, begin_info: ?vk.CommandBufferBeginInfo,
host_allocator: VulkanAllocator, host_allocator: VulkanAllocator,
state_mutex: std.Thread.Mutex, state_mutex: std.Io.Mutex,
vtable: *const VTable, vtable: *const VTable,
dispatch_table: *const DispatchTable, dispatch_table: *const DispatchTable,
@@ -40,6 +40,7 @@ pub const DispatchTable = struct {
begin: *const fn (*Self, *const vk.CommandBufferBeginInfo) VkError!void, begin: *const fn (*Self, *const vk.CommandBufferBeginInfo) VkError!void,
bindDescriptorSets: *const fn (*Self, vk.PipelineBindPoint, u32, [lib.VULKAN_MAX_DESCRIPTOR_SETS]?*DescriptorSet, []const u32) VkError!void, bindDescriptorSets: *const fn (*Self, vk.PipelineBindPoint, u32, [lib.VULKAN_MAX_DESCRIPTOR_SETS]?*DescriptorSet, []const u32) VkError!void,
bindPipeline: *const fn (*Self, vk.PipelineBindPoint, *Pipeline) VkError!void, bindPipeline: *const fn (*Self, vk.PipelineBindPoint, *Pipeline) VkError!void,
blitImage: *const fn (*Self, *Image, vk.ImageLayout, *Image, vk.ImageLayout, []const vk.ImageBlit, vk.Filter) VkError!void,
clearColorImage: *const fn (*Self, *Image, vk.ImageLayout, *const vk.ClearColorValue, vk.ImageSubresourceRange) VkError!void, clearColorImage: *const fn (*Self, *Image, vk.ImageLayout, *const vk.ClearColorValue, vk.ImageSubresourceRange) VkError!void,
copyBuffer: *const fn (*Self, *Buffer, *Buffer, []const vk.BufferCopy) VkError!void, copyBuffer: *const fn (*Self, *Buffer, *Buffer, []const vk.BufferCopy) VkError!void,
copyBufferToImage: *const fn (*Self, *Buffer, *Image, vk.ImageLayout, []const vk.BufferImageCopy) VkError!void, copyBufferToImage: *const fn (*Self, *Buffer, *Image, vk.ImageLayout, []const vk.BufferImageCopy) VkError!void,
@@ -48,6 +49,7 @@ pub const DispatchTable = struct {
dispatch: *const fn (*Self, u32, u32, u32) VkError!void, dispatch: *const fn (*Self, u32, u32, u32) VkError!void,
dispatchIndirect: *const fn (*Self, *Buffer, vk.DeviceSize) VkError!void, dispatchIndirect: *const fn (*Self, *Buffer, vk.DeviceSize) VkError!void,
end: *const fn (*Self) VkError!void, end: *const fn (*Self) VkError!void,
executeCommands: *const fn (*Self, *Self) VkError!void,
fillBuffer: *const fn (*Self, *Buffer, vk.DeviceSize, vk.DeviceSize, u32) VkError!void, fillBuffer: *const fn (*Self, *Buffer, vk.DeviceSize, vk.DeviceSize, u32) VkError!void,
pipelineBarrier: *const fn (*Self, vk.PipelineStageFlags, vk.PipelineStageFlags, vk.DependencyFlags, []const vk.MemoryBarrier, []const vk.BufferMemoryBarrier, []const vk.ImageMemoryBarrier) VkError!void, pipelineBarrier: *const fn (*Self, vk.PipelineStageFlags, vk.PipelineStageFlags, vk.DependencyFlags, []const vk.MemoryBarrier, []const vk.BufferMemoryBarrier, []const vk.ImageMemoryBarrier) VkError!void,
reset: *const fn (*Self, vk.CommandBufferResetFlags) VkError!void, reset: *const fn (*Self, vk.CommandBufferResetFlags) VkError!void,
@@ -67,7 +69,7 @@ pub fn init(device: *Device, allocator: std.mem.Allocator, info: *const vk.Comma
.state = .Initial, .state = .Initial,
.begin_info = null, .begin_info = null,
.host_allocator = VulkanAllocator.from(allocator).cloneWithScope(.object), .host_allocator = VulkanAllocator.from(allocator).cloneWithScope(.object),
.state_mutex = .{}, .state_mutex = .init,
.vtable = undefined, .vtable = undefined,
.dispatch_table = undefined, .dispatch_table = undefined,
}; };
@@ -77,8 +79,11 @@ inline fn transitionState(self: *Self, target: State, from_allowed: []const Stat
if (!std.EnumSet(State).initMany(from_allowed).contains(self.state)) { if (!std.EnumSet(State).initMany(from_allowed).contains(self.state)) {
return error.NotAllowed; return error.NotAllowed;
} }
self.state_mutex.lock(); const io = self.owner.io();
defer self.state_mutex.unlock();
self.state_mutex.lockUncancelable(io);
defer self.state_mutex.unlock(io);
self.state = target; self.state = target;
} }
@@ -90,7 +95,8 @@ pub inline fn begin(self: *Self, info: *const vk.CommandBufferBeginInfo) VkError
if (!self.pool.flags.reset_command_buffer_bit) { if (!self.pool.flags.reset_command_buffer_bit) {
self.transitionState(.Recording, &.{.Initial}) catch return VkError.ValidationFailed; self.transitionState(.Recording, &.{.Initial}) catch return VkError.ValidationFailed;
} else { } else {
self.transitionState(.Recording, &.{ .Initial, .Executable, .Invalid }) catch return VkError.ValidationFailed; try self.reset(.{});
self.transitionState(.Recording, &.{ .Initial, .Recording, .Executable, .Invalid }) catch return VkError.ValidationFailed;
} }
try self.dispatch_table.begin(self, info); try self.dispatch_table.begin(self, info);
self.begin_info = info.*; self.begin_info = info.*;
@@ -99,6 +105,7 @@ pub inline fn begin(self: *Self, info: *const vk.CommandBufferBeginInfo) VkError
pub inline fn end(self: *Self) VkError!void { pub inline fn end(self: *Self) VkError!void {
self.transitionState(.Executable, &.{.Recording}) catch return VkError.ValidationFailed; self.transitionState(.Executable, &.{.Recording}) catch return VkError.ValidationFailed;
try self.dispatch_table.end(self); try self.dispatch_table.end(self);
self.begin_info = null;
} }
pub inline fn reset(self: *Self, flags: vk.CommandBufferResetFlags) VkError!void { pub inline fn reset(self: *Self, flags: vk.CommandBufferResetFlags) VkError!void {
@@ -133,6 +140,7 @@ pub inline fn finish(self: *Self) VkError!void {
// Commands ==================================================================================================== // Commands ====================================================================================================
pub inline fn bindDescriptorSets(self: *Self, bind_point: vk.PipelineBindPoint, first_set: u32, sets: []const vk.DescriptorSet, dynamic_offsets: []const u32) VkError!void { pub inline fn bindDescriptorSets(self: *Self, bind_point: vk.PipelineBindPoint, first_set: u32, sets: []const vk.DescriptorSet, dynamic_offsets: []const u32) VkError!void {
std.debug.assert(sets.len < lib.VULKAN_MAX_DESCRIPTOR_SETS);
var inner_sets = [_]?*DescriptorSet{null} ** lib.VULKAN_MAX_DESCRIPTOR_SETS; var inner_sets = [_]?*DescriptorSet{null} ** lib.VULKAN_MAX_DESCRIPTOR_SETS;
for (sets, inner_sets[0..sets.len]) |set, *inner_set| { for (sets, inner_sets[0..sets.len]) |set, *inner_set| {
inner_set.* = try NonDispatchable(DescriptorSet).fromHandleObject(set); inner_set.* = try NonDispatchable(DescriptorSet).fromHandleObject(set);
@@ -144,6 +152,10 @@ pub inline fn bindPipeline(self: *Self, bind_point: vk.PipelineBindPoint, pipeli
try self.dispatch_table.bindPipeline(self, bind_point, pipeline); try self.dispatch_table.bindPipeline(self, bind_point, pipeline);
} }
pub inline fn blitImage(self: *Self, src: *Image, src_layout: vk.ImageLayout, dst: *Image, dst_layout: vk.ImageLayout, regions: []const vk.ImageBlit, filter: vk.Filter) VkError!void {
try self.dispatch_table.blitImage(self, src, src_layout, dst, dst_layout, regions, filter);
}
pub inline fn clearColorImage(self: *Self, image: *Image, layout: vk.ImageLayout, color: *const vk.ClearColorValue, ranges: []const vk.ImageSubresourceRange) VkError!void { pub inline fn clearColorImage(self: *Self, image: *Image, layout: vk.ImageLayout, color: *const vk.ClearColorValue, ranges: []const vk.ImageSubresourceRange) VkError!void {
for (ranges) |range| { for (ranges) |range| {
try self.dispatch_table.clearColorImage(self, image, layout, color, range); try self.dispatch_table.clearColorImage(self, image, layout, color, range);
@@ -174,6 +186,10 @@ pub inline fn dispatchIndirect(self: *Self, buffer: *Buffer, offset: vk.DeviceSi
try self.dispatch_table.dispatchIndirect(self, buffer, offset); try self.dispatch_table.dispatchIndirect(self, buffer, offset);
} }
pub inline fn executeCommands(self: *Self, commands: *Self) VkError!void {
try self.dispatch_table.executeCommands(self, commands);
}
pub inline fn fillBuffer(self: *Self, buffer: *Buffer, offset: vk.DeviceSize, size: vk.DeviceSize, data: u32) VkError!void { pub inline fn fillBuffer(self: *Self, buffer: *Buffer, offset: vk.DeviceSize, size: vk.DeviceSize, data: u32) VkError!void {
try self.dispatch_table.fillBuffer(self, buffer, offset, size, data); try self.dispatch_table.fillBuffer(self, buffer, offset, size, data);
} }

View File

@@ -72,9 +72,8 @@ pub fn freeCommandBuffers(self: *Self, cmds: []*Dispatchable(CommandBuffer)) VkE
// Ugly method but it works well // Ugly method but it works well
var len: usize = 0; var len: usize = 0;
for (cmds) |cmd| { for (cmds) |cmd| {
if (std.mem.indexOf(*Dispatchable(CommandBuffer), self.buffers.items, &[_]*Dispatchable(CommandBuffer){cmd})) |i| { if (std.mem.indexOfScalar(*Dispatchable(CommandBuffer), self.buffers.items, cmd)) |i| {
const save = self.buffers.orderedRemove(i); const save = self.buffers.orderedRemove(i);
// Append the now free command buffer at the end of the pool
self.buffers.appendAssumeCapacity(save); self.buffers.appendAssumeCapacity(save);
len += 1; len += 1;
} }
@@ -82,8 +81,6 @@ pub fn freeCommandBuffers(self: *Self, cmds: []*Dispatchable(CommandBuffer)) VkE
const new_first_free_buffer_index, const has_overflown = @subWithOverflow(self.first_free_buffer_index, len); const new_first_free_buffer_index, const has_overflown = @subWithOverflow(self.first_free_buffer_index, len);
if (has_overflown == 0) { if (has_overflown == 0) {
self.first_free_buffer_index = new_first_free_buffer_index; self.first_free_buffer_index = new_first_free_buffer_index;
} else {
std.log.scoped(.CommandPool).warn("Avoided an underflow. This should not happen, please fill an issue.", .{});
} }
} }

View File

@@ -21,6 +21,7 @@ pub const VTable = struct {
allocateDescriptorSet: *const fn (*Self, *DescriptorSetLayout) VkError!*DescriptorSet, allocateDescriptorSet: *const fn (*Self, *DescriptorSetLayout) VkError!*DescriptorSet,
destroy: *const fn (*Self, std.mem.Allocator) void, destroy: *const fn (*Self, std.mem.Allocator) void,
freeDescriptorSet: *const fn (*Self, *DescriptorSet) VkError!void, freeDescriptorSet: *const fn (*Self, *DescriptorSet) VkError!void,
reset: *const fn (*Self, vk.DescriptorPoolResetFlags) VkError!void,
}; };
pub fn init(device: *Device, allocator: std.mem.Allocator, info: *const vk.DescriptorPoolCreateInfo) VkError!Self { pub fn init(device: *Device, allocator: std.mem.Allocator, info: *const vk.DescriptorPoolCreateInfo) VkError!Self {
@@ -43,3 +44,7 @@ pub inline fn destroy(self: *Self, allocator: std.mem.Allocator) void {
pub inline fn freeDescriptorSet(self: *Self, set: *DescriptorSet) VkError!void { pub inline fn freeDescriptorSet(self: *Self, set: *DescriptorSet) VkError!void {
try self.vtable.freeDescriptorSet(self, set); try self.vtable.freeDescriptorSet(self, set);
} }
pub inline fn reset(self: *Self, flags: vk.DescriptorPoolResetFlags) VkError!void {
try self.vtable.reset(self, flags);
}

View File

@@ -1,8 +1,6 @@
const std = @import("std"); const std = @import("std");
const vk = @import("vulkan"); const vk = @import("vulkan");
const logger = @import("lib.zig").logger;
const Dispatchable = @import("Dispatchable.zig").Dispatchable; const Dispatchable = @import("Dispatchable.zig").Dispatchable;
const NonDispatchable = @import("NonDispatchable.zig").NonDispatchable; const NonDispatchable = @import("NonDispatchable.zig").NonDispatchable;
const VulkanAllocator = @import("VulkanAllocator.zig"); const VulkanAllocator = @import("VulkanAllocator.zig");
@@ -23,6 +21,7 @@ const Event = @import("Event.zig");
const Fence = @import("Fence.zig"); const Fence = @import("Fence.zig");
const Framebuffer = @import("Framebuffer.zig"); const Framebuffer = @import("Framebuffer.zig");
const Image = @import("Image.zig"); const Image = @import("Image.zig");
const Instance = @import("Instance.zig");
const ImageView = @import("ImageView.zig"); const ImageView = @import("ImageView.zig");
const Pipeline = @import("Pipeline.zig"); const Pipeline = @import("Pipeline.zig");
const PipelineCache = @import("PipelineCache.zig"); const PipelineCache = @import("PipelineCache.zig");
@@ -35,6 +34,7 @@ const ShaderModule = @import("ShaderModule.zig");
const Self = @This(); const Self = @This();
pub const ObjectType: vk.ObjectType = .device; pub const ObjectType: vk.ObjectType = .device;
instance: *Instance,
physical_device: *const PhysicalDevice, physical_device: *const PhysicalDevice,
queues: std.AutoArrayHashMapUnmanaged(u32, std.ArrayList(*Dispatchable(Queue))), queues: std.AutoArrayHashMapUnmanaged(u32, std.ArrayList(*Dispatchable(Queue))),
host_allocator: VulkanAllocator, host_allocator: VulkanAllocator,
@@ -71,9 +71,10 @@ pub const DispatchTable = struct {
destroy: *const fn (*Self, std.mem.Allocator) VkError!void, destroy: *const fn (*Self, std.mem.Allocator) VkError!void,
}; };
pub fn init(allocator: std.mem.Allocator, physical_device: *const PhysicalDevice, info: *const vk.DeviceCreateInfo) VkError!Self { pub fn init(allocator: std.mem.Allocator, instance: *Instance, physical_device: *const PhysicalDevice, info: *const vk.DeviceCreateInfo) VkError!Self {
_ = info; _ = info;
return .{ return .{
.instance = instance,
.physical_device = physical_device, .physical_device = physical_device,
.queues = .empty, .queues = .empty,
.host_allocator = VulkanAllocator.from(allocator).clone(), .host_allocator = VulkanAllocator.from(allocator).clone(),
@@ -99,14 +100,15 @@ pub fn createQueues(self: *Self, allocator: std.mem.Allocator, info: *const vk.D
const queue = try self.vtable.createQueue(allocator, self, queue_info.queue_family_index, @intCast(family_ptr.items.len), queue_info.flags); const queue = try self.vtable.createQueue(allocator, self, queue_info.queue_family_index, @intCast(family_ptr.items.len), queue_info.flags);
logger.getManager().get().indent();
defer logger.getManager().get().unindent();
const dispatchable_queue = try Dispatchable(Queue).wrap(allocator, queue); const dispatchable_queue = try Dispatchable(Queue).wrap(allocator, queue);
family_ptr.append(allocator, dispatchable_queue) catch return VkError.OutOfHostMemory; family_ptr.append(allocator, dispatchable_queue) catch return VkError.OutOfHostMemory;
} }
} }
pub fn io(self: *const Self) std.Io {
return self.instance.io();
}
pub inline fn destroy(self: *Self, allocator: std.mem.Allocator) VkError!void { pub inline fn destroy(self: *Self, allocator: std.mem.Allocator) VkError!void {
var it = self.queues.iterator(); var it = self.queues.iterator();
while (it.next()) |entry| { while (it.next()) |entry| {

View File

@@ -18,6 +18,7 @@ extent: vk.Extent3D,
mip_levels: u32, mip_levels: u32,
array_layers: u32, array_layers: u32,
samples: vk.SampleCountFlags, samples: vk.SampleCountFlags,
flags: vk.ImageCreateFlags,
tiling: vk.ImageTiling, tiling: vk.ImageTiling,
usage: vk.ImageUsageFlags, usage: vk.ImageUsageFlags,
memory: ?*DeviceMemory, memory: ?*DeviceMemory,
@@ -28,7 +29,8 @@ vtable: *const VTable,
pub const VTable = struct { pub const VTable = struct {
destroy: *const fn (*Self, std.mem.Allocator) void, destroy: *const fn (*Self, std.mem.Allocator) void,
getMemoryRequirements: *const fn (*Self, *vk.MemoryRequirements) void, getMemoryRequirements: *const fn (*Self, *vk.MemoryRequirements) VkError!void,
getTotalSizeForAspect: *const fn (*const Self, vk.ImageAspectFlags) VkError!usize,
}; };
pub fn init(device: *Device, allocator: std.mem.Allocator, info: *const vk.ImageCreateInfo) VkError!Self { pub fn init(device: *Device, allocator: std.mem.Allocator, info: *const vk.ImageCreateInfo) VkError!Self {
@@ -41,6 +43,7 @@ pub fn init(device: *Device, allocator: std.mem.Allocator, info: *const vk.Image
.mip_levels = info.mip_levels, .mip_levels = info.mip_levels,
.array_layers = info.array_layers, .array_layers = info.array_layers,
.samples = info.samples, .samples = info.samples,
.flags = info.flags,
.tiling = info.tiling, .tiling = info.tiling,
.usage = info.usage, .usage = info.usage,
.memory = null, .memory = null,
@@ -55,7 +58,7 @@ pub inline fn destroy(self: *Self, allocator: std.mem.Allocator) void {
} }
pub inline fn bindMemory(self: *Self, memory: *DeviceMemory, offset: vk.DeviceSize) VkError!void { pub inline fn bindMemory(self: *Self, memory: *DeviceMemory, offset: vk.DeviceSize) VkError!void {
const image_size = self.getTotalSize(); const image_size = try self.getTotalSize();
if (offset >= image_size or !self.allowed_memory_types.isSet(memory.memory_type_index)) { if (offset >= image_size or !self.allowed_memory_types.isSet(memory.memory_type_index)) {
return VkError.ValidationFailed; return VkError.ValidationFailed;
} }
@@ -63,94 +66,28 @@ pub inline fn bindMemory(self: *Self, memory: *DeviceMemory, offset: vk.DeviceSi
self.memory_offset = offset; self.memory_offset = offset;
} }
pub inline fn getMemoryRequirements(self: *Self, requirements: *vk.MemoryRequirements) void { pub inline fn getMemoryRequirements(self: *Self, requirements: *vk.MemoryRequirements) VkError!void {
const image_size = self.getTotalSize(); requirements.size = try self.getTotalSize();
requirements.size = image_size;
requirements.memory_type_bits = self.allowed_memory_types.mask; requirements.memory_type_bits = self.allowed_memory_types.mask;
self.vtable.getMemoryRequirements(self, requirements); try self.vtable.getMemoryRequirements(self, requirements);
} }
pub inline fn getClearFormat(self: *Self) vk.Format { pub inline fn getTexelSize(self: *const Self) usize {
return if (lib.vku.vkuFormatIsSINT(@intCast(@intFromEnum(self.format)))) return lib.format.texelSize(self.format);
.r32g32b32a32_sint
else if (lib.vku.vkuFormatIsUINT(@intCast(@intFromEnum(self.format))))
.r32g32b32a32_uint
else
.r32g32b32a32_sfloat;
} }
pub inline fn getPixelSize(self: *const Self) usize { pub inline fn getTotalSizeForAspect(self: *const Self, aspect: vk.ImageAspectFlags) VkError!usize {
return lib.vku.vkuFormatTexelBlockSize(@intCast(@intFromEnum(self.format))); return self.vtable.getTotalSizeForAspect(self, aspect);
} }
pub inline fn getTotalSize(self: *const Self) usize { pub inline fn getTotalSize(self: *const Self) VkError!usize {
const pixel_size = self.getPixelSize(); return self.vtable.getTotalSizeForAspect(self, lib.format.toAspect(self.format));
return self.extent.width * self.extent.height * self.extent.depth * pixel_size;
} }
pub inline fn getFormatPixelSize(format: vk.Format) usize { pub inline fn formatFromAspect(self: *const Self, aspect_mask: vk.ImageAspectFlags) vk.Format {
return lib.vku.vkuFormatTexelBlockSize(@intCast(@intFromEnum(format))); return lib.format.fromAspect(self.format, aspect_mask);
} }
pub inline fn getFormatTotalSize(self: *const Self, format: vk.Format) usize { pub inline fn formatToAspect(self: *const Self, aspect_mask: vk.ImageAspectFlags) vk.ImageAspectFlags {
const pixel_size = self.getFormatPixelSize(format); return lib.format.toAspect(self.format, aspect_mask);
return self.extent.width * self.extent.height * self.extent.depth * pixel_size;
}
pub fn formatSupportsColorAttachemendBlend(format: vk.Format) bool {
return switch (format) {
// Vulkan 1.1 mandatory
.r5g6b5_unorm_pack16,
.a1r5g5b5_unorm_pack16,
.r8_unorm,
.r8g8_unorm,
.r8g8b8a8_unorm,
.r8g8b8a8_srgb,
.b8g8r8a8_unorm,
.b8g8r8a8_srgb,
.a8b8g8r8_unorm_pack32,
.a8b8g8r8_srgb_pack32,
.a2b10g10r10_unorm_pack32,
.r16_sfloat,
.r16g16_sfloat,
.r16g16b16a16_sfloat,
// optional
.r4g4b4a4_unorm_pack16,
.b4g4r4a4_unorm_pack16,
.b5g6r5_unorm_pack16,
.r5g5b5a1_unorm_pack16,
.b5g5r5a1_unorm_pack16,
.a2r10g10b10_unorm_pack32,
.r16_unorm,
.r16g16_unorm,
.r16g16b16a16_unorm,
.r32_sfloat,
.r32g32_sfloat,
.r32g32b32a32_sfloat,
.b10g11r11_ufloat_pack32,
.a4r4g4b4_unorm_pack16,
.a4b4g4r4_unorm_pack16,
=> true,
else => false,
};
}
pub fn formatFromAspect(base_format: vk.Format, aspect: vk.ImageAspectFlags) vk.Format {
if (aspect.color_bit or (aspect.color_bit and aspect.stencil_bit)) {
return base_format;
} else if (aspect.depth_bit) {
if (base_format == .d16_unorm or base_format == .d16_unorm_s8_uint) {
return .d16_unorm;
} else if (base_format == .d24_unorm_s8_uint) {
return .x8_d24_unorm_pack32;
} else if (base_format == .d32_sfloat or base_format == .d32_sfloat_s8_uint) {
return .d32_sfloat;
}
} else if (aspect.stencil_bit) {
if (base_format == .s8_uint or base_format == .d16_unorm_s8_uint or base_format == .d24_unorm_s8_uint or base_format == .d32_sfloat_s8_uint) {
return .s8_uint;
}
}
lib.unsupported("format {d}", .{@intFromEnum(base_format)});
return base_format;
} }

View File

@@ -1,8 +1,7 @@
const std = @import("std"); const std = @import("std");
const builtin = @import("builtin"); const builtin = @import("builtin");
const vk = @import("vulkan"); const vk = @import("vulkan");
const config = @import("lib.zig").config;
const logger = @import("lib.zig").logger;
const VkError = @import("error_set.zig").VkError; const VkError = @import("error_set.zig").VkError;
const Dispatchable = @import("Dispatchable.zig").Dispatchable; const Dispatchable = @import("Dispatchable.zig").Dispatchable;
@@ -21,6 +20,12 @@ comptime {
const Self = @This(); const Self = @This();
pub const ObjectType: vk.ObjectType = .instance; pub const ObjectType: vk.ObjectType = .instance;
const DeviceAllocator = struct {
pub inline fn allocator(_: @This()) std.mem.Allocator {
return std.heap.smp_allocator;
}
};
physical_devices: std.ArrayList(*Dispatchable(PhysicalDevice)), physical_devices: std.ArrayList(*Dispatchable(PhysicalDevice)),
dispatch_table: *const DispatchTable, dispatch_table: *const DispatchTable,
vtable: *const VTable, vtable: *const VTable,
@@ -28,6 +33,7 @@ vtable: *const VTable,
pub const VTable = struct { pub const VTable = struct {
releasePhysicalDevices: *const fn (*Self, std.mem.Allocator) VkError!void, releasePhysicalDevices: *const fn (*Self, std.mem.Allocator) VkError!void,
requestPhysicalDevices: *const fn (*Self, std.mem.Allocator) VkError!void, requestPhysicalDevices: *const fn (*Self, std.mem.Allocator) VkError!void,
io: *const fn (*Self) std.Io,
}; };
pub const DispatchTable = struct { pub const DispatchTable = struct {
@@ -85,9 +91,6 @@ pub fn releasePhysicalDevices(self: *Self, allocator: std.mem.Allocator) VkError
} }
pub fn requestPhysicalDevices(self: *Self, allocator: std.mem.Allocator) VkError!void { pub fn requestPhysicalDevices(self: *Self, allocator: std.mem.Allocator) VkError!void {
logger.getManager().get().indent();
defer logger.getManager().get().unindent();
try self.vtable.requestPhysicalDevices(self, allocator); try self.vtable.requestPhysicalDevices(self, allocator);
if (self.physical_devices.items.len == 0) { if (self.physical_devices.items.len == 0) {
std.log.scoped(.vkCreateInstance).err("No VkPhysicalDevice found", .{}); std.log.scoped(.vkCreateInstance).err("No VkPhysicalDevice found", .{});
@@ -97,3 +100,7 @@ pub fn requestPhysicalDevices(self: *Self, allocator: std.mem.Allocator) VkError
std.log.scoped(.vkCreateInstance).debug("Found VkPhysicalDevice named {s}", .{physical_device.object.props.device_name}); std.log.scoped(.vkCreateInstance).debug("Found VkPhysicalDevice named {s}", .{physical_device.object.props.device_name});
} }
} }
pub fn io(self: *Self) std.Io {
return self.vtable.io(self);
}

View File

@@ -13,7 +13,7 @@ props: vk.PhysicalDeviceProperties,
mem_props: vk.PhysicalDeviceMemoryProperties, mem_props: vk.PhysicalDeviceMemoryProperties,
features: vk.PhysicalDeviceFeatures, features: vk.PhysicalDeviceFeatures,
queue_family_props: std.ArrayList(vk.QueueFamilyProperties), queue_family_props: std.ArrayList(vk.QueueFamilyProperties),
instance: *const Instance, instance: *Instance,
dispatch_table: *const DispatchTable, dispatch_table: *const DispatchTable,
pub const DispatchTable = struct { pub const DispatchTable = struct {
@@ -25,7 +25,7 @@ pub const DispatchTable = struct {
release: *const fn (*Self, std.mem.Allocator) VkError!void, release: *const fn (*Self, std.mem.Allocator) VkError!void,
}; };
pub fn init(allocator: std.mem.Allocator, instance: *const Instance) VkError!Self { pub fn init(allocator: std.mem.Allocator, instance: *Instance) VkError!Self {
_ = allocator; _ = allocator;
return .{ return .{
.props = .{ .props = .{

View File

@@ -53,9 +53,8 @@ pub const SubmitInfo = struct {
} }
fn deinitBlob(allocator: std.mem.Allocator, self: *std.ArrayList(SubmitInfo)) void { fn deinitBlob(allocator: std.mem.Allocator, self: *std.ArrayList(SubmitInfo)) void {
for (self.items) |submit_info| { for (self.items) |*submit_info| {
const command_buffers = &submit_info.command_buffers; submit_info.command_buffers.deinit(allocator);
@constCast(command_buffers).deinit(allocator);
} }
self.deinit(allocator); self.deinit(allocator);
} }

View File

@@ -9,16 +9,22 @@ const Alignment = std.mem.Alignment;
const Self = @This(); const Self = @This();
const FallbackAllocator = struct {
pub inline fn allocator(_: @This()) std.mem.Allocator {
return std.heap.smp_allocator;
}
};
callbacks: ?vk.AllocationCallbacks, callbacks: ?vk.AllocationCallbacks,
scope: vk.SystemAllocationScope, scope: vk.SystemAllocationScope,
fallback_allocator: std.heap.ThreadSafeAllocator, fallback_allocator: FallbackAllocator,
pub fn init(callbacks: ?*const vk.AllocationCallbacks, scope: vk.SystemAllocationScope) Self { pub fn init(callbacks: ?*const vk.AllocationCallbacks, scope: vk.SystemAllocationScope) Self {
const deref_callbacks = if (callbacks) |c| c.* else null; const deref_callbacks = if (callbacks) |c| c.* else null;
return .{ return .{
.callbacks = deref_callbacks, .callbacks = deref_callbacks,
.scope = scope, .scope = scope,
.fallback_allocator = .{ .child_allocator = std.heap.c_allocator }, .fallback_allocator = .{},
}; };
} }

View File

@@ -50,7 +50,7 @@ pub const VkError = error{
IncompatibleShaderBinaryExt, IncompatibleShaderBinaryExt,
PipelineBinaryMissingKhr, PipelineBinaryMissingKhr,
NotEnoughSpaceKhr, NotEnoughSpaceKhr,
// ====== Internal errors // == Set of internal errors for better debugging. They map to VK_UNKNOWN_ERROR
InvalidHandleDrv, InvalidHandleDrv,
InvalidPipelineDrv, InvalidPipelineDrv,
InvalidDeviceMemoryDrv, InvalidDeviceMemoryDrv,

120
src/vulkan/format.zig git.filemode.normal_file
View File

@@ -0,0 +1,120 @@
const std = @import("std");
const vk = @import("vulkan");
const lib = @import("lib.zig");
pub fn fromAspect(format: vk.Format, aspect: vk.ImageAspectFlags) vk.Format {
if (aspect.color_bit or (aspect.color_bit and aspect.stencil_bit)) {
return format;
} else if (aspect.depth_bit) {
if (format == .d16_unorm or format == .d16_unorm_s8_uint) {
return .d16_unorm;
} else if (format == .d24_unorm_s8_uint) {
return .x8_d24_unorm_pack32;
} else if (format == .d32_sfloat or format == .d32_sfloat_s8_uint) {
return .d32_sfloat;
}
} else if (aspect.stencil_bit) {
if (format == .s8_uint or format == .d16_unorm_s8_uint or format == .d24_unorm_s8_uint or format == .d32_sfloat_s8_uint) {
return .s8_uint;
}
}
lib.unsupported("format {s}", .{@tagName(format)});
return format;
}
pub fn toAspect(format: vk.Format) vk.ImageAspectFlags {
var aspect: vk.ImageAspectFlags = .{};
if (lib.vku.vkuFormatHasDepth(@intCast(@intFromEnum(format))))
aspect.depth_bit = true;
if (lib.vku.vkuFormatHasStencil(@intCast(@intFromEnum(format))))
aspect.stencil_bit = true;
if (aspect.toInt() == 0)
aspect.color_bit = true;
return aspect;
}
pub inline fn texelSize(format: vk.Format) usize {
return lib.vku.vkuFormatTexelBlockSize(@intCast(@intFromEnum(format)));
}
pub inline fn supportsColorAttachemendBlend(format: vk.Format) bool {
return switch (format) {
// Vulkan 1.1 mandatory
.r5g6b5_unorm_pack16,
.a1r5g5b5_unorm_pack16,
.r8_unorm,
.r8g8_unorm,
.r8g8b8a8_unorm,
.r8g8b8a8_srgb,
.b8g8r8a8_unorm,
.b8g8r8a8_srgb,
.a8b8g8r8_unorm_pack32,
.a8b8g8r8_srgb_pack32,
.a2b10g10r10_unorm_pack32,
.r16_sfloat,
.r16g16_sfloat,
.r16g16b16a16_sfloat,
// optional
.r4g4b4a4_unorm_pack16,
.b4g4r4a4_unorm_pack16,
.b5g6r5_unorm_pack16,
.r5g5b5a1_unorm_pack16,
.b5g5r5a1_unorm_pack16,
.a2r10g10b10_unorm_pack32,
.r16_unorm,
.r16g16_unorm,
.r16g16b16a16_unorm,
.r32_sfloat,
.r32g32_sfloat,
.r32g32b32a32_sfloat,
.b10g11r11_ufloat_pack32,
.a4r4g4b4_unorm_pack16,
.a4b4g4r4_unorm_pack16,
=> true,
else => false,
};
}
pub inline fn pitchMemSize(format: vk.Format, width: usize) usize {
// To be updated for compressed formats handling
return texelSize(format) * width;
}
pub inline fn sliceMemSize(format: vk.Format, width: usize, height: usize) usize {
// To be updated for compressed formats handling
return pitchMemSize(format, width) * height;
}
pub inline fn isDepthAndStencil(format: vk.Format) bool {
return lib.vku.vkuFormatIsDepthAndStencil(@intCast(@intFromEnum(format)));
}
pub inline fn isSrgb(format: vk.Format) bool {
return lib.vku.vkuFormatIsSRGB(@intCast(@intFromEnum(format)));
}
pub inline fn isSfloat(format: vk.Format) bool {
return lib.vku.vkuFormatIsSFLOAT(@intCast(@intFromEnum(format)));
}
pub inline fn isSint(format: vk.Format) bool {
return lib.vku.vkuFormatIsSINT(@intCast(@intFromEnum(format)));
}
pub inline fn isSnorm(format: vk.Format) bool {
return lib.vku.vkuFormatIsSNORM(@intCast(@intFromEnum(format)));
}
pub inline fn isUfloat(format: vk.Format) bool {
return lib.vku.vkuFormatIsUFLOAT(@intCast(@intFromEnum(format)));
}
pub inline fn isUint(format: vk.Format) bool {
return lib.vku.vkuFormatIsUINT(@intCast(@intFromEnum(format)));
}
pub inline fn isUnorm(format: vk.Format) bool {
return lib.vku.vkuFormatIsUNORM(@intCast(@intFromEnum(format)));
}

View File

@@ -9,7 +9,9 @@ pub const vku = @cImport({
pub const errors = @import("error_set.zig"); pub const errors = @import("error_set.zig");
pub const lib_vulkan = @import("lib_vulkan.zig"); pub const lib_vulkan = @import("lib_vulkan.zig");
pub const logger = @import("logger/logger.zig"); pub const logger = @import("logger.zig");
pub const format = @import("format.zig");
pub const config = @import("config");
pub const Dispatchable = @import("Dispatchable.zig").Dispatchable; pub const Dispatchable = @import("Dispatchable.zig").Dispatchable;
pub const NonDispatchable = @import("NonDispatchable.zig").NonDispatchable; pub const NonDispatchable = @import("NonDispatchable.zig").NonDispatchable;
@@ -83,27 +85,6 @@ pub const std_options: std.Options = .{
.logFn = logger.log, .logFn = logger.log,
}; };
pub const LogVerboseLevel = enum {
None,
Standard,
High,
TooMuch,
};
pub inline fn getLogVerboseLevel() LogVerboseLevel {
const allocator = std.heap.c_allocator;
const level = std.process.getEnvVarOwned(allocator, DRIVER_LOGS_ENV_NAME) catch return .None;
defer allocator.free(level);
return if (std.mem.eql(u8, level, "none"))
.None
else if (std.mem.eql(u8, level, "all"))
.High
else if (std.mem.eql(u8, level, "stupid"))
.TooMuch
else
.Standard;
}
pub inline fn unsupported(comptime fmt: []const u8, args: anytype) void { pub inline fn unsupported(comptime fmt: []const u8, args: anytype) void {
std.log.scoped(.UNSUPPORTED).warn(fmt, args); std.log.scoped(.UNSUPPORTED).warn(fmt, args);
} }

View File

@@ -44,22 +44,14 @@ pub const RenderPass = @import("RenderPass.zig");
pub const Sampler = @import("Sampler.zig"); pub const Sampler = @import("Sampler.zig");
pub const ShaderModule = @import("ShaderModule.zig"); pub const ShaderModule = @import("ShaderModule.zig");
fn entryPointBeginLogTrace(comptime scope: @Type(.enum_literal)) void { fn entryPointBeginLogTrace(comptime scope: @EnumLiteral()) void {
std.log.scoped(scope).debug("Calling {s}...", .{@tagName(scope)}); std.log.scoped(scope).debug("Calling {s}...", .{@tagName(scope)});
logger.getManager().get().indent();
} }
fn entryPointEndLogTrace() void { fn entryPointEndLogTrace() void {}
logger.getManager().get().unindent();
}
fn entryPointNotFoundErrorLog(comptime scope: @Type(.enum_literal), name: []const u8) void {
if (lib.getLogVerboseLevel() != .TooMuch) return;
std.log.scoped(scope).err("Could not find function {s}", .{name});
}
inline fn notImplementedWarning() void { inline fn notImplementedWarning() void {
logger.nestedFixme("function not yet implemented", .{}); logger.fixme("function not yet implemented", .{});
} }
fn functionMapEntryPoint(comptime name: []const u8) struct { []const u8, vk.PfnVoidFunction } { fn functionMapEntryPoint(comptime name: []const u8) struct { []const u8, vk.PfnVoidFunction } {
@@ -251,9 +243,6 @@ pub export fn stroll_icdNegotiateLoaderICDInterfaceVersion(p_version: *u32) call
} }
pub export fn vk_icdGetInstanceProcAddr(p_instance: vk.Instance, p_name: ?[*:0]const u8) callconv(vk.vulkan_call_conv) vk.PfnVoidFunction { pub export fn vk_icdGetInstanceProcAddr(p_instance: vk.Instance, p_name: ?[*:0]const u8) callconv(vk.vulkan_call_conv) vk.PfnVoidFunction {
if (lib.getLogVerboseLevel() == .TooMuch) {
entryPointBeginLogTrace(.vk_icdGetInstanceProcAddr);
}
defer entryPointEndLogTrace(); defer entryPointEndLogTrace();
if (p_name == null) return null; if (p_name == null) return null;
@@ -264,9 +253,6 @@ pub export fn vk_icdGetInstanceProcAddr(p_instance: vk.Instance, p_name: ?[*:0]c
} }
pub export fn stroll_icdGetPhysicalDeviceProcAddr(_: vk.Instance, p_name: ?[*:0]const u8) callconv(vk.vulkan_call_conv) vk.PfnVoidFunction { pub export fn stroll_icdGetPhysicalDeviceProcAddr(_: vk.Instance, p_name: ?[*:0]const u8) callconv(vk.vulkan_call_conv) vk.PfnVoidFunction {
if (lib.getLogVerboseLevel() == .TooMuch) {
entryPointBeginLogTrace(.vk_icdGetPhysicalDeviceProcAddr);
}
defer entryPointEndLogTrace(); defer entryPointEndLogTrace();
if (p_name == null) return null; if (p_name == null) return null;
@@ -274,16 +260,12 @@ pub export fn stroll_icdGetPhysicalDeviceProcAddr(_: vk.Instance, p_name: ?[*:0]
if (physical_device_pfn_map.get(name)) |pfn| return pfn; if (physical_device_pfn_map.get(name)) |pfn| return pfn;
entryPointNotFoundErrorLog(.vk_icdGetPhysicalDeviceProcAddr, name);
return null; return null;
} }
// Global functions ========================================================================================================================================== // Global functions ==========================================================================================================================================
pub export fn vkGetInstanceProcAddr(p_instance: vk.Instance, p_name: ?[*:0]const u8) callconv(vk.vulkan_call_conv) vk.PfnVoidFunction { pub export fn vkGetInstanceProcAddr(p_instance: vk.Instance, p_name: ?[*:0]const u8) callconv(vk.vulkan_call_conv) vk.PfnVoidFunction {
if (lib.getLogVerboseLevel() == .TooMuch) {
entryPointBeginLogTrace(.vkGetInstanceProcAddr);
}
defer entryPointEndLogTrace(); defer entryPointEndLogTrace();
if (p_name == null) return null; if (p_name == null) return null;
@@ -295,7 +277,6 @@ pub export fn vkGetInstanceProcAddr(p_instance: vk.Instance, p_name: ?[*:0]const
if (physical_device_pfn_map.get(name)) |pfn| return pfn; if (physical_device_pfn_map.get(name)) |pfn| return pfn;
if (device_pfn_map.get(name)) |pfn| return pfn; if (device_pfn_map.get(name)) |pfn| return pfn;
} }
entryPointNotFoundErrorLog(.vkGetInstanceProcAddr, name);
return null; return null;
} }
@@ -342,8 +323,6 @@ pub export fn strollEnumerateInstanceVersion(version: *u32) callconv(vk.vulkan_c
// Instance functions ======================================================================================================================================== // Instance functions ========================================================================================================================================
pub export fn strollDestroyInstance(p_instance: vk.Instance, callbacks: ?*const vk.AllocationCallbacks) callconv(vk.vulkan_call_conv) void { pub export fn strollDestroyInstance(p_instance: vk.Instance, callbacks: ?*const vk.AllocationCallbacks) callconv(vk.vulkan_call_conv) void {
defer logger.getManager().deinit();
entryPointBeginLogTrace(.vkDestroyInstance); entryPointBeginLogTrace(.vkDestroyInstance);
defer entryPointEndLogTrace(); defer entryPointEndLogTrace();
@@ -624,7 +603,7 @@ pub export fn strollAllocateCommandBuffers(p_device: vk.Device, info: *const vk.
} }
pub export fn strollAllocateDescriptorSets(p_device: vk.Device, info: *const vk.DescriptorSetAllocateInfo, p_sets: [*]vk.DescriptorSet) callconv(vk.vulkan_call_conv) vk.Result { pub export fn strollAllocateDescriptorSets(p_device: vk.Device, info: *const vk.DescriptorSetAllocateInfo, p_sets: [*]vk.DescriptorSet) callconv(vk.vulkan_call_conv) vk.Result {
entryPointBeginLogTrace(.vkAllocateCommandBuffers); entryPointBeginLogTrace(.vkAllocateDescriptorSets);
defer entryPointEndLogTrace(); defer entryPointEndLogTrace();
Dispatchable(Device).checkHandleValidity(p_device) catch |err| return toVkResult(err); Dispatchable(Device).checkHandleValidity(p_device) catch |err| return toVkResult(err);
@@ -1322,9 +1301,6 @@ pub export fn strollGetDeviceMemoryCommitment(p_device: vk.Device, p_memory: vk.
} }
pub export fn strollGetDeviceProcAddr(p_device: vk.Device, p_name: ?[*:0]const u8) callconv(vk.vulkan_call_conv) vk.PfnVoidFunction { pub export fn strollGetDeviceProcAddr(p_device: vk.Device, p_name: ?[*:0]const u8) callconv(vk.vulkan_call_conv) vk.PfnVoidFunction {
if (lib.getLogVerboseLevel() == .TooMuch) {
entryPointBeginLogTrace(.vkGetDeviceProcAddr);
}
defer entryPointEndLogTrace(); defer entryPointEndLogTrace();
if (p_name == null) return null; if (p_name == null) return null;
@@ -1333,7 +1309,6 @@ pub export fn strollGetDeviceProcAddr(p_device: vk.Device, p_name: ?[*:0]const u
if (p_device == .null_handle) return null; if (p_device == .null_handle) return null;
if (device_pfn_map.get(name)) |pfn| return pfn; if (device_pfn_map.get(name)) |pfn| return pfn;
entryPointNotFoundErrorLog(.vkGetDeviceProcAddr, name);
return null; return null;
} }
@@ -1385,7 +1360,7 @@ pub export fn strollGetImageMemoryRequirements(p_device: vk.Device, p_image: vk.
Dispatchable(Device).checkHandleValidity(p_device) catch |err| return errorLogger(err); Dispatchable(Device).checkHandleValidity(p_device) catch |err| return errorLogger(err);
const image = NonDispatchable(Image).fromHandleObject(p_image) catch |err| return errorLogger(err); const image = NonDispatchable(Image).fromHandleObject(p_image) catch |err| return errorLogger(err);
image.getMemoryRequirements(requirements); image.getMemoryRequirements(requirements) catch |err| return errorLogger(err);
} }
pub export fn strollGetImageSparseMemoryRequirements(p_device: vk.Device, p_image: vk.Image, requirements: *vk.SparseImageMemoryRequirements) callconv(vk.vulkan_call_conv) void { pub export fn strollGetImageSparseMemoryRequirements(p_device: vk.Device, p_image: vk.Image, requirements: *vk.SparseImageMemoryRequirements) callconv(vk.vulkan_call_conv) void {
@@ -1517,28 +1492,18 @@ pub export fn strollResetCommandPool(p_device: vk.Device, p_pool: vk.CommandPool
Dispatchable(Device).checkHandleValidity(p_device) catch |err| return toVkResult(err); Dispatchable(Device).checkHandleValidity(p_device) catch |err| return toVkResult(err);
const pool = NonDispatchable(CommandPool).fromHandleObject(p_pool) catch |err| return toVkResult(err); const pool = NonDispatchable(CommandPool).fromHandleObject(p_pool) catch |err| return toVkResult(err);
pool.reset(flags) catch |err| return toVkResult(err);
notImplementedWarning(); return .success;
_ = pool;
_ = flags;
return .error_unknown;
} }
pub export fn strollResetDescriptorPool(p_device: vk.Device, p_pool: vk.DescriptorPool, flags: vk.CommandPoolResetFlags) callconv(vk.vulkan_call_conv) vk.Result { pub export fn strollResetDescriptorPool(p_device: vk.Device, p_pool: vk.DescriptorPool, flags: vk.DescriptorPoolResetFlags) callconv(vk.vulkan_call_conv) vk.Result {
entryPointBeginLogTrace(.vkResetDescriptorPool); entryPointBeginLogTrace(.vkResetDescriptorPool);
defer entryPointEndLogTrace(); defer entryPointEndLogTrace();
Dispatchable(Device).checkHandleValidity(p_device) catch |err| return toVkResult(err); Dispatchable(Device).checkHandleValidity(p_device) catch |err| return toVkResult(err);
const pool = NonDispatchable(DescriptorPool).fromHandleObject(p_pool) catch |err| return toVkResult(err); const pool = NonDispatchable(DescriptorPool).fromHandleObject(p_pool) catch |err| return toVkResult(err);
pool.reset(flags) catch |err| return toVkResult(err);
notImplementedWarning(); return .success;
_ = pool;
_ = flags;
return .error_unknown;
} }
pub export fn strollResetEvent(p_device: vk.Device, p_event: vk.Fence) callconv(vk.vulkan_call_conv) vk.Result { pub export fn strollResetEvent(p_device: vk.Device, p_event: vk.Fence) callconv(vk.vulkan_call_conv) vk.Result {
@@ -1735,16 +1700,7 @@ pub export fn strollCmdBlitImage(
const src = NonDispatchable(Image).fromHandleObject(p_src_image) catch |err| return errorLogger(err); const src = NonDispatchable(Image).fromHandleObject(p_src_image) catch |err| return errorLogger(err);
const dst = NonDispatchable(Image).fromHandleObject(p_dst_image) catch |err| return errorLogger(err); const dst = NonDispatchable(Image).fromHandleObject(p_dst_image) catch |err| return errorLogger(err);
notImplementedWarning(); cmd.blitImage(src, src_layout, dst, dst_layout, regions[0..count], filter) catch |err| return errorLogger(err);
_ = cmd;
_ = src;
_ = src_layout;
_ = dst;
_ = dst_layout;
_ = count;
_ = regions;
_ = filter;
} }
pub export fn strollCmdClearAttachments(p_cmd: vk.CommandBuffer, attachment_count: u32, attachments: [*]const vk.ClearAttachment, rect_count: u32, rects: [*]const vk.ClearRect) callconv(vk.vulkan_call_conv) void { pub export fn strollCmdClearAttachments(p_cmd: vk.CommandBuffer, attachment_count: u32, attachments: [*]const vk.ClearAttachment, rect_count: u32, rects: [*]const vk.ClearRect) callconv(vk.vulkan_call_conv) void {
@@ -1957,12 +1913,10 @@ pub export fn strollCmdExecuteCommands(p_cmd: vk.CommandBuffer, count: u32, p_cm
defer entryPointEndLogTrace(); defer entryPointEndLogTrace();
const cmd = Dispatchable(CommandBuffer).fromHandleObject(p_cmd) catch |err| return errorLogger(err); const cmd = Dispatchable(CommandBuffer).fromHandleObject(p_cmd) catch |err| return errorLogger(err);
for (p_cmds, 0..count) |p_sec_cmd, _| {
notImplementedWarning(); const sec_cmd = Dispatchable(CommandBuffer).fromHandleObject(p_sec_cmd) catch |err| return errorLogger(err);
cmd.executeCommands(sec_cmd) catch |err| return errorLogger(err);
_ = cmd; }
_ = count;
_ = p_cmds;
} }
pub export fn strollCmdFillBuffer(p_cmd: vk.CommandBuffer, p_buffer: vk.Buffer, offset: vk.DeviceSize, size: vk.DeviceSize, data: u32) callconv(vk.vulkan_call_conv) void { pub export fn strollCmdFillBuffer(p_cmd: vk.CommandBuffer, p_buffer: vk.Buffer, offset: vk.DeviceSize, size: vk.DeviceSize, data: u32) callconv(vk.vulkan_call_conv) void {

118
src/vulkan/logger.zig git.filemode.normal_file
View File

@@ -0,0 +1,118 @@
const std = @import("std");
const builtin = @import("builtin");
const root = @import("root");
const lib = @import("lib.zig");
// Compile-time contract: any driver that embeds this logger must expose a
// `DRIVER_NAME` declaration at its module root. Skipped under `zig test`,
// where no driver root module exists.
comptime {
    if (!builtin.is_test) {
        if (!@hasDecl(root, "DRIVER_NAME")) {
            @compileError("Missing DRIVER_NAME in module root");
        }
    }
}
// Serializes complete log records across threads (locked in `log` below).
var mutex: std.Io.Mutex = .init;
/// Emit a one-off "FIXME" warning through the scoped logger.
/// No-op when logging is disabled via `lib.config.logs`.
pub inline fn fixme(comptime format: []const u8, args: anytype) void {
    if (lib.config.logs) {
        std.log.scoped(.FIXME).warn("FIXME: " ++ format, args);
    }
}
/// Core log sink: renders one record and writes it line-by-line to stdout
/// (info/debug) or stderr (warn/err), colorized when the target is a
/// terminal. No-op when logging is disabled via `lib.config.logs`.
pub fn log(comptime level: std.log.Level, comptime scope: @EnumLiteral(), comptime format: []const u8, args: anytype) void {
    if (!lib.config.logs) {
        return;
    }
    const scope_name = @tagName(scope);
    // "(scope): " column, truncated with "..." so it fits the 30-character
    // field printed below via "{s: >30}".
    const scope_prefix = comptime blk: {
        const limit = 30 - 4;
        break :blk if (scope_name.len >= limit)
            std.fmt.comptimePrint("({s}...): ", .{scope_name[0..(limit - 3)]})
        else
            std.fmt.comptimePrint("({s}): ", .{scope_name});
    };
    // Left-aligned "[level] " column, padded to 10 characters.
    const prefix = std.fmt.comptimePrint("{s: <10}", .{"[" ++ comptime level.asText() ++ "] "});
    const level_color: std.Io.Terminal.Color = switch (level) {
        .info, .debug => .blue,
        .warn => .magenta,
        .err => .red,
    };
    // Short-lived threaded Io instance, only needed to obtain an `Io` handle
    // for file locking, terminal-mode detection and timestamps.
    const allocator = std.heap.smp_allocator;
    var threaded: std.Io.Threaded = .init(allocator, .{});
    defer threaded.deinit();
    const io = threaded.io();
    const stderr_file = std.Io.File.stderr();
    const stdout_file = std.Io.File.stdout();
    // info/debug go to stdout, warn/err to stderr.
    const file = switch (level) {
        .info, .debug => stdout_file,
        .warn, .err => stderr_file,
    };
    // Exclusive file lock keeps records from other *processes* whole; the
    // module-level `mutex` below does the same for other threads.
    file.lock(io, .exclusive) catch {};
    defer file.unlock(io);
    // NOTE(review): `.cpu_process` looks like a process clock, so the
    // HH:MM:SS printed below would be time since process start rather than
    // wall-clock time of day — confirm this is intended.
    const now = std.Io.Timestamp.now(io, .cpu_process).toMilliseconds();
    const now_ms: u16 = @intCast(@mod(now, std.time.ms_per_s));
    const now_sec: u8 = @intCast(@mod(@divTrunc(now, std.time.ms_per_s), std.time.s_per_min));
    const now_min: u8 = @intCast(@mod(@divTrunc(now, std.time.ms_per_min), 60));
    const now_hour: u8 = @intCast(@mod(@divTrunc(now, std.time.ms_per_hour), 24));
    // Render the user message once into a fixed scratch buffer; anything
    // beyond 4096 bytes is silently truncated by the fixed writer.
    var fmt_buffer = std.mem.zeroes([4096]u8);
    var fmt_writer = std.Io.Writer.fixed(&fmt_buffer);
    fmt_writer.print(format ++ "\n", args) catch {};
    fmt_writer.flush() catch return;
    mutex.lock(io) catch return;
    defer mutex.unlock(io);
    var last_pos: usize = 0;
    // Emit one decorated output line per '\n'-terminated message line.
    while (std.mem.indexOfScalarPos(u8, &fmt_buffer, last_pos, '\n')) |pos| : (last_pos = pos + 1) {
        var buffer = std.mem.zeroes([512]u8);
        var file_writer = file.writer(io, &buffer);
        var writer = &file_writer.interface;
        const term: std.Io.Terminal = .{
            .writer = writer,
            .mode = std.Io.Terminal.Mode.detect(io, file, false, false) catch return,
        };
        term.setColor(.magenta) catch {};
        writer.print("[StrollDriver ", .{}) catch continue;
        if (!builtin.is_test) {
            // `root.DRIVER_NAME` is guaranteed by the comptime check at the
            // top of this file.
            term.setColor(.cyan) catch {};
            writer.print(root.DRIVER_NAME, .{}) catch continue;
        }
        term.setColor(.yellow) catch {};
        writer.print(" {d}:{d}:{d}.{d:0>3}", .{ now_hour, now_min, now_sec, now_ms }) catch continue;
        term.setColor(.magenta) catch {};
        writer.print("]", .{}) catch continue;
        term.setColor(.cyan) catch {};
        writer.print("[Thread {d: >8}]", .{std.Thread.getCurrentId()}) catch continue;
        term.setColor(level_color) catch {};
        writer.print(prefix, .{}) catch continue;
        term.setColor(switch (level) {
            .err => .red,
            .warn => .magenta,
            else => .green,
        }) catch {};
        writer.print("{s: >30}", .{scope_prefix}) catch continue;
        term.setColor(.reset) catch {};
        writer.print("{s}\n", .{fmt_buffer[last_pos..pos]}) catch continue;
        writer.flush() catch continue;
    }
}

View File

@@ -1,41 +0,0 @@
//! Buffer of pending, pre-formatted log lines: `pushBack`/`popBack` give
//! stack behavior, `popFront` drains in insertion order.
const std = @import("std");
const Self = @This();
/// One buffered log line plus the context it was recorded in.
pub const Element = struct {
    // Pre-formatted log text (fixed 512-byte, zero-padded buffer).
    log: [512]u8,
    // Indentation depth at the time the line was recorded.
    indent_level: usize,
    // Severity the line was logged at.
    log_level: std.log.Level,
};
// Backing storage, paired with `allocator` below.
stack: std.ArrayList(Element),
// NOTE(review): defaults to the C allocator — presumably because the driver
// runs inside a C host process; confirm.
allocator: std.mem.Allocator = std.heap.c_allocator,
/// An empty stack using the default allocator.
pub const empty: Self = .{
    .stack = .empty,
};
/// Append `element` at the back; fails on allocation error.
pub fn pushBack(self: *Self, element: Element) !void {
    try self.stack.append(self.allocator, element);
}
/// Remove and return the most recently pushed element, or null when empty.
pub fn popBack(self: *Self) ?Element {
    return self.stack.pop();
}
/// Remove and return the oldest element.
/// NOTE(review): unlike `popBack` this is not optional — `orderedRemove(0)`
/// on an empty list is illegal; callers must check `len()` first.
pub fn popFront(self: *Self) Element {
    return self.stack.orderedRemove(0);
}
/// Peek at the most recently pushed element without removing it.
pub fn getLastOrNull(self: *Self) ?Element {
    return self.stack.getLastOrNull();
}
/// Number of buffered elements.
pub inline fn len(self: *Self) usize {
    return self.stack.items.len;
}
/// Free the backing storage and reset to `empty`.
pub fn deinit(self: *Self) void {
    self.stack.deinit(self.allocator);
    self.* = .empty;
}

View File

@@ -1,54 +0,0 @@
//! Per-thread logging state: an indentation depth plus a stack of deferred
//! debug lines (see DebugStack).
const std = @import("std");
const DebugStack = @import("DebugStack.zig");
const lib = @import("../lib.zig");
const Self = @This();
// Whether "> " indent prefixes are applied when printing.
indent_enabled: bool,
// Current nesting depth; saturates at both ends (see indent/unindent).
indent_level: usize,
// Deferred debug lines recorded at deeper indent levels.
debug_stack: DebugStack,
pub const init: Self = .{
    .indent_enabled = true,
    .indent_level = 0,
    .debug_stack = .empty,
};
/// Increase nesting depth by one; saturates instead of wrapping on overflow.
/// No-op when logging is disabled.
pub fn indent(self: *Self) void {
    if (lib.getLogVerboseLevel() == .None) {
        return;
    }
    const new_indent_level, const has_overflown = @addWithOverflow(self.indent_level, 1);
    if (has_overflown == 0) {
        self.indent_level = new_indent_level;
    }
}
/// Decrease nesting depth by one (saturating at zero), then drop any
/// deferred debug lines recorded at or below the abandoned depth.
pub fn unindent(self: *Self) void {
    if (lib.getLogVerboseLevel() == .None) {
        return;
    }
    const new_indent_level, const has_overflown = @subWithOverflow(self.indent_level, 1);
    if (has_overflown == 0) {
        self.indent_level = new_indent_level;
    }
    // Trim stale deferred entries from scopes that are now closed.
    loop: while (self.debug_stack.getLastOrNull()) |last| {
        if (last.indent_level >= self.indent_level) {
            _ = self.debug_stack.popBack();
        } else {
            break :loop;
        }
    }
}
/// Re-enable indentation prefixes.
pub inline fn enableIndent(self: *Self) void {
    self.indent_enabled = true;
}
/// Temporarily suppress indentation prefixes (used by `fixme`-style logs).
pub inline fn disableIndent(self: *Self) void {
    self.indent_enabled = false;
}
/// Release the deferred-line stack.
pub inline fn deinit(self: *Self) void {
    self.debug_stack.deinit();
}

View File

@@ -1,39 +0,0 @@
//! Thread-safe registry mapping each OS thread to its own logging `Manager`.
const std = @import("std");
const Manager = @import("Manager.zig");
const Self = @This();
// Per-thread managers, keyed by OS thread id. Guarded by `mutex`.
managers: std.AutoArrayHashMapUnmanaged(std.Thread.Id, Manager),
// Thread-safe wrapper around the C allocator backing the map's storage.
allocator: std.heap.ThreadSafeAllocator,
// Protects `managers` against concurrent access from `get`/`deinit`.
mutex: std.Thread.Mutex,
pub const init: Self = .{
    .managers = .empty,
    .allocator = .{ .child_allocator = std.heap.c_allocator },
    .mutex = .{},
};
/// Return the calling thread's `Manager`, creating it on first use.
/// Panics on allocation failure.
pub fn get(self: *Self) *Manager {
    const allocator = self.allocator.allocator();
    self.mutex.lock();
    defer self.mutex.unlock();
    return (self.managers.getOrPutValue(allocator, std.Thread.getCurrentId(), .init) catch @panic("Out of memory")).value_ptr;
}
/// Tear down the calling thread's `Manager`; the last thread to leave also
/// frees the map itself and resets the registry to its initial state.
pub fn deinit(self: *Self) void {
    // Fix: read the post-removal count while still holding the mutex. The
    // previous revision checked `count()` after unlocking, which raced with
    // concurrent `get()` calls inserting new managers.
    const is_empty = blk: {
        self.mutex.lock();
        defer self.mutex.unlock();
        if (self.managers.getPtr(std.Thread.getCurrentId())) |manager| {
            manager.deinit();
            _ = self.managers.orderedRemove(std.Thread.getCurrentId());
        }
        break :blk self.managers.count() == 0;
    };
    // NOTE(review): as before, the final teardown below cannot hold the
    // mutex (`self.* = .init` replaces the mutex itself), so it assumes no
    // other thread calls `get` concurrently with the last `deinit`.
    if (is_empty) {
        self.managers.deinit(self.allocator.allocator());
        self.* = .init;
    }
}

View File

@@ -1,146 +0,0 @@
//! A driver-global logger that stack in memory all same-indent `debug` logs
//! and only displays them in reverse order if a non-debug log is requested
const std = @import("std");
const builtin = @import("builtin");
const zdt = @import("zdt");
const root = @import("root");
const lib = @import("../lib.zig");
const ThreadSafeManager = @import("ThreadSafeManager.zig");
// Compile-time contract: drivers embedding this logger must declare
// `DRIVER_NAME` at their module root (skipped under `zig test`).
comptime {
    if (!builtin.is_test) {
        if (!@hasDecl(root, "DRIVER_NAME")) {
            @compileError("Missing DRIVER_NAME in module root");
        }
    }
}
// Process-wide registry of per-thread logging managers.
var manager: ThreadSafeManager = .init;
/// Accessor for the global per-thread manager registry.
pub inline fn getManager() *ThreadSafeManager {
    return &manager;
}
/// Log a "FIXME" warning at top level: indentation is suppressed for the
/// duration of the call so the note is not nested under the current scope.
/// No-op when verbose logging is disabled.
pub inline fn fixme(comptime format: []const u8, args: anytype) void {
    if (lib.getLogVerboseLevel() != .None) {
        getManager().get().disableIndent();
        defer getManager().get().enableIndent();
        nestedFixme(format, args);
    }
}
/// Log a "FIXME" warning at the current indentation level.
/// No-op when verbose logging is disabled.
pub inline fn nestedFixme(comptime format: []const u8, args: anytype) void {
    if (lib.getLogVerboseLevel() != .None) {
        std.log.scoped(.FIXME).warn("FIXME: " ++ format, args);
    }
}
/// Core sink of the stacked logger: formats the record, then writes it
/// line-by-line with ANSI colors. At `Standard` verbosity, `debug` lines are
/// buffered per-thread and only flushed when a higher-severity line arrives.
pub fn log(comptime level: std.log.Level, comptime scope: @Type(.enum_literal), comptime format: []const u8, args: anytype) void {
    if (lib.getLogVerboseLevel() == .None) {
        return;
    }
    const scope_name = @tagName(scope);
    // "(scope): " column, truncated with "..." to fit the 30-char field.
    const scope_prefix = comptime blk: {
        const limit = 30 - 4;
        break :blk if (scope_name.len >= limit)
            std.fmt.comptimePrint("({s}...): ", .{scope_name[0..(limit - 3)]})
        else
            std.fmt.comptimePrint("({s}): ", .{scope_name});
    };
    // Left-aligned "[level] " column, padded to 10 characters.
    const prefix = std.fmt.comptimePrint("{s: <10}", .{"[" ++ comptime level.asText() ++ "] "});
    const level_color: std.Io.tty.Color = switch (level) {
        .info, .debug => .blue,
        .warn => .magenta,
        .err => .red,
    };
    // Serialize with other users of std.debug's stderr lock.
    std.debug.lockStdErr();
    defer std.debug.unlockStdErr();
    var stderr_file = std.fs.File.stderr();
    var stdout_file = std.fs.File.stdout();
    // info/debug go to stdout, warn/err to stderr.
    const file = switch (level) {
        .info, .debug => stdout_file,
        .warn, .err => stderr_file,
    };
    // Local wall-clock timestamp; falls back to UTC if tz lookup fails.
    var timezone = zdt.Timezone.tzLocal(std.heap.page_allocator) catch zdt.Timezone.UTC;
    defer timezone.deinit();
    const now = zdt.Datetime.now(.{ .tz = &timezone }) catch zdt.Datetime{};
    // Render the user message once into a fixed scratch buffer; overly long
    // messages are silently truncated by the fixed writer.
    var fmt_buffer = std.mem.zeroes([4096]u8);
    var fmt_writer = std.Io.Writer.fixed(&fmt_buffer);
    fmt_writer.print(format ++ "\n", args) catch {};
    fmt_writer.flush() catch return;
    var last_pos: usize = 0;
    // One decorated output line per '\n'-terminated message line.
    while (std.mem.indexOfScalarPos(u8, &fmt_buffer, last_pos, '\n')) |pos| {
        var buffer = std.mem.zeroes([512]u8);
        var out_config = std.Io.tty.Config.detect(file);
        var writer = std.Io.Writer.fixed(&buffer);
        out_config.setColor(&writer, .magenta) catch {};
        writer.print("[StrollDriver ", .{}) catch {};
        if (!builtin.is_test) {
            // `root.DRIVER_NAME` is guaranteed by the comptime check above.
            out_config.setColor(&writer, .cyan) catch {};
            writer.print(root.DRIVER_NAME, .{}) catch {};
        }
        out_config.setColor(&writer, .yellow) catch {};
        writer.print(" {d:02}:{d:02}:{d:02}.{d:03}", .{ now.hour, now.minute, now.second, @divFloor(now.nanosecond, std.time.ns_per_ms) }) catch {};
        out_config.setColor(&writer, .magenta) catch {};
        writer.print("]", .{}) catch {};
        out_config.setColor(&writer, level_color) catch {};
        writer.print(prefix, .{}) catch {};
        out_config.setColor(&writer, switch (level) {
            .err => .red,
            .warn => .magenta,
            else => .green,
        }) catch {};
        writer.print("{s: >30}", .{scope_prefix}) catch {};
        out_config.setColor(&writer, .reset) catch {};
        // Visual nesting markers, one "> " per indent level.
        if (getManager().get().indent_enabled) {
            for (0..getManager().get().indent_level) |_| {
                writer.print("> ", .{}) catch {};
            }
        }
        writer.print("{s}\n", .{fmt_buffer[last_pos..pos]}) catch {};
        writer.flush() catch return;
        // At Standard verbosity, defer debug lines instead of printing them.
        if (level == .debug and lib.getLogVerboseLevel() == .Standard) {
            getManager().get().debug_stack.pushBack(.{
                .log = buffer,
                .indent_level = getManager().get().indent_level,
                .log_level = level,
            }) catch return;
            return;
        }
        // A non-deferred line first flushes all previously deferred debug
        // lines, in insertion order.
        if (getManager().get().indent_enabled) {
            while (getManager().get().debug_stack.len() != 0) {
                const elem = getManager().get().debug_stack.popFront();
                // NOTE(review): writes the full 512-byte fixed buffer,
                // including trailing NUL padding — confirm this is intended.
                switch (elem.log_level) {
                    .info, .debug => _ = stdout_file.write(&elem.log) catch {},
                    .warn, .err => _ = stderr_file.write(&elem.log) catch {},
                }
            }
        }
        switch (level) {
            .info, .debug => _ = stdout_file.write(&buffer) catch {},
            .warn, .err => _ = stderr_file.write(&buffer) catch {},
        }
        last_pos = pos + 1;
    }
}

View File

@@ -1,215 +0,0 @@
#include <stdio.h>
#include <stdlib.h>
#define VK_NO_PROTOTYPES
#include <vulkan/vulkan_core.h>
#include <unistd.h>
#include <dlfcn.h>
#ifndef LIBVK
#define LIBVK "vulkan"
#endif
#define VOLK_IMPLEMENTATION
#include <volk.h>
#define KVF_IMPLEMENTATION
#define KVF_ENABLE_VALIDATION_LAYERS
#define KVF_NO_KHR
#include <kvf.h>
#define STB_IMAGE_WRITE_IMPLEMENTATION
#include <stb_image_write.h>
/* Pre-compiled SPIR-V vertex shader: emits a hard-coded fullscreen-ish
 * triangle (3 positions, 3 RGB colors indexed by gl_VertexIndex) and passes
 * the color to the fragment stage. Do not edit by hand. */
static const uint32_t vertex_shader[] = {
	0x07230203,0x00010000,0x000d000b,0x00000036,0x00000000,0x00020011,0x00000001,0x0006000b,0x00000001,0x4c534c47,0x6474732e,0x3035342e,
	0x00000000,0x0003000e,0x00000000,0x00000001,0x0008000f,0x00000000,0x00000004,0x6e69616d,0x00000000,0x00000022,0x00000026,0x00000031,
	0x00030003,0x00000002,0x000001c2,0x000a0004,0x475f4c47,0x4c474f4f,0x70635f45,0x74735f70,0x5f656c79,0x656e696c,0x7269645f,0x69746365,
	0x00006576,0x00080004,0x475f4c47,0x4c474f4f,0x6e695f45,0x64756c63,0x69645f65,0x74636572,0x00657669,0x00040005,0x00000004,0x6e69616d,
	0x00000000,0x00050005,0x0000000c,0x69736f70,0x6e6f6974,0x00000073,0x00040005,0x00000017,0x6f6c6f63,0x00007372,0x00060005,0x00000020,
	0x505f6c67,0x65567265,0x78657472,0x00000000,0x00060006,0x00000020,0x00000000,0x505f6c67,0x7469736f,0x006e6f69,0x00070006,0x00000020,
	0x00000001,0x505f6c67,0x746e696f,0x657a6953,0x00000000,0x00070006,0x00000020,0x00000002,0x435f6c67,0x4470696c,0x61747369,0x0065636e,
	0x00070006,0x00000020,0x00000003,0x435f6c67,0x446c6c75,0x61747369,0x0065636e,0x00030005,0x00000022,0x00000000,0x00060005,0x00000026,
	0x565f6c67,0x65747265,0x646e4978,0x00007865,0x00050005,0x00000031,0x67617266,0x6f6c6f43,0x00000072,0x00050048,0x00000020,0x00000000,
	0x0000000b,0x00000000,0x00050048,0x00000020,0x00000001,0x0000000b,0x00000001,0x00050048,0x00000020,0x00000002,0x0000000b,0x00000003,
	0x00050048,0x00000020,0x00000003,0x0000000b,0x00000004,0x00030047,0x00000020,0x00000002,0x00040047,0x00000026,0x0000000b,0x0000002a,
	0x00040047,0x00000031,0x0000001e,0x00000000,0x00020013,0x00000002,0x00030021,0x00000003,0x00000002,0x00030016,0x00000006,0x00000020,
	0x00040017,0x00000007,0x00000006,0x00000002,0x00040015,0x00000008,0x00000020,0x00000000,0x0004002b,0x00000008,0x00000009,0x00000003,
	0x0004001c,0x0000000a,0x00000007,0x00000009,0x00040020,0x0000000b,0x00000006,0x0000000a,0x0004003b,0x0000000b,0x0000000c,0x00000006,
	0x0004002b,0x00000006,0x0000000d,0x00000000,0x0004002b,0x00000006,0x0000000e,0xbf000000,0x0005002c,0x00000007,0x0000000f,0x0000000d,
	0x0000000e,0x0004002b,0x00000006,0x00000010,0x3f000000,0x0005002c,0x00000007,0x00000011,0x00000010,0x00000010,0x0005002c,0x00000007,
	0x00000012,0x0000000e,0x00000010,0x0006002c,0x0000000a,0x00000013,0x0000000f,0x00000011,0x00000012,0x00040017,0x00000014,0x00000006,
	0x00000003,0x0004001c,0x00000015,0x00000014,0x00000009,0x00040020,0x00000016,0x00000006,0x00000015,0x0004003b,0x00000016,0x00000017,
	0x00000006,0x0004002b,0x00000006,0x00000018,0x3f800000,0x0006002c,0x00000014,0x00000019,0x00000018,0x0000000d,0x0000000d,0x0006002c,
	0x00000014,0x0000001a,0x0000000d,0x00000018,0x0000000d,0x0006002c,0x00000014,0x0000001b,0x0000000d,0x0000000d,0x00000018,0x0006002c,
	0x00000015,0x0000001c,0x00000019,0x0000001a,0x0000001b,0x00040017,0x0000001d,0x00000006,0x00000004,0x0004002b,0x00000008,0x0000001e,
	0x00000001,0x0004001c,0x0000001f,0x00000006,0x0000001e,0x0006001e,0x00000020,0x0000001d,0x00000006,0x0000001f,0x0000001f,0x00040020,
	0x00000021,0x00000003,0x00000020,0x0004003b,0x00000021,0x00000022,0x00000003,0x00040015,0x00000023,0x00000020,0x00000001,0x0004002b,
	0x00000023,0x00000024,0x00000000,0x00040020,0x00000025,0x00000001,0x00000023,0x0004003b,0x00000025,0x00000026,0x00000001,0x00040020,
	0x00000028,0x00000006,0x00000007,0x00040020,0x0000002e,0x00000003,0x0000001d,0x00040020,0x00000030,0x00000003,0x00000014,0x0004003b,
	0x00000030,0x00000031,0x00000003,0x00040020,0x00000033,0x00000006,0x00000014,0x00050036,0x00000002,0x00000004,0x00000000,0x00000003,
	0x000200f8,0x00000005,0x0003003e,0x0000000c,0x00000013,0x0003003e,0x00000017,0x0000001c,0x0004003d,0x00000023,0x00000027,0x00000026,
	0x00050041,0x00000028,0x00000029,0x0000000c,0x00000027,0x0004003d,0x00000007,0x0000002a,0x00000029,0x00050051,0x00000006,0x0000002b,
	0x0000002a,0x00000000,0x00050051,0x00000006,0x0000002c,0x0000002a,0x00000001,0x00070050,0x0000001d,0x0000002d,0x0000002b,0x0000002c,
	0x0000000d,0x00000018,0x00050041,0x0000002e,0x0000002f,0x00000022,0x00000024,0x0003003e,0x0000002f,0x0000002d,0x0004003d,0x00000023,
	0x00000032,0x00000026,0x00050041,0x00000033,0x00000034,0x00000017,0x00000032,0x0004003d,0x00000014,0x00000035,0x00000034,0x0003003e,
	0x00000031,0x00000035,0x000100fd,0x00010038
};
/* Pre-compiled SPIR-V fragment shader: forwards the interpolated RGB color
 * with alpha forced to 1.0. Do not edit by hand. */
static const uint32_t fragment_shader[] = {
	0x07230203,0x00010000,0x000d000b,0x00000013,0x00000000,0x00020011,0x00000001,0x0006000b,0x00000001,0x4c534c47,0x6474732e,0x3035342e,
	0x00000000,0x0003000e,0x00000000,0x00000001,0x0007000f,0x00000004,0x00000004,0x6e69616d,0x00000000,0x00000009,0x0000000c,0x00030010,
	0x00000004,0x00000007,0x00030003,0x00000002,0x000001c2,0x000a0004,0x475f4c47,0x4c474f4f,0x70635f45,0x74735f70,0x5f656c79,0x656e696c,
	0x7269645f,0x69746365,0x00006576,0x00080004,0x475f4c47,0x4c474f4f,0x6e695f45,0x64756c63,0x69645f65,0x74636572,0x00657669,0x00040005,
	0x00000004,0x6e69616d,0x00000000,0x00050005,0x00000009,0x4374756f,0x726f6c6f,0x00000000,0x00050005,0x0000000c,0x67617266,0x6f6c6f43,
	0x00000072,0x00040047,0x00000009,0x0000001e,0x00000000,0x00040047,0x0000000c,0x0000001e,0x00000000,0x00020013,0x00000002,0x00030021,
	0x00000003,0x00000002,0x00030016,0x00000006,0x00000020,0x00040017,0x00000007,0x00000006,0x00000004,0x00040020,0x00000008,0x00000003,
	0x00000007,0x0004003b,0x00000008,0x00000009,0x00000003,0x00040017,0x0000000a,0x00000006,0x00000003,0x00040020,0x0000000b,0x00000001,
	0x0000000a,0x0004003b,0x0000000b,0x0000000c,0x00000001,0x0004002b,0x00000006,0x0000000e,0x3f800000,0x00050036,0x00000002,0x00000004,
	0x00000000,0x00000003,0x000200f8,0x00000005,0x0004003d,0x0000000a,0x0000000d,0x0000000c,0x00050051,0x00000006,0x0000000f,0x0000000d,
	0x00000000,0x00050051,0x00000006,0x00000010,0x0000000d,0x00000001,0x00050051,0x00000006,0x00000011,0x0000000d,0x00000002,0x00070050,
	0x00000007,0x00000012,0x0000000f,0x00000010,0x00000011,0x0000000e,0x0003003e,0x00000009,0x00000012,0x000100fd,0x00010038
};
/*
 * Allocate device memory satisfying `buffer`'s requirements with the given
 * property flags, bind it at offset 0, and return the allocation.
 * Errors abort via kvfCheckVk; the caller owns the memory (vkFreeMemory).
 */
VkDeviceMemory CreateAndBindMemoryToBuffer(VkPhysicalDevice physical_device, VkDevice device, VkBuffer buffer, VkMemoryPropertyFlags props)
{
	VkMemoryRequirements reqs;
	vkGetBufferMemoryRequirements(device, buffer, &reqs);

	const VkMemoryAllocateInfo alloc = {
		.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
		.allocationSize = reqs.size,
		.memoryTypeIndex = kvfFindMemoryType(physical_device, reqs.memoryTypeBits, props),
	};

	VkDeviceMemory mem;
	kvfCheckVk(vkAllocateMemory(device, &alloc, NULL, &mem));
	kvfCheckVk(vkBindBufferMemory(device, buffer, mem, 0));
	return mem;
}
/*
 * Allocate device memory satisfying `image`'s requirements with the given
 * property flags, bind it at offset 0, and return the allocation.
 * Errors abort via kvfCheckVk; the caller owns the memory (vkFreeMemory).
 */
VkDeviceMemory CreateAndBindMemoryToImage(VkPhysicalDevice physical_device, VkDevice device, VkImage image, VkMemoryPropertyFlags props)
{
	VkMemoryRequirements reqs;
	vkGetImageMemoryRequirements(device, image, &reqs);

	const VkMemoryAllocateInfo alloc = {
		.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
		.allocationSize = reqs.size,
		.memoryTypeIndex = kvfFindMemoryType(physical_device, reqs.memoryTypeBits, props),
	};

	VkDeviceMemory mem;
	kvfCheckVk(vkAllocateMemory(device, &alloc, NULL, &mem));
	kvfCheckVk(vkBindImageMemory(device, image, mem, 0));
	return mem;
}
/*
 * Offscreen smoke test: loads the driver under test directly through
 * VK_LUNARG_direct_driver_loading, renders one RGB triangle into a 600x400
 * linear host-visible image, and dumps the result to res.png.
 *
 * Fixes over the previous revision:
 *  - the PNG was written as 256x256 with a 256*4 stride while the image is
 *    600x400, dumping a garbage sub-region; dimensions are now shared
 *    constants used everywhere.
 *  - `= {}` empty initializers (a C23/GNU extension) replaced with the
 *    portable `= {0}`.
 *  - "openned" typo in the status message.
 */
int main(void)
{
	enum { IMG_WIDTH = 600, IMG_HEIGHT = 400 };

	volkInitialize();

	/* Load the driver shared object directly, bypassing the system loader. */
	void* lib = dlopen("./zig-out/lib/lib" LIBVK ".so", RTLD_NOW | RTLD_LOCAL);
	if(!lib)
	{
		fprintf(stderr, "Could not open driver lib: %s\n", dlerror());
		exit(EXIT_FAILURE);
	}
	puts("opened ./zig-out/lib/lib" LIBVK ".so");

	/* Route all Vulkan calls exclusively to the dlopen()ed driver. */
	VkDirectDriverLoadingInfoLUNARG direct_loading_info = {0};
	direct_loading_info.sType = VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_INFO_LUNARG;
	direct_loading_info.pfnGetInstanceProcAddr = (PFN_vkGetInstanceProcAddrLUNARG)(dlsym(lib, "vk_icdGetInstanceProcAddr"));
	VkDirectDriverLoadingListLUNARG direct_driver_list = {0};
	direct_driver_list.sType = VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_LIST_LUNARG;
	direct_driver_list.mode = VK_DIRECT_DRIVER_LOADING_MODE_EXCLUSIVE_LUNARG;
	direct_driver_list.driverCount = 1;
	direct_driver_list.pDrivers = &direct_loading_info;
	const char* extensions[] = { VK_LUNARG_DIRECT_DRIVER_LOADING_EXTENSION_NAME };
	VkInstance instance = kvfCreateInstanceNext(extensions, 1, &direct_driver_list);
	volkLoadInstance(instance);
	VkPhysicalDevice physical_device = kvfPickGoodPhysicalDevice(instance, VK_NULL_HANDLE, NULL, 0);
	VkDevice device = kvfCreateDevice(physical_device, NULL, 0, NULL);
	volkLoadDevice(device);

	/* Linear tiling + host-visible memory so the rendered pixels can be
	 * mapped and read back directly on the CPU. */
	VkImage image = kvfCreateImage(device, IMG_WIDTH, IMG_HEIGHT, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_TILING_LINEAR, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, KVF_IMAGE_COLOR);
	VkDeviceMemory memory = CreateAndBindMemoryToImage(physical_device, device, image, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
	VkImageView image_view = kvfCreateImageView(device, image, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_ASPECT_COLOR_BIT, 1);
	VkAttachmentDescription attachment = kvfBuildAttachmentDescription(KVF_IMAGE_COLOR, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, true, VK_SAMPLE_COUNT_1_BIT);
	VkRenderPass renderpass = kvfCreateRenderPass(device, &attachment, 1, VK_PIPELINE_BIND_POINT_GRAPHICS);
	VkFramebuffer framebuffer = kvfCreateFramebuffer(device, renderpass, &image_view, 1, (VkExtent2D){ .width = IMG_WIDTH, .height = IMG_HEIGHT });

	VkShaderModule vertex_shader_module = kvfCreateShaderModule(device, (uint32_t*)vertex_shader, sizeof(vertex_shader) / sizeof(uint32_t));
	VkShaderModule fragment_shader_module = kvfCreateShaderModule(device, (uint32_t*)fragment_shader, sizeof(fragment_shader) / sizeof(uint32_t));
	VkPipelineLayout pipeline_layout = kvfCreatePipelineLayout(device, NULL, 0, NULL, 0);
	KvfGraphicsPipelineBuilder* builder = kvfCreateGPipelineBuilder();
	kvfGPipelineBuilderSetInputTopology(builder, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST);
	kvfGPipelineBuilderSetPolygonMode(builder, VK_POLYGON_MODE_FILL, 1.0f);
	kvfGPipelineBuilderSetCullMode(builder, VK_CULL_MODE_NONE, VK_FRONT_FACE_CLOCKWISE);
	kvfGPipelineBuilderSetMultisampling(builder, VK_SAMPLE_COUNT_1_BIT);
	kvfGPipelineBuilderAddShaderStage(builder, VK_SHADER_STAGE_VERTEX_BIT, vertex_shader_module, "main");
	kvfGPipelineBuilderAddShaderStage(builder, VK_SHADER_STAGE_FRAGMENT_BIT, fragment_shader_module, "main");
	kvfGPipelineBuilderDisableDepthTest(builder);
	kvfGPipelineBuilderDisableBlending(builder);
	VkPipeline pipeline = kvfCreateGraphicsPipeline(device, VK_NULL_HANDLE, pipeline_layout, builder, renderpass);
	kvfDestroyGPipelineBuilder(builder);
	/* Shader modules may be destroyed as soon as the pipeline is baked. */
	kvfDestroyShaderModule(device, vertex_shader_module);
	kvfDestroyShaderModule(device, fragment_shader_module);

	VkCommandBuffer cmd = kvfCreateCommandBuffer(device);
	kvfCheckVk(vkResetCommandBuffer(cmd, 0));
	kvfBeginCommandBuffer(cmd, 0);
	{
		vkCmdBindPipeline(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline);
		VkClearValue clear_color = {{{0.0f, 0.0f, 0.0f, 1.0f}}};
		kvfBeginRenderPass(renderpass, cmd, framebuffer, (VkExtent2D){ .width = IMG_WIDTH, .height = IMG_HEIGHT }, &clear_color, 1);
		VkViewport viewport = { 0 };
		viewport.width = IMG_WIDTH;
		viewport.height = IMG_HEIGHT;
		viewport.maxDepth = 1.0f;
		vkCmdSetViewport(cmd, 0, 1, &viewport);
		VkRect2D scissor = { 0 };
		scissor.extent = (VkExtent2D){ .width = IMG_WIDTH, .height = IMG_HEIGHT };
		vkCmdSetScissor(cmd, 0, 1, &scissor);
		vkCmdDraw(cmd, 3, 1, 0, 0);
		vkCmdEndRenderPass(cmd);
	}
	kvfEndCommandBuffer(cmd);
	VkPipelineStageFlags wait_stages[] = { VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT };
	kvfSubmitCommandBuffer(device, cmd, KVF_GRAPHICS_QUEUE, VK_NULL_HANDLE, VK_NULL_HANDLE, VK_NULL_HANDLE, wait_stages);
	vkDeviceWaitIdle(device);

	/* Read back and dump. NOTE(review): assumes the linear image's row pitch
	 * is exactly IMG_WIDTH * 4 and the chosen memory type is host-coherent;
	 * strictly this should query vkGetImageSubresourceLayout and, for
	 * non-coherent memory, call vkInvalidateMappedMemoryRanges first. */
	void* map = NULL;
	kvfCheckVk(vkMapMemory(device, memory, 0, VK_WHOLE_SIZE, 0, &map));
	if(!stbi_write_png("res.png", IMG_WIDTH, IMG_HEIGHT, 4, map, IMG_WIDTH * 4))
		fprintf(stderr, "Failed to write result image to file\n");
	vkUnmapMemory(device, memory);
	vkDeviceWaitIdle(device);

	kvfDestroyPipelineLayout(device, pipeline_layout);
	kvfDestroyPipeline(device, pipeline);
	kvfDestroyRenderPass(device, renderpass);
	kvfDestroyImageView(device, image_view);
	kvfDestroyImage(device, image);
	vkFreeMemory(device, memory, NULL);
	kvfDestroyFramebuffer(device, framebuffer);
	kvfDestroyDevice(device);
	kvfDestroyInstance(instance);
	dlclose(lib);
	return 0;
}