Put Vulkan in your GTK with VkArea.
1const std = @import("std");
2const builtin = @import("builtin");
3
4const gobject = @import("gobject");
5const glib = @import("glib");
6const gio = @import("gio");
7const gtk = @import("gtk");
8const gdk = @import("gdk");
9const vk = @import("vulkan");
10
11const log = std.log.scoped(.vk_area);
12
/// Loader entry point resolved directly from the system Vulkan library.
/// Used to bootstrap the base dispatch table (`vk.BaseWrapper`) before any
/// instance exists.
const vkGetInstanceProcAddr = @extern(vk.PfnGetInstanceProcAddr, .{
    .name = "vkGetInstanceProcAddr",
    .library_name = "vulkan",
});
17
/// Target rendering format for the offscreen image.
/// NOTE(review): this must stay consistent with the `"AR24"` DRM fourcc
/// used in `exportDmabufTexture` — confirm if either is ever changed.
pub const format: vk.Format = .b8g8r8a8_srgb;
20
/// Renders Vulkan content offscreen and composites it into the GTK scene
/// graph as a DMABUF-backed `gdk.Texture`.
///
/// Consumers connect to the `setup`, `render`, `resize` and `teardown`
/// signals to manage their own resources and record draw commands; the
/// widget owns the Vulkan instance, device, command pool/buffer and the
/// render target image.
pub const VkArea = extern struct {
    parent: Parent,
    const Self = @This();

    pub const Parent = gtk.Widget;
    pub const getGObjectType = gobject.ext.defineClass(Self, .{
        .instanceInit = init,
        .classInit = Class.init,
        .parent_class = &Class.parent,
        .private = .{ .Type = Private, .offset = &Private.offset },
    });

    pub const signals = struct {
        /// Emitted once after Vulkan initialization succeeds (on realize).
        pub const setup = struct {
            pub const name = "setup";
            pub const connect = impl.connect;
            const impl = gobject.ext.defineSignal(
                name,
                Self,
                &.{*const RenderContext},
                void,
            );
        };
        /// Emitted just before the Vulkan objects are destroyed (on unrealize).
        pub const teardown = struct {
            pub const name = "teardown";
            pub const connect = impl.connect;
            const impl = gobject.ext.defineSignal(
                name,
                Self,
                &.{*const RenderContext},
                void,
            );
        };
        /// Emitted each frame while the command buffer is recording;
        /// handlers append their draw commands to `RenderContext.cmds`.
        pub const render = struct {
            pub const name = "render";
            pub const connect = impl.connect;
            const impl = gobject.ext.defineSignal(
                name,
                Self,
                &.{*const RenderContext},
                void,
            );
        };
        /// Emitted when the backing extent changes; arguments are the new
        /// width and height in device pixels (widget size * scale factor).
        pub const resize = struct {
            pub const name = "resize";
            pub const connect = impl.connect;
            const impl = gobject.ext.defineSignal(
                name,
                Self,
                &.{
                    c_uint, // Width
                    c_uint, // Height
                },
                void,
            );
        };
    };

    const Private = struct {
        // Lives for the whole widget lifetime; backs the extension and
        // layer name lists below.
        arena: std.heap.ArenaAllocator,
        instance_extensions: std.ArrayList([*:0]const u8),
        device_extensions: std.ArrayList([*:0]const u8),
        validation_layers: std.ArrayList([*:0]const u8),

        // Dynamic dispatch tables for Vulkan objects.
        //
        // Proxy objects rely on them having a static address (i.e. being
        // pinned), so we must place them as private fields.
        vkb: vk.BaseWrapper,
        vki: vk.InstanceWrapper,
        vkd: vk.DeviceWrapper,

        instance: vk.InstanceProxy,
        device: vk.DeviceProxy,

        physical_device: vk.PhysicalDevice,
        pool: vk.CommandPool,
        cmds: vk.CommandBuffer,
        target: Target,

        graphics_queue: vk.Queue,
        graphics_queue_family: u32,
        drm_props: vk.DrmFormatModifierPropertiesEXT,

        extent: vk.Extent2D,
        needs_resize: bool,
        // Set once vkInit() completes; guards unrealize()/snapshot()
        // against touching undefined Vulkan handles after a failed init.
        vk_ready: bool,
        pub var offset: c_int = 0;
    };

    //-------------------------------------------------------------------------
    // Public methods

    /// Creates a new, unrealized VkArea widget.
    pub fn new() *Self {
        return gobject.ext.newInstance(Self, .{});
    }

    /// Requests additional Vulkan instance extensions.
    /// Must be called before the widget is realized to take effect.
    pub fn addInstanceExtensions(self: *Self, extensions: []const [*:0]const u8) !void {
        const priv = self.private();
        try priv.instance_extensions.appendSlice(
            priv.arena.allocator(),
            extensions,
        );
    }

    /// Requests additional Vulkan device extensions.
    /// Must be called before the widget is realized to take effect.
    pub fn addDeviceExtensions(self: *Self, extensions: []const [*:0]const u8) !void {
        const priv = self.private();
        try priv.device_extensions.appendSlice(
            priv.arena.allocator(),
            extensions,
        );
    }

    /// Requests validation layers to enable on the instance.
    /// Must be called before the widget is realized to take effect.
    pub fn addValidationLayers(self: *Self, layers: []const [*:0]const u8) !void {
        const priv = self.private();
        try priv.validation_layers.appendSlice(
            priv.arena.allocator(),
            layers,
        );
    }

    //-------------------------------------------------------------------------
    // Lifecycle methods

    fn init(self: *Self, _: *Class) callconv(.c) void {
        const priv = self.private();

        priv.* = .{
            .arena = .init(std.heap.c_allocator),
            .needs_resize = false,
            .vk_ready = false,

            .vkb = .load(vkGetInstanceProcAddr),
            .vki = undefined,
            .vkd = undefined,

            .instance_extensions = .empty,
            .device_extensions = .empty,
            .validation_layers = .empty,

            // The rest will be initialized when realized.
            .instance = undefined,
            .device = undefined,

            .physical_device = .null_handle,
            .pool = .null_handle,
            .cmds = .null_handle,
            .target = .{},

            .graphics_queue = .null_handle,
            .graphics_queue_family = vk.QUEUE_FAMILY_IGNORED,
            .drm_props = undefined,
            .extent = .{ .width = 0, .height = 0 },
        };
    }

    fn finalize(self: *Self) callconv(.c) void {
        const priv = self.private();

        // Note that we do not try to deinit Vulkan devices here,
        // since they should be cleaned up when the surface is unrealized.
        priv.arena.deinit();

        gobject.Object.virtual_methods.finalize.call(Class.parent, self);
    }

    //-------------------------------------------------------------------------
    // Virtual method implementations

    fn realize(self: *Self) callconv(.c) void {
        const priv = self.private();
        gtk.Widget.virtual_methods.realize.call(
            Class.parent,
            self.as(gtk.Widget),
        );

        const widget = self.as(gtk.Widget);
        const scale = widget.getScaleFactor();
        priv.extent = .{
            .width = @intCast(widget.getWidth() * scale),
            .height = @intCast(widget.getHeight() * scale),
        };

        // Setup Vulkan. Bail out on failure: emitting "setup" with
        // undefined handles would hand garbage to signal handlers.
        // `vk_ready` stays false so unrealize()/snapshot() stay inert.
        self.vkInit() catch |err| {
            log.err("failed to initialize vulkan={}", .{err});
            return;
        };

        const ctx: RenderContext = .{
            .cmds = priv.cmds,
            .device = &priv.device,
            .extent = priv.extent,
            .target = priv.target,
        };
        signals.setup.impl.emit(self, null, .{&ctx}, null);
    }

    fn unrealize(self: *Self) callconv(.c) void {
        const priv = self.private();

        // Only tear down Vulkan state that was actually created; if
        // vkInit() failed during realize, all handles are undefined.
        if (priv.vk_ready) {
            priv.device.deviceWaitIdle() catch |err| {
                log.err("failed to wait for device to become idle={}", .{err});
            };

            const ctx: RenderContext = .{
                .cmds = priv.cmds,
                .device = &priv.device,
                .extent = priv.extent,
                .target = priv.target,
            };
            signals.teardown.impl.emit(self, null, .{&ctx}, null);

            // The target is only created on the first present(); if the
            // widget never drew (e.g. zero extent), there is nothing to
            // destroy and its device pointer is still undefined.
            if (priv.target.image != .null_handle) priv.target.deinit();
            priv.device.freeCommandBuffers(priv.pool, 1, &.{priv.cmds});
            priv.device.destroyCommandPool(priv.pool, null);
            priv.device.destroyDevice(null);
            priv.instance.destroyInstance(null);
            priv.vk_ready = false;
        }

        gtk.Widget.virtual_methods.unrealize.call(
            Class.parent,
            self.as(gtk.Widget),
        );
    }

    fn sizeAllocate(self: *Self, width: c_int, height: c_int, baseline: c_int) callconv(.c) void {
        gtk.Widget.virtual_methods.size_allocate.call(
            Class.parent,
            self.as(gtk.Widget),
            width,
            height,
            baseline,
        );

        // Defer the actual target resize to the next snapshot().
        if (self.as(gtk.Widget).getRealized() != 0) {
            self.private().needs_resize = true;
        }
    }

    fn snapshot(self: *Self, snap: *gtk.Snapshot) callconv(.c) void {
        const priv = self.private();

        // Nothing to draw if Vulkan never came up.
        if (!priv.vk_ready) return;

        const were_resized = priv.needs_resize;
        const was_empty = priv.extent.width == 0 or priv.extent.height == 0;
        if (priv.needs_resize) {
            const widget = self.as(gtk.Widget);
            const scale = widget.getScaleFactor();
            priv.extent = .{
                .width = @intCast(widget.getWidth() * scale),
                .height = @intCast(widget.getHeight() * scale),
            };
            log.debug("resized w={} h={}", .{ priv.extent.width, priv.extent.height });

            signals.resize.impl.emit(
                self,
                null,
                .{ priv.extent.width, priv.extent.height },
                null,
            );
            priv.needs_resize = false;
        }

        if (priv.extent.width == 0 or priv.extent.height == 0) return;

        self.present(were_resized, was_empty) catch |err| {
            log.err("failed to present={}", .{err});
            return;
        };

        // Output framebuffer to DMABUF texture
        const maybe_texture = self.exportDmabufTexture() catch |err| {
            log.err("failed to export={}", .{err});
            return;
        };
        // The builder may return null on failure; skip this frame rather
        // than panicking on `.?`.
        const texture = maybe_texture orelse {
            log.err("dmabuf texture builder returned null", .{});
            return;
        };

        // Add frame to snapshot
        snap.save();
        snap.appendTexture(texture, &.{
            .f_origin = .{ .f_x = 0, .f_y = 0 },
            .f_size = .{
                .f_width = @floatFromInt(priv.extent.width),
                .f_height = @floatFromInt(priv.extent.height),
            },
        });
        snap.restore();
    }

    //-------------------------------------------------------------------------
    // Private methods

    fn isRealized(self: *Self) bool {
        return self.as(gtk.Widget).getRealized() != 0;
    }

    /// Initializes the Vulkan instance, selects a physical device, creates
    /// the logical device, queries DRM format modifier properties, and
    /// allocates the command pool and command buffer.
    /// Sets `priv.vk_ready` on success.
    fn vkInit(self: *Self) !void {
        const priv = self.private();

        // This arena is DIFFERENT from priv.arena!
        // priv.arena lasts through the entire existence of the VkArea,
        // meanwhile this one is solely used to cache objects returned from
        // the initialization process.
        var arena: std.heap.ArenaAllocator = .init(priv.arena.child_allocator);
        defer arena.deinit();
        const alloc = arena.allocator();

        // Validation layers: every requested layer must be available.
        {
            const found_layers = try priv.vkb.enumerateInstanceLayerPropertiesAlloc(alloc);

            layer: for (priv.validation_layers.items) |required_layer| {
                for (found_layers) |found_layer| {
                    if (std.mem.eql(
                        u8,
                        std.mem.sliceTo(&found_layer.layer_name, 0),
                        std.mem.span(required_layer),
                    )) continue :layer;
                }
                return error.ValidationLayerNotFound;
            }
        }

        // Initialize instance
        const inst = try priv.vkb.createInstance(&.{
            .p_application_info = &.{
                .p_application_name = "Hello Vulkan",
                .application_version = @bitCast(vk.makeApiVersion(0, 0, 0, 0)),
                .p_engine_name = "VkArea",
                .engine_version = @bitCast(vk.makeApiVersion(0, 0, 0, 0)),
                .api_version = @bitCast(vk.features.version_1_3.version),
            },
            .enabled_extension_count = @intCast(priv.instance_extensions.items.len),
            .pp_enabled_extension_names = priv.instance_extensions.items.ptr,
            .enabled_layer_count = @intCast(priv.validation_layers.items.len),
            .pp_enabled_layer_names = priv.validation_layers.items.ptr,
        }, null);

        priv.vki = .load(inst, priv.vkb.dispatch.vkGetInstanceProcAddr.?);
        priv.instance = .init(inst, &priv.vki);
        errdefer priv.instance.destroyInstance(null);

        log.debug("instance initialized", .{});

        // Add required device extensions
        try priv.device_extensions.appendSlice(priv.arena.allocator(), &.{
            // Required for exporting render results to GTK
            vk.extensions.khr_external_memory_fd.name,
            vk.extensions.ext_external_memory_dma_buf.name,
            vk.extensions.ext_image_drm_format_modifier.name,
        });

        // Choose and initialize a device
        var found_device = false;
        dev: for (try priv.instance.enumeratePhysicalDevicesAlloc(alloc)) |pdev| {
            // Check if the device supports the required Vulkan version
            const props = priv.instance.getPhysicalDeviceProperties(pdev);
            const required_version: u32 = @bitCast(vk.features.version_1_3.version);
            if (props.api_version < required_version) continue :dev;

            // Check if the device supports all required extensions
            const ext_props = try priv.instance.enumerateDeviceExtensionPropertiesAlloc(
                pdev,
                null,
                alloc,
            );
            for (priv.device_extensions.items) |required_ext| {
                for (ext_props) |found_ext| {
                    if (std.mem.eql(
                        u8,
                        std.mem.sliceTo(&found_ext.extension_name, 0),
                        std.mem.span(required_ext),
                    )) break;
                } else continue :dev;
            }

            // Check for minimum image properties requirements (e.g. max extent)
            {
                const info: vk.PhysicalDeviceImageFormatInfo2 = .{
                    .format = format,
                    .type = .@"2d",
                    .tiling = .optimal,
                    .usage = .{
                        .transfer_dst_bit = true,
                    },
                };
                var out: vk.ImageFormatProperties2 = .{
                    .image_format_properties = undefined,
                };
                // A device that does not support the format (e.g. returns
                // FORMAT_NOT_SUPPORTED) is merely disqualified; do not
                // abort the whole search.
                priv.instance.getPhysicalDeviceImageFormatProperties2(
                    pdev,
                    &info,
                    &out,
                ) catch continue :dev;

                const max_extent = out.image_format_properties.max_extent;
                // Too small for our target: skip this device and keep
                // scanning the remaining ones.
                if (max_extent.width < priv.extent.width or
                    max_extent.height < priv.extent.height) continue :dev;
            }

            // Allocate queue families
            const families = try priv.instance.getPhysicalDeviceQueueFamilyPropertiesAlloc(
                pdev,
                alloc,
            );

            var graphics_family: ?u32 = null;
            for (families, 0..) |p, i| {
                if (graphics_family == null and p.queue_flags.graphics_bit) {
                    graphics_family = @intCast(i);
                }
            }

            // All criteria met.
            priv.graphics_queue_family = graphics_family orelse continue :dev;
            priv.physical_device = pdev;

            const qci = [_]vk.DeviceQueueCreateInfo{.{
                .queue_family_index = priv.graphics_queue_family,
                .queue_count = 1,
                .p_queue_priorities = &.{1},
            }};

            var features_1_3: vk.PhysicalDeviceVulkan13Features = .{
                .dynamic_rendering = .true,
                .synchronization_2 = .true,
            };
            var features_1_1: vk.PhysicalDeviceVulkan11Features = .{
                .shader_draw_parameters = .true,
                .p_next = @ptrCast(&features_1_3),
            };
            const features: vk.PhysicalDeviceFeatures2 = .{
                .p_next = @ptrCast(&features_1_1),
                .features = .{},
            };

            const device = try priv.instance.createDevice(priv.physical_device, &.{
                .queue_create_info_count = qci.len,
                .p_queue_create_infos = &qci,
                .enabled_extension_count = @intCast(priv.device_extensions.items.len),
                .pp_enabled_extension_names = priv.device_extensions.items.ptr,
                .p_next = @ptrCast(&features),
            }, null);

            priv.vkd = .load(device, priv.vki.dispatch.vkGetDeviceProcAddr.?);
            priv.device = .init(device, &priv.vkd);
            priv.graphics_queue = priv.device.getDeviceQueue(priv.graphics_queue_family, 0);

            found_device = true;
            break;
        }
        if (!found_device) return error.NoDevice;

        errdefer priv.device.destroyDevice(null);
        log.debug("device initialized", .{});

        // Ask for DRM modifiers
        {
            var modifiers: [1]vk.DrmFormatModifierPropertiesEXT = undefined;
            var drm_out: vk.DrmFormatModifierPropertiesListEXT = .{
                .drm_format_modifier_count = modifiers.len,
                .p_drm_format_modifier_properties = &modifiers,
            };
            var out: vk.FormatProperties2 = .{
                .format_properties = undefined,
                .p_next = @ptrCast(&drm_out),
            };
            priv.instance.getPhysicalDeviceFormatProperties2(
                priv.physical_device,
                format,
                &out,
            );
            // NOTE(review): only the first reported modifier is used;
            // confirm this is acceptable for the targeted drivers.
            priv.drm_props = modifiers[0];
        }

        // Initialize command buffer & pool
        priv.pool = try priv.device.createCommandPool(&.{
            .queue_family_index = priv.graphics_queue_family,
            .flags = .{
                .reset_command_buffer_bit = true,
            },
        }, null);
        log.debug("command pool initialized", .{});

        var buf: [1]vk.CommandBuffer = undefined;
        try priv.device.allocateCommandBuffers(&.{
            .command_pool = priv.pool,
            .level = .primary,
            .command_buffer_count = buf.len,
        }, &buf);
        priv.cmds = buf[0];

        log.debug("command buffer initialized={any}", .{buf});

        priv.vk_ready = true;
    }

    /// Records and submits one frame: (re)creates the target on resize,
    /// transitions the image, emits "render" for user draw commands, and
    /// blocks until the GPU finishes.
    fn present(self: *Self, were_resized: bool, was_empty: bool) !void {
        const priv = self.private();

        if (were_resized) {
            try priv.device.deviceWaitIdle();
            // An empty (0x0) extent never had a target to destroy.
            if (!was_empty) priv.target.deinit();
            priv.target = try .init(
                &priv.instance,
                priv.physical_device,
                &priv.device,
                priv.extent,
                &.{priv.graphics_queue_family},
                priv.drm_props,
            );
        }

        // Draw!
        try priv.device.beginCommandBuffer(priv.cmds, &.{});

        {
            // Transition into rendering layout
            priv.target.transition(
                priv.cmds,
                .undefined,
                .color_attachment_optimal,
                .{ .top_of_pipe_bit = true },
                .{ .color_attachment_output_bit = true },
                .{},
                .{ .color_attachment_write_bit = true },
            );

            const ctx: RenderContext = .{
                .cmds = priv.cmds,
                .device = &priv.device,
                .target = priv.target,
                .extent = priv.extent,
            };
            signals.render.impl.emit(self, null, .{&ctx}, null);

            // Transition into exporting layout
            priv.target.transition(
                priv.cmds,
                .color_attachment_optimal,
                .color_attachment_optimal,
                .{ .color_attachment_output_bit = true },
                .{ .bottom_of_pipe_bit = true },
                .{ .color_attachment_write_bit = true },
                .{},
            );
        }
        try priv.device.endCommandBuffer(priv.cmds);

        // Wait for GPU
        try priv.target.waitForFence();
        try priv.device.resetFences(1, &.{priv.target.fence});

        const wait_stage = [_]vk.PipelineStageFlags{
            .{ .top_of_pipe_bit = true },
        };
        try priv.device.queueSubmit(
            priv.graphics_queue,
            1,
            &.{.{
                .command_buffer_count = 1,
                .p_command_buffers = &.{priv.cmds},
                .p_wait_dst_stage_mask = &wait_stage,
            }},
            priv.target.fence,
        );

        try priv.device.queueWaitIdle(priv.graphics_queue);
    }

    /// Returns the current backing extent in device pixels.
    pub fn getExtent(self: *Self) vk.Extent2D {
        return self.private().extent;
    }

    /// Exports the render target's memory as a DMABUF and wraps it in a
    /// `gdk.Texture`. Ownership of the exported fd is transferred to the
    /// texture's destroy notify (`releaseDmabuf`).
    fn exportDmabufTexture(self: *Self) !?*gdk.Texture {
        const priv = self.private();

        const fd = try priv.device.getMemoryFdKHR(&.{
            .memory = priv.target.memory,
            .handle_type = .{ .dma_buf_bit_ext = true },
        });

        const dmabuf = gdk.DmabufTextureBuilder.new();
        dmabuf.setDisplay(self.as(gtk.Widget).getDisplay());
        dmabuf.setWidth(priv.extent.width);
        dmabuf.setHeight(priv.extent.height);
        dmabuf.setPremultiplied(@intFromBool(true));

        // DRM fourcc "AR24" (ARGB8888); read as a native-endian u32.
        const fourcc = std.mem.bytesToValue(u32, "AR24");
        dmabuf.setFourcc(fourcc);
        dmabuf.setModifier(priv.drm_props.drm_format_modifier);
        dmabuf.setNPlanes(priv.drm_props.drm_format_modifier_plane_count);
        for (0..priv.drm_props.drm_format_modifier_plane_count) |i| {
            const plane: c_uint = @intCast(i);

            const layout = priv.device.getImageSubresourceLayout(priv.target.image, &.{
                .mip_level = 0,
                // Memory planes are selected via the aspect mask; the
                // image is created with array_layers = 1, so the array
                // layer is always 0 (a nonzero layer would be invalid).
                .array_layer = 0,
                .aspect_mask = .{
                    .memory_plane_0_bit_ext = i == 0,
                    .memory_plane_1_bit_ext = i == 1,
                    .memory_plane_2_bit_ext = i == 2,
                    .memory_plane_3_bit_ext = i == 3,
                },
            });

            dmabuf.setFd(plane, fd);
            dmabuf.setOffset(plane, @intCast(layout.offset));
            dmabuf.setStride(plane, @intCast(layout.row_pitch));
        }

        // Heap-box the fd so releaseDmabuf can close it when GDK releases
        // the texture.
        const a = try std.heap.c_allocator.create(std.posix.fd_t);
        a.* = fd;
        return dmabuf.build(releaseDmabuf, a, null);
    }

    /// GDK destroy notify: closes the exported DMABUF fd and frees its box.
    fn releaseDmabuf(ud: ?*anyopaque) callconv(.c) void {
        const a: *std.posix.fd_t = @ptrCast(@alignCast(ud));
        std.posix.close(a.*);
        std.heap.c_allocator.destroy(a);
    }

    /// Per-frame state handed to signal handlers. Boxed so it can travel
    /// through the GObject signal machinery.
    pub const RenderContext = struct {
        device: *vk.DeviceProxy,
        cmds: vk.CommandBuffer,
        target: Target,
        extent: vk.Extent2D,

        pub const getGObjectType = gobject.ext.defineBoxed(RenderContext, .{
            .name = "GtkVkAreaRenderContext",
        });
    };

    /// The offscreen render target: a DRM-modifier-tiled image bound to
    /// exportable (DMABUF) memory, plus a view and a submission fence.
    const Target = struct {
        device: *vk.DeviceProxy = undefined,
        image: vk.Image = .null_handle,
        memory: vk.DeviceMemory = .null_handle,
        view: vk.ImageView = .null_handle,
        fence: vk.Fence = .null_handle,

        fn init(
            instance: *vk.InstanceProxy,
            pdev: vk.PhysicalDevice,
            device: *vk.DeviceProxy,
            extent: vk.Extent2D,
            queue_families: []const u32,
            drm_props: vk.DrmFormatModifierPropertiesEXT,
        ) !Target {
            const drm_format_info: vk.ImageDrmFormatModifierListCreateInfoEXT = .{
                .drm_format_modifier_count = 1,
                .p_drm_format_modifiers = &.{drm_props.drm_format_modifier},
            };

            const ext_mem_image_create_info: vk.ExternalMemoryImageCreateInfo = .{
                .handle_types = .{ .dma_buf_bit_ext = true },
                .p_next = @ptrCast(&drm_format_info),
            };

            const image = try device.createImage(&.{
                .image_type = .@"2d",
                .format = format,
                .extent = .{
                    .width = extent.width,
                    .height = extent.height,
                    .depth = 1,
                },
                .mip_levels = 1,
                .array_layers = 1,
                .samples = .{ .@"1_bit" = true },
                .tiling = .drm_format_modifier_ext,
                .usage = .{
                    .color_attachment_bit = true,
                    .transfer_src_bit = true,
                },
                .sharing_mode = .exclusive,
                .queue_family_index_count = @intCast(queue_families.len),
                .p_queue_family_indices = queue_families.ptr,
                .initial_layout = .undefined,
                .p_next = @ptrCast(&ext_mem_image_create_info),
            }, null);

            const mem_req = device.getImageMemoryRequirements(image);

            var mem_props_list: vk.PhysicalDeviceMemoryProperties2 = .{
                .memory_properties = undefined,
            };
            instance.getPhysicalDeviceMemoryProperties2(
                pdev,
                &mem_props_list,
            );
            const mem_props = mem_props_list.memory_properties;

            // Pick the first device-local memory type.
            // NOTE(review): this ignores mem_req.memory_type_bits; confirm
            // the chosen type is always compatible with the image.
            const memory_type_idx = for (0..mem_props.memory_type_count) |i| {
                if (mem_props.memory_types[i].property_flags.device_local_bit) break i;
            } else return error.NoSuitableMemoryType;

            const export_mem_alloc_info: vk.ExportMemoryAllocateInfo = .{
                .handle_types = .{ .dma_buf_bit_ext = true },
            };

            const memory = try device.allocateMemory(&.{
                .allocation_size = mem_req.size,
                .memory_type_index = @intCast(memory_type_idx),
                .p_next = @ptrCast(&export_mem_alloc_info),
            }, null);

            try device.bindImageMemory(image, memory, 0);

            const view = try device.createImageView(&.{
                .image = image,
                .view_type = .@"2d",
                .format = format,
                .components = .{
                    .r = .identity,
                    .g = .identity,
                    .b = .identity,
                    .a = .identity,
                },
                .subresource_range = .{
                    .aspect_mask = .{
                        .color_bit = true,
                    },
                    .base_mip_level = 0,
                    .level_count = 1,
                    .base_array_layer = 0,
                    .layer_count = 1,
                },
            }, null);

            // Created signaled so the first present() can wait+reset
            // without a prior submission.
            const fence = try device.createFence(&.{
                .flags = .{ .signaled_bit = true },
            }, null);

            return .{
                .device = device,
                .image = image,
                .memory = memory,
                .view = view,
                .fence = fence,
            };
        }

        fn waitForFence(self: Target) !void {
            _ = try self.device.waitForFences(1, &.{self.fence}, .true, std.math.maxInt(u64));
        }

        fn deinit(self: Target) void {
            // Best-effort: deallocation must proceed even if the wait fails.
            self.waitForFence() catch {};
            self.device.destroyImageView(self.view, null);
            self.device.destroyImage(self.image, null);
            self.device.destroyFence(self.fence, null);
            self.device.freeMemory(self.memory, null);
        }

        /// Records a synchronization2 image barrier on `cmds` transitioning
        /// the target between the given layouts/stages/accesses.
        fn transition(
            self: Target,
            cmds: vk.CommandBuffer,
            old_layout: vk.ImageLayout,
            new_layout: vk.ImageLayout,
            src_stage_mask: vk.PipelineStageFlags2,
            dst_stage_mask: vk.PipelineStageFlags2,
            src_access_mask: vk.AccessFlags2,
            dst_access_mask: vk.AccessFlags2,
        ) void {
            const barriers = [_]vk.ImageMemoryBarrier2{.{
                .src_stage_mask = src_stage_mask,
                .src_access_mask = src_access_mask,
                .dst_stage_mask = dst_stage_mask,
                .dst_access_mask = dst_access_mask,
                .old_layout = old_layout,
                .new_layout = new_layout,
                .src_queue_family_index = vk.QUEUE_FAMILY_IGNORED,
                .dst_queue_family_index = vk.QUEUE_FAMILY_IGNORED,
                .image = self.image,
                .subresource_range = .{
                    .aspect_mask = .{ .color_bit = true },
                    .base_mip_level = 0,
                    .level_count = 1,
                    .base_array_layer = 0,
                    .layer_count = 1,
                },
            }};
            self.device.cmdPipelineBarrier2(cmds, &.{
                .dependency_flags = .{},
                .image_memory_barrier_count = barriers.len,
                .p_image_memory_barriers = &barriers,
            });
        }
    };

    //-------------------------------------------------------------------------
    // Boilerplate

    pub fn as(self: *Self, comptime T: type) *T {
        return gobject.ext.as(T, self);
    }
    fn private(self: *Self) *Private {
        return gobject.ext.impl_helpers.getPrivate(self, Private, Private.offset);
    }
    pub const Class = extern struct {
        parent_class: Parent.Class,
        var parent: *Parent.Class = undefined;
        pub const Instance = Self;

        pub fn init(class: *Class) callconv(.c) void {
            signals.setup.impl.register(.{});
            signals.teardown.impl.register(.{});
            signals.render.impl.register(.{});
            signals.resize.impl.register(.{});

            // Install finalize so the arena is actually freed; it was
            // previously defined but never registered (leak).
            gobject.Object.virtual_methods.finalize.implement(class, finalize);

            gtk.Widget.virtual_methods.realize.implement(class, realize);
            gtk.Widget.virtual_methods.unrealize.implement(class, unrealize);
            gtk.Widget.virtual_methods.size_allocate.implement(class, sizeAllocate);
            gtk.Widget.virtual_methods.snapshot.implement(class, snapshot);
        }
    };
};