// The open source OpenXR runtime.
1// Copyright 2019-2023, Collabora, Ltd.
2// Copyright 2024-2025, NVIDIA CORPORATION.
3// SPDX-License-Identifier: BSL-1.0
4/*!
5 * @file
6 * @brief Simple debug image based target.
7 * @author Jakob Bornecrantz <tbornecrantz@nvidia.com>
8 * @ingroup comp_main
9 */
10
11#include "util/u_misc.h"
12#include "util/u_pacing.h"
13#include "util/u_pretty_print.h"
14
15#include "main/comp_window.h"
16
17
18/*
19 *
20 * Structs and defines.
21 *
22 */
23
/*!
 * A comp_target that renders into plain debug images instead of a real
 * swapchain/window, so output can be inspected via the variable tracking UI.
 */
struct debug_image_target
{
	//! Base "class", so that we are a target the compositor can use.
	struct comp_target base;

	//! Index of the currently acquired image, -1 when none; for error checking.
	int64_t index;

	//! Used to create the Vulkan resources, also manages index.
	struct comp_scratch_single_images target;

	/*!
	 * Storage for 'exported' images, these are pointed at by the
	 * comp_target::images pointer in the @p base struct.
	 */
	struct comp_target_image images[COMP_SCRATCH_NUM_IMAGES];

	//! Compositor frame pacing helper.
	struct u_pacing_compositor *upc;

	//! Set in init_post_vulkan; so we know we can free Vulkan resources safely.
	bool has_init_vulkan;
};
47
48
49/*
50 *
51 * Target members.
52 *
53 */
54
/*!
 * Pre-Vulkan initialisation, nothing needed for this target.
 */
static bool
target_init_pre_vulkan(struct comp_target *ct)
{
	(void)ct; // Nothing to set up before Vulkan exists.

	return true;
}
60
61static bool
62target_init_post_vulkan(struct comp_target *ct, uint32_t preferred_width, uint32_t preferred_height)
63{
64 struct debug_image_target *dit = (struct debug_image_target *)ct;
65
66 // We now know Vulkan is running and we can use it.
67 dit->has_init_vulkan = true;
68
69 return true;
70}
71
/*!
 * This target has no surface to wait on, so it is always ready.
 */
static bool
target_check_ready(struct comp_target *ct)
{
	(void)ct;

	return true;
}
77
/*!
 * Allocate the scratch images and export their handles/views through the
 * base comp_target struct, picking SRGB or UNORM views based on the
 * compositor's preferred format list.
 */
static void
target_create_images(struct comp_target *ct, const struct comp_target_create_images_info *create_info)
{
	struct debug_image_target *dit = (struct debug_image_target *)ct;
	struct vk_bundle *vk = &dit->base.c->base.vk;
	bool use_unorm = false, use_srgb = false, maybe_convert = false;

	// Paranoia: images can only be created once Vulkan is up.
	assert(dit->has_init_vulkan);

	/*
	 * Find the format we should use, since we are using the scratch images
	 * to allocate the images we only support the two formats it uses
	 * (listed below). We search for those breaking as soon as we find those
	 * and setting if the compositor wanted SRGB or UNORM. But we also look
	 * for two other commonly used formats, but continue searching for the
	 * other true formats.
	 *
	 * The format used by the scratch image is:
	 * - VK_FORMAT_R8G8B8A8_SRGB
	 * - VK_FORMAT_R8G8B8A8_UNORM
	 *
	 * The other formats used to determine SRGB vs UNORM:
	 * - VK_FORMAT_B8G8R8A8_SRGB
	 * - VK_FORMAT_B8G8R8A8_UNORM
	 *
	 * NOTE(review): if a B8G8R8A8 format is seen before a R8G8B8A8 one of
	 * the other transfer function, both use_unorm and use_srgb can end up
	 * set; the SRGB branches below then win — presumably intended, confirm.
	 */
	for (uint32_t i = 0; i < create_info->format_count; i++) {
		VkFormat format = create_info->formats[i];

		// Used to figure out if we want SRGB or UNORM only.
		if (!maybe_convert && format == VK_FORMAT_B8G8R8A8_UNORM) {
			use_unorm = true;
			maybe_convert = true;
			continue; // Keep going, we might get better formats.
		}
		if (!maybe_convert && format == VK_FORMAT_B8G8R8A8_SRGB) {
			use_srgb = true;
			maybe_convert = true;
			continue; // Keep going, we might get better formats.
		}

		// These two are what the scratch image allocates.
		if (format == VK_FORMAT_R8G8B8A8_UNORM) {
			use_unorm = true;
			maybe_convert = false;
			break; // Best match, stop searching.
		}
		if (format == VK_FORMAT_R8G8B8A8_SRGB) {
			use_srgb = true;
			maybe_convert = false;
			break; // Best match, stop searching.
		}
	}

	// Check: at least one of the four recognised formats must have matched.
	assert(use_unorm || use_srgb);
	if (maybe_convert) {
		COMP_WARN(ct->c, "Ignoring the format and picking something we use.");
	}

	// Do the allocation.
	comp_scratch_single_images_ensure_mutable(&dit->target, vk, create_info->extent);

	// Share the Vulkan handles of images and image views.
	for (uint32_t i = 0; i < COMP_SCRATCH_NUM_IMAGES; i++) {
		dit->images[i].handle = dit->target.images[i].image;
		if (use_unorm) {
			dit->images[i].view = dit->target.images[i].unorm_view;
		}
		if (use_srgb) {
			// Overrides the UNORM view if both flags got set.
			dit->images[i].view = dit->target.images[i].srgb_view;
		}
	}

	// Fill in exported data.
	dit->base.image_count = COMP_SCRATCH_NUM_IMAGES;
	dit->base.images = &dit->images[0];
	dit->base.width = create_info->extent.width;
	dit->base.height = create_info->extent.height;
	if (use_unorm) {
		dit->base.format = VK_FORMAT_R8G8B8A8_UNORM;
	}
	if (use_srgb) {
		dit->base.format = VK_FORMAT_R8G8B8A8_SRGB;
	}
	dit->base.final_layout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
}
165
166static bool
167target_has_images(struct comp_target *ct)
168{
169 struct debug_image_target *dit = (struct debug_image_target *)ct;
170
171 // Simple check.
172 return dit->base.images != NULL;
173}
174
175static VkResult
176target_acquire(struct comp_target *ct, uint32_t *out_index)
177{
178 struct debug_image_target *dit = (struct debug_image_target *)ct;
179
180 // Error checking.
181 assert(dit->index == -1);
182
183 uint32_t index = 0;
184 comp_scratch_single_images_get(&dit->target, &index);
185
186 // For error checking.
187 dit->index = index;
188
189 // Return the variable.
190 *out_index = index;
191
192 return VK_SUCCESS;
193}
194
195static VkResult
196target_present(struct comp_target *ct,
197 VkQueue queue,
198 uint32_t index,
199 uint64_t timeline_semaphore_value,
200 int64_t desired_present_time_ns,
201 int64_t present_slop_ns)
202{
203 struct debug_image_target *dit = (struct debug_image_target *)ct;
204
205 assert(index == dit->index);
206
207 comp_scratch_single_images_done(&dit->target);
208
209 // For error checking.
210 dit->index = -1;
211
212 return VK_SUCCESS;
213}
214
215static VkResult
216target_wait_for_present(struct comp_target *ct, time_duration_ns timeout_ns)
217{
218 return VK_ERROR_EXTENSION_NOT_PRESENT;
219}
220
/*!
 * Nothing is queued by this target, so flushing is a no-op.
 */
static void
target_flush(struct comp_target *ct)
{
	(void)ct;
}
226
227static void
228target_calc_frame_pacing(struct comp_target *ct,
229 int64_t *out_frame_id,
230 int64_t *out_wake_up_time_ns,
231 int64_t *out_desired_present_time_ns,
232 int64_t *out_present_slop_ns,
233 int64_t *out_predicted_display_time_ns)
234{
235 struct debug_image_target *dit = (struct debug_image_target *)ct;
236
237 int64_t frame_id = -1;
238 int64_t wake_up_time_ns = 0;
239 int64_t desired_present_time_ns = 0;
240 int64_t present_slop_ns = 0;
241 int64_t predicted_display_time_ns = 0;
242 int64_t predicted_display_period_ns = 0;
243 int64_t min_display_period_ns = 0;
244 int64_t now_ns = os_monotonic_get_ns();
245
246 u_pc_predict(dit->upc, //
247 now_ns, //
248 &frame_id, //
249 &wake_up_time_ns, //
250 &desired_present_time_ns, //
251 &present_slop_ns, //
252 &predicted_display_time_ns, //
253 &predicted_display_period_ns, //
254 &min_display_period_ns); //
255
256 *out_frame_id = frame_id;
257 *out_wake_up_time_ns = wake_up_time_ns;
258 *out_desired_present_time_ns = desired_present_time_ns;
259 *out_predicted_display_time_ns = predicted_display_time_ns;
260 *out_present_slop_ns = present_slop_ns;
261}
262
263static void
264target_mark_timing_point(struct comp_target *ct, enum comp_target_timing_point point, int64_t frame_id, int64_t when_ns)
265{
266 struct debug_image_target *dit = (struct debug_image_target *)ct;
267
268 switch (point) {
269 case COMP_TARGET_TIMING_POINT_WAKE_UP:
270 u_pc_mark_point(dit->upc, U_TIMING_POINT_WAKE_UP, frame_id, when_ns);
271 break;
272 case COMP_TARGET_TIMING_POINT_BEGIN: //
273 u_pc_mark_point(dit->upc, U_TIMING_POINT_BEGIN, frame_id, when_ns);
274 break;
275 case COMP_TARGET_TIMING_POINT_SUBMIT_BEGIN:
276 u_pc_mark_point(dit->upc, U_TIMING_POINT_SUBMIT_BEGIN, frame_id, when_ns);
277 break;
278 case COMP_TARGET_TIMING_POINT_SUBMIT_END:
279 u_pc_mark_point(dit->upc, U_TIMING_POINT_SUBMIT_END, frame_id, when_ns);
280 break;
281 default: assert(false);
282 }
283}
284
285static VkResult
286target_update_timings(struct comp_target *ct)
287{
288 return VK_SUCCESS; // No-op
289}
290
291static void
292target_info_gpu(struct comp_target *ct, int64_t frame_id, int64_t gpu_start_ns, int64_t gpu_end_ns, int64_t when_ns)
293{
294 struct debug_image_target *dit = (struct debug_image_target *)ct;
295
296 u_pc_info_gpu(dit->upc, frame_id, gpu_start_ns, gpu_end_ns, when_ns);
297}
298
/*!
 * There is no window, so there is nothing to title.
 */
static void
target_set_title(struct comp_target *ct, const char *title)
{
	(void)ct;
	(void)title;
}
304
/*!
 * Destroy the target, safe to call whether or not Vulkan init happened;
 * Vulkan resources are only freed when has_init_vulkan is set.
 */
static void
target_destroy(struct comp_target *ct)
{
	struct debug_image_target *dit = (struct debug_image_target *)ct;
	struct vk_bundle *vk = &dit->base.c->base.vk;

	// Do this first, so the debug UI stops referencing us before we free.
	u_var_remove_root(dit);

	// Can only have allocated images if Vulkan was initialised.
	if (dit->has_init_vulkan) {
		comp_scratch_single_images_free(&dit->target, vk);
		dit->has_init_vulkan = false;
		// Clear the exported state we filled in target_create_images.
		dit->base.image_count = 0;
		dit->base.images = NULL;
		dit->base.width = 0;
		dit->base.height = 0;
		dit->base.format = VK_FORMAT_UNDEFINED;
		dit->base.final_layout = VK_IMAGE_LAYOUT_UNDEFINED;
	}

	// Always free non-Vulkan resources (locking etc. from _init).
	comp_scratch_single_images_destroy(&dit->target);

	// Pacing is always created.
	u_pc_destroy(&dit->upc);

	// Free memory.
	free(dit);
}
335
336struct comp_target *
337target_create(struct comp_compositor *c)
338{
339 struct debug_image_target *dit = U_TYPED_CALLOC(struct debug_image_target);
340
341 dit->base.name = "debug_image";
342 dit->base.init_pre_vulkan = target_init_pre_vulkan;
343 dit->base.init_post_vulkan = target_init_post_vulkan;
344 dit->base.check_ready = target_check_ready;
345 dit->base.create_images = target_create_images;
346 dit->base.has_images = target_has_images;
347 dit->base.acquire = target_acquire;
348 dit->base.present = target_present;
349 dit->base.wait_for_present = target_wait_for_present;
350 dit->base.flush = target_flush;
351 dit->base.calc_frame_pacing = target_calc_frame_pacing;
352 dit->base.mark_timing_point = target_mark_timing_point;
353 dit->base.update_timings = target_update_timings;
354 dit->base.info_gpu = target_info_gpu;
355 dit->base.set_title = target_set_title;
356 dit->base.destroy = target_destroy;
357 dit->base.c = c;
358
359 dit->base.wait_for_present_supported = false;
360
361 // Create the pacer.
362 uint64_t now_ns = os_monotonic_get_ns();
363 u_pc_fake_create(c->settings.nominal_frame_interval_ns, now_ns, &dit->upc);
364
365 // Only inits locking, Vulkan resources inited later.
366 comp_scratch_single_images_init(&dit->target);
367
368 // For error checking.
369 dit->index = -1;
370
371 // Variable tracking.
372 u_var_add_root(dit, "Compositor output", true);
373 u_var_add_native_images_debug(dit, &dit->target.unid, "Image");
374
375 return &dit->base;
376}
377
378
379/*
380 *
381 * Factory
382 *
383 */
384
/*!
 * Never auto-detected; this target must be selected explicitly.
 */
static bool
factory_detect(const struct comp_target_factory *ctf, struct comp_compositor *c)
{
	(void)ctf;
	(void)c;

	return false;
}
390
/*!
 * Factory entry point: create the target and warn loudly, since ending
 * up with the debug image target usually means setup went wrong.
 */
static bool
factory_create_target(const struct comp_target_factory *ctf, struct comp_compositor *c, struct comp_target **out_ct)
{
	(void)ctf;

	struct comp_target *target = target_create(c);
	if (target == NULL) {
		return false;
	}

	COMP_INFO(c,
	          "\n################################################################################\n"
	          "# Debug image target used, if you wanted to see something in your headset #\n"
	          "# something is probably wrong with your setup, sorry. #\n"
	          "################################################################################");

	*out_ct = target;

	return true;
}
409
/*!
 * Factory for the debug image target. Detection always returns false so
 * it is never auto-selected; it requires no Vulkan instance/device
 * extensions and can be created before Vulkan is up.
 */
const struct comp_target_factory comp_target_factory_debug_image = {
    .name = "Debug Image",
    .identifier = "debug_image",
    .requires_vulkan_for_create = false,
    .is_deferred = false,
    .required_instance_version = 0,
    .required_instance_extensions = NULL,
    .required_instance_extension_count = 0,
    .optional_device_extensions = NULL,
    .optional_device_extension_count = 0,
    .detect = factory_detect,
    .create_target = factory_create_target,
};