// The open source OpenXR runtime
1// Copyright 2019-2023, Collabora, Ltd.
2// SPDX-License-Identifier: BSL-1.0
3/*!
4 * @file
5 * @brief Code for handling distortion resources (not shaders).
6 * @author Jakob Bornecrantz <jakob@collabora.com>
7 * @ingroup comp_render
8 */
9
10#include "xrt/xrt_device.h"
11
12#include "math/m_api.h"
13#include "math/m_matrix_2x2.h"
14#include "math/m_vec2.h"
15
16#include "vk/vk_mini_helpers.h"
17
18#include "render/render_interface.h"
19
20
21/*
22 *
23 * Helper functions.
24 *
25 */
26
27XRT_CHECK_RESULT static VkResult
28create_distortion_image_and_view(struct vk_bundle *vk,
29 VkExtent2D extent,
30 VkDeviceMemory *out_device_memory,
31 VkImage *out_image,
32 VkImageView *out_image_view)
33{
34 VkFormat format = VK_FORMAT_R32G32_SFLOAT;
35 VkImage image = VK_NULL_HANDLE;
36 VkDeviceMemory device_memory = VK_NULL_HANDLE;
37 VkImageView image_view = VK_NULL_HANDLE;
38 VkImageViewType view_type = VK_IMAGE_VIEW_TYPE_2D;
39 VkResult ret;
40
41 ret = vk_create_image_simple( //
42 vk, // vk_bundle
43 extent, // extent
44 format, // format
45 VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, // usage
46 &device_memory, // out_device_memory
47 &image); // out_image
48 VK_CHK_AND_RET(ret, "vk_create_image_simple");
49
50 VK_NAME_DEVICE_MEMORY(vk, device_memory, "distortion device_memory");
51 VK_NAME_IMAGE(vk, image, "distortion image");
52
53 VkImageSubresourceRange subresource_range = {
54 .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
55 .baseMipLevel = 0,
56 .levelCount = VK_REMAINING_MIP_LEVELS,
57 .baseArrayLayer = 0,
58 .layerCount = VK_REMAINING_ARRAY_LAYERS,
59 };
60
61 ret = vk_create_view( //
62 vk, // vk_bundle
63 image, // image
64 view_type, // type
65 format, // format
66 subresource_range, // subresource_range
67 &image_view); // out_image_view
68 VK_CHK_WITH_GOTO(ret, "vk_create_view", err_free);
69
70 VK_NAME_IMAGE_VIEW(vk, image_view, "distortion image view");
71
72 *out_device_memory = device_memory;
73 *out_image = image;
74 *out_image_view = image_view;
75
76 return VK_SUCCESS;
77
78err_free:
79 D(Image, image);
80 DF(Memory, device_memory);
81
82 return ret;
83}
84
85static void
86queue_upload_for_first_level_and_layer_locked(
87 struct vk_bundle *vk, VkCommandBuffer cmd, VkBuffer src, VkImage dst, VkExtent2D extent)
88{
89 VkImageSubresourceRange subresource_range = {
90 .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
91 .baseMipLevel = 0,
92 .levelCount = VK_REMAINING_MIP_LEVELS,
93 .baseArrayLayer = 0,
94 .layerCount = VK_REMAINING_ARRAY_LAYERS,
95 };
96
97 vk_cmd_image_barrier_gpu_locked( //
98 vk, //
99 cmd, //
100 dst, //
101 0, //
102 VK_ACCESS_TRANSFER_WRITE_BIT, //
103 VK_IMAGE_LAYOUT_UNDEFINED, //
104 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, //
105 subresource_range); //
106
107 VkImageSubresourceLayers subresource_layers = {
108 .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
109 .mipLevel = 0,
110 .baseArrayLayer = 0,
111 .layerCount = 1,
112 };
113
114 VkBufferImageCopy region = {
115 .bufferOffset = 0,
116 .bufferRowLength = 0,
117 .bufferImageHeight = 0,
118 .imageSubresource = subresource_layers,
119 .imageOffset = {0, 0, 0},
120 .imageExtent = {extent.width, extent.height, 1},
121 };
122
123 vk->vkCmdCopyBufferToImage( //
124 cmd, // commandBuffer
125 src, // srcBuffer
126 dst, // dstImage
127 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // dstImageLayout
128 1, // regionCount
129 ®ion); // pRegions
130
131 vk_cmd_image_barrier_gpu_locked( //
132 vk, //
133 cmd, //
134 dst, //
135 VK_ACCESS_TRANSFER_WRITE_BIT, //
136 VK_ACCESS_SHADER_READ_BIT, //
137 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, //
138 VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, //
139 subresource_range); //
140}
141
142XRT_CHECK_RESULT static VkResult
143create_and_queue_upload_locked(struct vk_bundle *vk,
144 struct vk_cmd_pool *pool,
145 VkCommandBuffer cmd,
146 VkBuffer src_buffer,
147 VkDeviceMemory *out_image_device_memory,
148 VkImage *out_image,
149 VkImageView *out_image_view)
150{
151 VkExtent2D extent = {RENDER_DISTORTION_IMAGE_DIMENSIONS, RENDER_DISTORTION_IMAGE_DIMENSIONS};
152 VkDeviceMemory device_memory = VK_NULL_HANDLE;
153 VkImage image = VK_NULL_HANDLE;
154 VkImageView image_view = VK_NULL_HANDLE;
155 VkResult ret;
156
157 ret = create_distortion_image_and_view( //
158 vk, // vk_bundle
159 extent, // extent
160 &device_memory, // out_device_memory
161 &image, // out_image
162 &image_view); // out_image_view
163 VK_CHK_AND_RET(ret, "create_distortion_image_and_view");
164
165 queue_upload_for_first_level_and_layer_locked( //
166 vk, // vk_bundle
167 cmd, // cmd
168 src_buffer, // src
169 image, // dst
170 extent); // extent
171
172 *out_image_device_memory = device_memory;
173 *out_image = image;
174 *out_image_view = image_view;
175
176 return VK_SUCCESS;
177}
178
/*!
 * Helper struct to make code easier to read.
 *
 * One square plane of UV pairs, laid out to match the
 * VK_FORMAT_R32G32_SFLOAT distortion image: pixels[row][col].
 */
struct texture
{
	struct xrt_vec2 pixels[RENDER_DISTORTION_IMAGE_DIMENSIONS][RENDER_DISTORTION_IMAGE_DIMENSIONS];
};
186
/*!
 * Offset and scale transforming UVs into tangent-angle space.
 *
 * NOTE(review): not referenced in the code visible here — presumably used
 * by other parts of the distortion code or kept for layout documentation;
 * confirm before removing.
 */
struct tan_angles_transforms
{
	struct xrt_vec2 offset;
	struct xrt_vec2 scale;
};
192
193XRT_CHECK_RESULT static VkResult
194create_and_fill_in_distortion_buffer_for_view(struct vk_bundle *vk,
195 struct xrt_device *xdev,
196 struct render_buffer *r_buffer,
197 struct render_buffer *g_buffer,
198 struct render_buffer *b_buffer,
199 uint32_t view,
200 bool pre_rotate)
201{
202 VkBufferUsageFlags usage_flags = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
203 VkMemoryPropertyFlags properties = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
204 VkResult ret;
205
206 struct xrt_matrix_2x2 rot = xdev->hmd->views[view].rot;
207
208 const struct xrt_matrix_2x2 rotation_90_cw = {{
209 .vecs =
210 {
211 {0, 1},
212 {-1, 0},
213 },
214 }};
215
216 if (pre_rotate) {
217 m_mat2x2_multiply(&rot, &rotation_90_cw, &rot);
218 }
219
220 VkDeviceSize size = sizeof(struct texture);
221
222 ret = render_buffer_init(vk, r_buffer, usage_flags, properties, size);
223 VK_CHK_WITH_GOTO(ret, "render_buffer_init", err_buffers);
224 VK_NAME_BUFFER(vk, r_buffer->buffer, "distortion r_buffer");
225 ret = render_buffer_init(vk, g_buffer, usage_flags, properties, size);
226 VK_CHK_WITH_GOTO(ret, "render_buffer_init", err_buffers);
227 VK_NAME_BUFFER(vk, g_buffer->buffer, "distortion g_buffer");
228 ret = render_buffer_init(vk, b_buffer, usage_flags, properties, size);
229 VK_CHK_WITH_GOTO(ret, "render_buffer_init", err_buffers);
230 VK_NAME_BUFFER(vk, b_buffer->buffer, "distortion b_buffer");
231
232 ret = render_buffer_map(vk, r_buffer);
233 VK_CHK_WITH_GOTO(ret, "render_buffer_map", err_buffers);
234 ret = render_buffer_map(vk, g_buffer);
235 VK_CHK_WITH_GOTO(ret, "render_buffer_map", err_buffers);
236 ret = render_buffer_map(vk, b_buffer);
237 VK_CHK_WITH_GOTO(ret, "render_buffer_map", err_buffers);
238
239 struct texture *r = r_buffer->mapped;
240 struct texture *g = g_buffer->mapped;
241 struct texture *b = b_buffer->mapped;
242
243 const double dim_minus_one_f64 = RENDER_DISTORTION_IMAGE_DIMENSIONS - 1;
244
245 for (int row = 0; row < RENDER_DISTORTION_IMAGE_DIMENSIONS; row++) {
246 // This goes from 0 to 1.0 inclusive.
247 float v = (float)(row / dim_minus_one_f64);
248
249 for (int col = 0; col < RENDER_DISTORTION_IMAGE_DIMENSIONS; col++) {
250 // This goes from 0 to 1.0 inclusive.
251 float u = (float)(col / dim_minus_one_f64);
252
253 // These need to go from -0.5 to 0.5 for the rotation
254 struct xrt_vec2 uv = {u - 0.5f, v - 0.5f};
255 m_mat2x2_transform_vec2(&rot, &uv, &uv);
256 uv.x += 0.5f;
257 uv.y += 0.5f;
258
259 struct xrt_uv_triplet result;
260 xrt_result_t xret = xrt_device_compute_distortion(xdev, view, uv.x, uv.y, &result);
261 if (xret != XRT_SUCCESS) {
262 VK_CHK_WITH_GOTO(VK_ERROR_UNKNOWN, "xrt_device_compute_distortion", err_buffers);
263 }
264
265 r->pixels[row][col] = result.r;
266 g->pixels[row][col] = result.g;
267 b->pixels[row][col] = result.b;
268 }
269 }
270
271 render_buffer_unmap(vk, r_buffer);
272 render_buffer_unmap(vk, g_buffer);
273 render_buffer_unmap(vk, b_buffer);
274
275 return VK_SUCCESS;
276
277err_buffers:
278 render_buffer_fini(vk, r_buffer);
279 render_buffer_fini(vk, g_buffer);
280 render_buffer_fini(vk, b_buffer);
281
282 return ret;
283}
284
285static bool
286render_distortion_buffer_init(struct render_resources *r,
287 struct vk_bundle *vk,
288 struct xrt_device *xdev,
289 bool pre_rotate)
290{
291 struct render_buffer bufs[RENDER_DISTORTION_IMAGES_SIZE];
292 VkDeviceMemory device_memories[RENDER_DISTORTION_IMAGES_SIZE];
293 VkImage images[RENDER_DISTORTION_IMAGES_SIZE];
294 VkImageView image_views[RENDER_DISTORTION_IMAGES_SIZE];
295 VkCommandBuffer upload_buffer = VK_NULL_HANDLE;
296 VkResult ret;
297
298
299 /*
300 * Basics
301 */
302
303 for (uint32_t i = 0; i < r->view_count; ++i) {
304 render_calc_uv_to_tangent_lengths_rect(&xdev->hmd->distortion.fov[i], &r->distortion.uv_to_tanangle[i]);
305 }
306
307 /*
308 * Buffers with data to upload.
309 * view_count=2,RRGGBB
310 * view_count=3,RRRGGGBBB
311 */
312 for (uint32_t i = 0; i < r->view_count; ++i) {
313 ret = create_and_fill_in_distortion_buffer_for_view(vk, xdev, &bufs[i], &bufs[r->view_count + i],
314 &bufs[2 * r->view_count + i], i, pre_rotate);
315 VK_CHK_WITH_GOTO(ret, "create_and_fill_in_distortion_buffer_for_view", err_resources);
316 }
317
318 /*
319 * Command submission.
320 */
321
322 struct vk_cmd_pool *pool = &r->distortion_pool;
323
324 vk_cmd_pool_lock(pool);
325
326 ret = vk_cmd_pool_create_and_begin_cmd_buffer_locked(vk, pool, 0, &upload_buffer);
327 VK_CHK_WITH_GOTO(ret, "vk_cmd_pool_create_and_begin_cmd_buffer_locked", err_unlock);
328 VK_NAME_COMMAND_BUFFER(vk, upload_buffer, "render_resources distortion command buffer");
329
330 for (uint32_t i = 0; i < RENDER_DISTORTION_IMAGES_COUNT(r); i++) {
331 ret = create_and_queue_upload_locked( //
332 vk, // vk_bundle
333 pool, // pool
334 upload_buffer, // cmd
335 bufs[i].buffer, // src_buffer
336 &device_memories[i], // out_image_device_memory
337 &images[i], // out_image
338 &image_views[i]); // out_image_view
339 VK_CHK_WITH_GOTO(ret, "create_and_queue_upload_locked", err_cmd);
340 }
341
342 ret = vk_cmd_pool_end_submit_wait_and_free_cmd_buffer_locked(vk, pool, upload_buffer);
343 VK_CHK_WITH_GOTO(ret, "vk_cmd_pool_end_submit_wait_and_free_cmd_buffer_locked", err_cmd);
344
345 vk_cmd_pool_unlock(pool);
346
347 /*
348 * Write results.
349 */
350
351 r->distortion.pre_rotated = pre_rotate;
352
353 for (uint32_t i = 0; i < RENDER_DISTORTION_IMAGES_COUNT(r); i++) {
354 r->distortion.device_memories[i] = device_memories[i];
355 r->distortion.images[i] = images[i];
356 r->distortion.image_views[i] = image_views[i];
357 }
358
359
360 /*
361 * Tidy
362 */
363
364 for (uint32_t i = 0; i < RENDER_DISTORTION_IMAGES_COUNT(r); i++) {
365 render_buffer_fini(vk, &bufs[i]);
366 }
367
368 return true;
369
370
371err_cmd:
372 vk->vkFreeCommandBuffers(vk->device, pool->pool, 1, &upload_buffer);
373
374err_unlock:
375 vk_cmd_pool_unlock(pool);
376
377err_resources:
378 for (uint32_t i = 0; i < RENDER_DISTORTION_IMAGES_COUNT(r); i++) {
379 D(ImageView, image_views[i]);
380 D(Image, images[i]);
381 DF(Memory, device_memories[i]);
382 render_buffer_fini(vk, &bufs[i]);
383 }
384
385 return false;
386}
387
388
389/*
390 *
391 * 'Exported' functions.
392 *
393 */
394
395void
396render_distortion_images_fini(struct render_resources *r)
397{
398 struct vk_bundle *vk = r->vk;
399
400 for (uint32_t i = 0; i < RENDER_DISTORTION_IMAGES_COUNT(r); i++) {
401 D(ImageView, r->distortion.image_views[i]);
402 D(Image, r->distortion.images[i]);
403 DF(Memory, r->distortion.device_memories[i]);
404 }
405}
406
407bool
408render_distortion_images_ensure(struct render_resources *r,
409 struct vk_bundle *vk,
410 struct xrt_device *xdev,
411 bool pre_rotate)
412{
413 if (r->distortion.image_views[0] == VK_NULL_HANDLE || pre_rotate != r->distortion.pre_rotated) {
414 render_distortion_images_fini(r);
415 return render_distortion_buffer_init(r, vk, xdev, pre_rotate);
416 }
417
418 return true;
419}