// The open source OpenXR runtime
1// Copyright 2021-2023, Collabora Ltd.
2// Copyright 2025, NVIDIA CORPORATION.
3// Author: Jakob Bornecrantz <jakob@collabora.com>
4// Author: Christoph Haag <christoph.haag@collabora.com>
5// SPDX-License-Identifier: BSL-1.0
6
7#version 460
8#extension GL_GOOGLE_include_directive : require
9
10#include "srgb.inc.glsl"
11#include "layer_defines.inc.glsl"
12
13
// Per-layer parameters, mirrored from CPU code (std140-compatible).
struct layer_data
{
	// One of the LAYER_COMP_TYPE_* values from layer_defines.inc.glsl.
	uint layer_type;
	// Non-zero when the layer's colour is NOT premultiplied by alpha.
	uint unpremultiplied_alpha;

	// This struct is used in an array, gets padded to vec4.
	uint _padding0;
	uint _padding1;
};
23
// Which entries of the source[] sampler array belong to a layer.
struct image_info
{
	// Index into source[] for this layer's colour image.
	uint color_image_index;
	// Index into source[] for this layer's depth image
	// (NOTE(review): not read in this shader — presumably for a depth-aware variant; confirm).
	uint depth_image_index;

	// This struct is used in an array, gets padded to vec4.
	uint _padding0;
	uint _padding1;
};
33
34
// π.
const float PI = acos(-1);

// NOTE(review): constant_id 0 is not used in this shader — presumably claimed
// by a sibling shader sharing the same pipeline creation code; confirm.

// Should we do timewarp.
layout(constant_id = 1) const bool do_timewarp = false;
// Whether to convert the final colour from linear to sRGB before imageStore.
layout(constant_id = 2) const bool do_color_correction = true;

//! This is always set by the render_resource pipeline creation code to the actual limit.
layout(constant_id = 3) const int RENDER_MAX_LAYERS = 128;
//! Number of combined image samplers in the source[] descriptor array.
layout(constant_id = 4) const int SAMPLER_ARRAY_SIZE = 16;

// One invocation per target pixel, dispatched in 8x8 tiles.
layout(local_size_x = 8, local_size_y = 8, local_size_z = 1) in;
46
// layer 0 color, [optional: layer 0 depth], layer 1, ...
layout(set = 0, binding = 0) uniform sampler2D source[SAMPLER_ARRAY_SIZE];
// NOTE(review): binding 1 is skipped here — presumably reserved for another
// descriptor in a related pipeline; confirm against the descriptor set layout.
layout(set = 0, binding = 2) uniform writeonly restrict image2D target;
layout(set = 0, binding = 3, std140) uniform restrict Config
{
	// Target rect in pixels: xy = offset, zw = extent (see main()).
	ivec4 view;
	// Only .x is read: the number of valid entries in the per-layer arrays.
	ivec4 layer_count;

	// Shared uv -> tangent-space transform: zw = scale, xy = offset (used via fma).
	vec4 pre_transform;
	// Per-layer sub-image transform (incl. OpenGL y-flip): zw = scale, xy = offset.
	vec4 post_transform[RENDER_MAX_LAYERS];

	// Per-layer data.
	layer_data layer_data[RENDER_MAX_LAYERS];

	// which image/sampler(s) correspond to each layer
	image_info image_info[RENDER_MAX_LAYERS];

	// shared between cylinder and equirect2
	mat4 mv_inverse[RENDER_MAX_LAYERS];


	// for cylinder layer: x = radius, y = central angle, z = aspect ratio.
	vec4 cylinder_data[RENDER_MAX_LAYERS];


	// for equirect2 layer: x = radius, y = central horizontal angle,
	// z = upper vertical angle, w = lower vertical angle.
	vec4 eq2_data[RENDER_MAX_LAYERS];


	// for projection layers

	// timewarp matrices
	mat4 transform[RENDER_MAX_LAYERS];


	// for quad layers

	// all quad transforms and coordinates are in view space
	vec4 quad_position[RENDER_MAX_LAYERS];
	vec4 quad_normal[RENDER_MAX_LAYERS];
	mat4 inverse_quad_transform[RENDER_MAX_LAYERS];

	// quad extent in world scale
	// NOTE: std140 pads each vec2 array element to a vec4 stride — the CPU
	// side struct must match that padding.
	vec2 quad_extent[RENDER_MAX_LAYERS];
} ubo;
92
93
// Map an integer target-pixel position to a [0 .. 1] view uv, sampling at
// the texel centre.
vec2 position_to_view_uv(ivec2 extent, uint ix, uint iy)
{
	// Index as floating point coordinates.
	vec2 coords = vec2(float(ix), float(iy));

	// Size of one pixel in normalised [0 .. 1] view space.
	vec2 pixel_size = vec2(1.0 / float(extent.x), 1.0 / float(extent.y));

	// Advance one pixel-size step per target pixel.
	vec2 uv = coords * pixel_size;

	// Offset by half a pixel so we sample at the texel centre.
	uv = uv + pixel_size / 2.0;

	return uv;
}
110
// Apply the per-layer sub-image rect (and OpenGL y-flip) to a view uv.
vec2 transform_uv_subimage(vec2 uv, uint layer)
{
	// zw is the scale, xy the offset — one fused multiply-add does it all.
	return fma(uv, ubo.post_transform[layer].zw, ubo.post_transform[layer].xy);
}
121
// Reproject a view uv through the per-layer timewarp matrix, then apply the
// sub-image rect.
vec2 transform_uv_timewarp(vec2 uv, uint layer)
{
	// Homogeneous point on the unit-Z plane.
	vec4 point = vec4(uv, -1, 1);

	// [0 .. 1] uv to tangent space.
	point.xy = fma(point.xy, ubo.pre_transform.zw, ubo.pre_transform.xy);
	point.y = -point.y; // Flip to OpenXR coordinate system.

	// Apply timewarp matrix, then perspective divide (guarding against tiny w).
	point = ubo.transform[layer] * point;
	point.xy = point.xy * (1.0 / max(point.w, 0.00001));

	// NDC [-1, 1] back to [0, 1].
	point.xy = point.xy * 0.5 + 0.5;

	// Per-layer sub-image rect and OpenGL y-flip.
	point.xy = fma(point.xy, ubo.post_transform[layer].zw, ubo.post_transform[layer].xy);

	return point.xy;
}
143
// Dispatch to the timewarp or plain sub-image uv transform; selected at
// pipeline creation time via the do_timewarp specialisation constant.
vec2 transform_uv(vec2 uv, uint layer)
{
	return do_timewarp ? transform_uv_timewarp(uv, layer) : transform_uv_subimage(uv, layer);
}
152
/*
 * Sample a cylinder layer: shoot a ray from the view origin through this
 * texel, intersect it with a y-axis-aligned cylinder in the layer's model
 * space, and sample the layer texture at the hit point. Returns vec4(0)
 * where the ray misses the configured cylinder section.
 */
vec4 do_cylinder(vec2 view_uv, uint layer)
{
	// Get ray position in model space.
	const vec3 ray_origin = (ubo.mv_inverse[layer] * vec4(0, 0, 0, 1)).xyz;

	// [0 .. 1] to tangent lengths (at unit Z).
	const vec2 uv = fma(view_uv, ubo.pre_transform.zw, ubo.pre_transform.xy);

	// With Z at the unit plane and flip y for OpenXR coordinate system,
	// transform the ray into model space.
	const vec3 ray_dir = normalize((ubo.mv_inverse[layer] * vec4(uv.x, -uv.y, -1, 0)).xyz);

	const float radius = ubo.cylinder_data[layer].x;
	const float central_angle = ubo.cylinder_data[layer].y;
	const float aspect_ratio = ubo.cylinder_data[layer].z;

	// Direction from the cylinder axis to the hit point.
	vec3 dir_from_cyl;
	// CPU code will set +INFINITY to zero.
	if (radius == 0) {
		// Infinite radius: sample directly along the view ray.
		dir_from_cyl = ray_dir;
	} else {
		// Find if the cylinder intersects with the ray direction
		// Inspired by Inigo Quilez
		// https://iquilezles.org/articles/intersectors/

		// The cylinder is aligned with the model-space y axis.
		const vec3 axis = vec3(0.f, 1.f, 0.f);

		// Quadratic coefficients of the ray/cylinder intersection,
		// with the axis-parallel components projected out.
		float card = dot(axis, ray_dir);
		float caoc = dot(axis, ray_origin);
		float a = 1.f - card * card;
		float b = dot(ray_origin, ray_dir) - caoc * card;
		float c = dot(ray_origin, ray_origin) - caoc * caoc - radius * radius;
		float h = b * b - a * c; // Discriminant (scaled).
		if(h < 0.f) {
			// no intersection
			return vec4(0.f);
		}

		h = sqrt(h);
		// Near and far intersection distances along the ray.
		vec2 distances = vec2(-b - h, -b + h) / a;

		// Far hit behind the ray origin: cylinder is entirely behind us.
		if (distances.y < 0) {
			return vec4(0.f);
		}

		// Use the far hit so the inside of the cylinder is visible.
		dir_from_cyl = normalize(ray_origin + (ray_dir * distances.y));
	}

	const float lon = atan(dir_from_cyl.x, -dir_from_cyl.z) / (2 * PI) + 0.5; // => [0, 1]
	// float lat = -asin(dir_from_cyl.y); // => [-π/2, π/2]
	// float y = tan(lat); // => [-inf, inf]
	// simplified: -y/sqrt(1 - y^2)
	const float y = -dir_from_cyl.y / sqrt(1 - (dir_from_cyl.y * dir_from_cyl.y)); // => [-inf, inf]

	vec4 out_color = vec4(0.f);

#ifdef DEBUG
	// Debug grid: red seam at lon 0, white lines every 0.05, gradient elsewhere.
	const int lon_int = int(lon * 1000.f);
	const int y_int = int(y * 1000.f);

	if (lon < 0.001 && lon > -0.001) {
		out_color = vec4(1, 0, 0, 1);
	} else if (lon_int % 50 == 0) {
		out_color = vec4(1, 1, 1, 1);
	} else if (y_int % 50 == 0) {
		out_color = vec4(1, 1, 1, 1);
	} else {
		out_color = vec4(lon, y, 0, 1);
	}
#endif

	// Central angle as a fraction of the full circle.
	const float chan = central_angle / (PI * 2.f);

	// height in radii, radius only matters for determining intersection
	const float height = central_angle * aspect_ratio;

	// Normalize [0, 2π] to [0, 1]
	const float uhan = 0.5 + chan / 2.f;
	const float lhan = 0.5 - chan / 2.f;

	// Vertical bounds of the cylinder section, centred on y = 0.
	const float ymin = -height / 2;
	const float ymax = height / 2;

	if (y < ymax && y > ymin && lon < uhan && lon > lhan) {
		// map configured display region to whole texture
		vec2 offset = vec2(lhan, ymin);
		vec2 extent = vec2(uhan - lhan, ymax - ymin);
		vec2 sample_point = (vec2(lon, y) - offset) / extent;

		// Then map into the layer's sub-image rect.
		vec2 uv_sub = fma(sample_point, ubo.post_transform[layer].zw, ubo.post_transform[layer].xy);

		uint index = ubo.image_info[layer].color_image_index;
#ifdef DEBUG
		// Blend the texture on top of the debug grid.
		out_color += texture(source[index], uv_sub) / 2.f;
#else

		out_color = texture(source[index], uv_sub);
#endif
	} else {
		// Outside the section: keep transparent (or the debug grid).
		out_color += vec4(0.f);
	}

	return out_color;
}
257
/*
 * Sample an equirect2 layer: shoot a ray from the view origin through this
 * texel, intersect it with a sphere in the layer's model space, and sample
 * the equirectangular texture at the hit direction. Returns vec4(0) where
 * the ray misses the configured sphere section.
 */
vec4 do_equirect2(vec2 view_uv, uint layer)
{
	// Get ray position in model space.
	const vec3 ray_origin = (ubo.mv_inverse[layer] * vec4(0, 0, 0, 1)).xyz;

	// [0 .. 1] to tangent lengths (at unit Z).
	const vec2 uv = fma(view_uv, ubo.pre_transform.zw, ubo.pre_transform.xy);

	// With Z at the unit plane and flip y for OpenXR coordinate system,
	// transform the ray into model space.
	const vec3 ray_dir = normalize((ubo.mv_inverse[layer] * vec4(uv.x, -uv.y, -1, 0)).xyz);

	const float radius = ubo.eq2_data[layer].x;
	const float central_horizontal_angle = ubo.eq2_data[layer].y;
	const float upper_vertical_angle = ubo.eq2_data[layer].z;
	const float lower_vertical_angle = ubo.eq2_data[layer].w;

	// Direction from the sphere centre to the hit point.
	vec3 dir_from_sph;
	// CPU code will set +INFINITY to zero.
	if (radius == 0) {
		// Infinite radius: sample directly along the view ray.
		dir_from_sph = ray_dir;
	} else {
		// Find if the sphere intersects with the ray using Pythagoras'
		// theorem with a triangle formed by QC, H and the radius.
		// Inspired by Inigo Quilez
		// https://iquilezles.org/articles/intersectors/

		const float B = dot(ray_origin, ray_dir);
		// QC is the point where the ray passes closest
		const vec3 QC = ray_origin - B * ray_dir;
		// If the distance is farther than the radius, no hit
		float H = radius * radius - dot(QC, QC);
		if (H < 0.0) {
			// no intersection
			return vec4(0.f);
		}

		H = sqrt(H);

		// Near and far intersection distances along the ray.
		vec2 distances = vec2(-B - H, -B + H);
		// Far hit behind the ray origin: sphere entirely behind us.
		if (distances.y < 0) {
			return vec4(0.f);
		}

		// Use the far hit so the inside of the sphere is visible.
		dir_from_sph = normalize(ray_origin + (ray_dir * distances.y));
	}

	// Longitude and latitude of the hit direction, both mapped to [0, 1].
	const float lon = atan(dir_from_sph.x, -dir_from_sph.z) / (2 * PI) + 0.5;
	const float lat = acos(dir_from_sph.y) / PI;

	vec4 out_color = vec4(0.f);

#ifdef DEBUG
	// Debug grid: red seam at lon 0, white lines every 0.05, gradient elsewhere.
	const int lon_int = int(lon * 1000.f);
	const int lat_int = int(lat * 1000.f);

	if (lon < 0.001 && lon > -0.001) {
		out_color = vec4(1, 0, 0, 1);
	} else if (lon_int % 50 == 0) {
		out_color = vec4(1, 1, 1, 1);
	} else if (lat_int % 50 == 0) {
		out_color = vec4(1, 1, 1, 1);
	} else {
		out_color = vec4(lon, lat, 0, 1);
	}
#endif

	// Central horizontal angle as a fraction of the full circle.
	const float chan = central_horizontal_angle / (PI * 2.0f);

	// Normalize [0, 2π] to [0, 1]
	const float uhan = 0.5 + chan / 2.0f;
	const float lhan = 0.5 - chan / 2.0f;

	// Normalize [-π/2, π/2] to [0, 1]
	const float uvan = upper_vertical_angle / PI + 0.5f;
	const float lvan = lower_vertical_angle / PI + 0.5f;

	if (lat < uvan && lat > lvan && lon < uhan && lon > lhan) {
		// map configured display region to whole texture
		vec2 ll_offset = vec2(lhan, lvan);
		vec2 ll_extent = vec2(uhan - lhan, uvan - lvan);
		vec2 sample_point = (vec2(lon, lat) - ll_offset) / ll_extent;

		// Then map into the layer's sub-image rect.
		vec2 uv_sub = fma(sample_point, ubo.post_transform[layer].zw, ubo.post_transform[layer].xy);

		uint index = ubo.image_info[layer].color_image_index;
#ifdef DEBUG
		// Blend the texture on top of the debug grid.
		out_color += texture(source[index], uv_sub) / 2.0;
#else

		out_color = texture(source[index], uv_sub);
#endif
	} else {
		// Outside the section: keep transparent (or the debug grid).
		out_color += vec4(0.f);
	}

	return out_color;
}
356
/*
 * Sample a projection layer: transform the view uv into the layer's
 * sub-image (reprojecting through timewarp when enabled) and sample the
 * layer's colour image there.
 */
vec4 do_projection(vec2 view_uv, uint layer)
{
	// Which sampler in the source[] array holds this layer's colour image.
	uint source_image_index = ubo.image_info[layer].color_image_index;

	// Do any transformation needed.
	vec2 uv = transform_uv(view_uv, layer);

	// Sample the source; texture() already returns a vec4, so the former
	// vec4(...).rgba wrapper was a redundant no-op conversion.
	return texture(source[source_image_index], uv);
}
369
// Turn a view uv into a normalized view-space ray direction.
vec3 get_direction(vec2 uv)
{
	// Skip the DIM/STRETCH/OFFSET stuff and go directly to values.
	vec4 point = vec4(uv, -1, 1);

	// From uv to tan angle (tangent space).
	point.xy = fma(point.xy, ubo.pre_transform.zw, ubo.pre_transform.xy);
	point.y = -point.y; // Flip to OpenXR coordinate system.

	// point.xy are now tangents, so point.xyz is a position on the plane at
	// Z -1 whose normal is parallel to the Z axis; normalizing that position
	// yields the direction of the ray through it.
	return normalize(point.xyz);
}
388
/*
 * Sample a quad layer: intersect the per-texel view ray with the quad's
 * plane in view space and sample the layer texture inside the quad bounds.
 * Returns vec4(0) when the ray misses the quad, hits its back face, or the
 * quad is behind the camera.
 */
vec4 do_quad(vec2 view_uv, uint layer)
{
	// Which sampler in the source[] array holds this layer's colour image.
	uint source_image_index = ubo.image_info[layer].color_image_index;

	// center point of the plane in view space.
	vec3 quad_position = ubo.quad_position[layer].xyz;

	// normal vector of the plane.
	vec3 normal = ubo.quad_normal[layer].xyz;
	normal = normalize(normal);

	// coordinate system is the view space, therefore the camera/eye position is in the origin.
	vec3 camera = vec3(0.0, 0.0, 0.0);

	// default color white should never be visible
	vec4 colour = vec4(1.0, 1.0, 1.0, 1.0);

	//! @todo can we get better "pixel stuck" on projection layers with timewarp uv?
	// never use the timewarp uv here because it depends on the projection layer pose
	vec2 uv = view_uv;

	/*
	 * To fill in the view_uv texel on the target texture, an imaginary ray is shot through texels on the target
	 * texture. When this imaginary ray hits a quad layer, it means that when the respective color at the hit
	 * intersection is picked for the current view_uv texel, the final image as seen through the headset will
	 * show this view_uv texel at the respective location.
	 */
	vec3 direction = get_direction(uv);
	direction = normalize(direction);

	float denominator = dot(direction, normal);

	// denominator is negative when vectors point towards each other, 0 when perpendicular,
	// and positive when vectors point in a similar direction, i.e. direction vector faces quad backface, which we don't render.
	if (denominator < 0.00001) {
		// shortest distance between origin and plane defined by normal + quad_position
		float dist = dot(camera - quad_position, normal);

		// distance between origin and intersection point on the plane.
		// (dot(camera, normal) is zero with camera at the origin.)
		float intersection_dist = (dot(camera, normal) + dist) / -denominator;

		// layer is behind camera as defined by direction vector
		if (intersection_dist < 0) {
			colour = vec4(0.0, 0.0, 0.0, 0.0);
			return colour;
		}

		// The hit point on the plane, in view space.
		vec3 intersection = camera + intersection_dist * direction;

		// ps for "plane space"
		vec2 intersection_ps = (ubo.inverse_quad_transform[layer] * vec4(intersection.xyz, 1.0)).xy;

		// The quad extends half its extent each way from its centre.
		bool in_plane_bounds =
			intersection_ps.x >= - ubo.quad_extent[layer].x / 2. && //
			intersection_ps.x <= ubo.quad_extent[layer].x / 2. && //
			intersection_ps.y >= - ubo.quad_extent[layer].y / 2. && //
			intersection_ps.y <= ubo.quad_extent[layer].y / 2.;

		if (in_plane_bounds) {
			// intersection_ps is in [-quad_extent .. quad_extent]. Transform to [0 .. quad_extent], then scale to [ 0 .. 1 ] for sampling
			vec2 plane_uv = (intersection_ps.xy + ubo.quad_extent[layer] / 2.) / ubo.quad_extent[layer];

			// sample on the desired subimage, not the entire texture
			plane_uv = fma(plane_uv, ubo.post_transform[layer].zw, ubo.post_transform[layer].xy);

			colour = texture(source[source_image_index], plane_uv);
		} else {
			// intersection on infinite plane outside of plane bounds
			colour = vec4(0.0, 0.0, 0.0, 0.0);
			return colour;
		}
	} else {
		// no intersection with front face of infinite plane or perpendicular
		colour = vec4(0.0, 0.0, 0.0, 0.0);
		return colour;
	}

	return vec4(colour);
}
468
// Composite all active layers, back to front, into a single premultiplied
// RGBA value for this texel.
vec4 do_layers(vec2 view_uv)
{
	vec4 result = vec4(0, 0, 0, 0);

	int count = ubo.layer_count.x;
	for (uint i = 0; i < count; i++) {
		vec4 src = vec4(0, 0, 0, 0);

		// Sample this layer according to its type; unknown types stay transparent.
		uint type = ubo.layer_data[i].layer_type;
		if (type == LAYER_COMP_TYPE_QUAD) {
			src = do_quad(view_uv, i);
		} else if (type == LAYER_COMP_TYPE_CYLINDER) {
			src = do_cylinder(view_uv, i);
		} else if (type == LAYER_COMP_TYPE_EQUIRECT2) {
			src = do_equirect2(view_uv, i);
		} else if (type == LAYER_COMP_TYPE_PROJECTION) {
			src = do_projection(view_uv, i);
		}

		if (ubo.layer_data[i].unpremultiplied_alpha != 0) {
			// Straight alpha: source weighted by its own alpha.
			result.rgb = mix(result.rgb, src.rgb, src.a);
		} else {
			// Premultiplied alpha: source blend factor of one.
			result.rgb = (result.rgb * (1 - src.a)) + src.rgb;
		}
		// Standard over-operator alpha accumulation.
		result.a = fma((1.f - src.a), result.a, src.a);
	}

	return result;
}
505
void main()
{
	// One invocation per target pixel.
	uint col = gl_GlobalInvocationID.x;
	uint row = gl_GlobalInvocationID.y;

	ivec2 view_offset = ivec2(ubo.view.xy);
	ivec2 view_extent = ivec2(ubo.view.zw);

	// The dispatch is rounded up to workgroup size; skip out-of-view texels.
	if (col >= view_extent.x || row >= view_extent.y) {
		return;
	}

	vec2 view_uv = position_to_view_uv(view_extent, col, row);

	vec4 colour = do_layers(view_uv);

	if (do_color_correction) {
		// No automatic conversion in hardware here, do the sRGB encode manually.
		colour.rgb = from_linear_to_srgb(colour.rgb);
	}

	imageStore(target, ivec2(view_offset.x + col, view_offset.y + row), colour);
}