Monado — the open source OpenXR runtime.
Compositor layer shader, branch mr/scanout-values (514 lines, 15 kB, view raw).
// Copyright 2021-2023, Collabora Ltd.
// Author: Jakob Bornecrantz <jakob@collabora.com>
// Author: Christoph Haag <christoph.haag@collabora.com>
// SPDX-License-Identifier: BSL-1.0

#version 460
#extension GL_GOOGLE_include_directive : require

#include "srgb.inc.glsl"

/*
 * Compute shader that composites a list of OpenXR composition layers
 * (projection, quad, cylinder, equirect2) into a single target image.
 */

//! @todo should this be a specialization const?
#define XRT_LAYER_PROJECTION 0
#define XRT_LAYER_PROJECTION_DEPTH 1
#define XRT_LAYER_QUAD 2
#define XRT_LAYER_CUBE 3
#define XRT_LAYER_CYLINDER 4
#define XRT_LAYER_EQUIRECT1 5
#define XRT_LAYER_EQUIRECT2 6

// acos(-1) == π, evaluated once at shader compile time.
const float PI = acos(-1);

// Should we do timewarp (reprojection of projection layers).
layout(constant_id = 1) const bool do_timewarp = false;
// Should we convert the final colour from linear to sRGB before storing.
layout(constant_id = 2) const bool do_color_correction = true;

//! This is always set by the render_resource pipeline creation code to the actual limit.
layout(constant_id = 3) const int RENDER_MAX_LAYERS = 128;
//! Size of the combined image/sampler descriptor array, set at pipeline creation.
layout(constant_id = 4) const int SAMPLER_ARRAY_SIZE = 16;

// One invocation per target pixel, dispatched in 8x8 tiles.
layout(local_size_x = 8, local_size_y = 8, local_size_z = 1) in;

// layer 0 color, [optional: layer 0 depth], layer 1, ...
layout(set = 0, binding = 0) uniform sampler2D source[SAMPLER_ARRAY_SIZE];
// NOTE(review): binding 1 is skipped here — presumably reserved for depth
// images by the render_resource setup code; confirm against the CPU side.
layout(set = 0, binding = 2) uniform writeonly restrict image2D target;
layout(set = 0, binding = 3, std140) uniform restrict Config
{
	// xy = target offset in pixels, zw = view extent in pixels.
	ivec4 view;
	// Only .x is used: number of active layers.
	ivec4 layer_count;

	// Used as fma(uv, pre_transform.zw, pre_transform.xy) to go from
	// [0, 1] view UV to tangent-space values; zw scale, xy offset.
	vec4 pre_transform;
	// Per-layer sub-image UV transform, same fma() layout as above.
	// Also deals with the OpenGL Y-flip.
	vec4 post_transform[RENDER_MAX_LAYERS];

	// corresponds to enum xrt_layer_type (.x), .y != 0 means unpremultiplied alpha
	uvec2 layer_type_and_unpremultiplied[RENDER_MAX_LAYERS];

	// which image/sampler(s) correspond to each layer
	ivec2 images_samplers[RENDER_MAX_LAYERS];

	// shared between cylinder and equirect2
	mat4 mv_inverse[RENDER_MAX_LAYERS];


	// for cylinder layer
	// x = radius, y = central angle, z = aspect ratio.
	vec4 cylinder_data[RENDER_MAX_LAYERS];


	// for equirect2 layer
	// x = radius, y = central horizontal angle,
	// z = upper vertical angle, w = lower vertical angle.
	vec4 eq2_data[RENDER_MAX_LAYERS];


	// for projection layers

	// timewarp matrices
	mat4 transform[RENDER_MAX_LAYERS];


	// for quad layers

	// all quad transforms and coordinates are in view space
	vec4 quad_position[RENDER_MAX_LAYERS];
	vec4 quad_normal[RENDER_MAX_LAYERS];
	mat4 inverse_quad_transform[RENDER_MAX_LAYERS];

	// quad extent in world scale
	vec2 quad_extent[RENDER_MAX_LAYERS];
} ubo;


/*
 * Turn an integer target pixel position (ix, iy) into a [0, 1] view UV,
 * sampled at the pixel centre.
 */
vec2 position_to_view_uv(ivec2 extent, uint ix, uint iy)
{
	// Turn the index into floating point.
	vec2 xy = vec2(float(ix), float(iy));

	// The inverse of the extent of a view image is the pixel size in [0 .. 1] space.
	vec2 extent_pixel_size = vec2(1.0 / float(extent.x), 1.0 / float(extent.y));

	// Per-target pixel we move the size of the pixels.
	vec2 view_uv = xy * extent_pixel_size;

	// Emulate a triangle sample position by offset half target pixel size.
	view_uv = view_uv + extent_pixel_size / 2.0;

	return view_uv;
}

/*
 * Map a view UV onto the layer's sub-image rectangle (no timewarp).
 */
vec2 transform_uv_subimage(vec2 uv, uint layer)
{
	vec2 values = uv;

	// To deal with OpenGL flip and sub image view.
	values.xy = fma(values.xy, ubo.post_transform[layer].zw, ubo.post_transform[layer].xy);

	// Ready to be used.
	return values.xy;
}

/*
 * Map a view UV onto the layer's sub-image rectangle, applying the
 * per-layer timewarp (reprojection) matrix on the way.
 */
vec2 transform_uv_timewarp(vec2 uv, uint layer)
{
	// Point on the Z = -1 plane, homogeneous coordinates.
	vec4 values = vec4(uv, -1, 1);

	// From uv to tan angle (tangent space).
	values.xy = fma(values.xy, ubo.pre_transform.zw, ubo.pre_transform.xy);
	values.y = -values.y; // Flip to OpenXR coordinate system.

	// Timewarp.
	values = ubo.transform[layer] * values;
	// Perspective divide, clamped to avoid division by zero/negative w.
	values.xy = values.xy * (1.0 / max(values.w, 0.00001));

	// From [-1, 1] to [0, 1]
	values.xy = values.xy * 0.5 + 0.5;

	// To deal with OpenGL flip and sub image view.
	values.xy = fma(values.xy, ubo.post_transform[layer].zw, ubo.post_transform[layer].xy);

	// Done.
	return values.xy;
}

/*
 * Select the UV transform based on the do_timewarp specialization constant.
 */
vec2 transform_uv(vec2 uv, uint layer)
{
	if (do_timewarp) {
		return transform_uv_timewarp(uv, layer);
	} else {
		return transform_uv_subimage(uv, layer);
	}
}

/*
 * Sample a cylinder layer: shoot a ray through the target texel, intersect
 * it with the layer's cylinder (axis = model-space Y) and sample the layer
 * texture at the hit point. Returns vec4(0) on a miss.
 */
vec4 do_cylinder(vec2 view_uv, uint layer)
{
	// Get ray position in model space.
	const vec3 ray_origin = (ubo.mv_inverse[layer] * vec4(0, 0, 0, 1)).xyz;

	// [0 .. 1] to tangent lengths (at unit Z).
	const vec2 uv = fma(view_uv, ubo.pre_transform.zw, ubo.pre_transform.xy);

	// With Z at the unit plane and flip y for OpenXR coordinate system,
	// transform the ray into model space.
	const vec3 ray_dir = normalize((ubo.mv_inverse[layer] * vec4(uv.x, -uv.y, -1, 0)).xyz);

	const float radius = ubo.cylinder_data[layer].x;
	const float central_angle = ubo.cylinder_data[layer].y;
	const float aspect_ratio = ubo.cylinder_data[layer].z;

	vec3 dir_from_cyl;
	// CPU code will set +INFINITY to zero.
	if (radius == 0) {
		// Infinite radius: treat the ray direction itself as the
		// direction from the cylinder axis.
		dir_from_cyl = ray_dir;
	} else {
		// Find if the cylinder intersects with the ray direction
		// Inspired by Inigo Quilez
		// https://iquilezles.org/articles/intersectors/

		const vec3 axis = vec3(0.f, 1.f, 0.f);

		float card = dot(axis, ray_dir);
		float caoc = dot(axis, ray_origin);
		// Quadratic coefficients for the infinite-cylinder intersection.
		float a = 1.f - card * card;
		float b = dot(ray_origin, ray_dir) - caoc * card;
		float c = dot(ray_origin, ray_origin) - caoc * caoc - radius * radius;
		float h = b * b - a * c; // discriminant
		if (h < 0.f) {
			// no intersection
			return vec4(0.f);
		}

		h = sqrt(h);
		vec2 distances = vec2(-b - h, -b + h) / a;

		// Far hit behind the ray origin: cylinder entirely behind us.
		if (distances.y < 0) {
			return vec4(0.f);
		}

		// Use the far hit — we render the inside of the cylinder.
		dir_from_cyl = normalize(ray_origin + (ray_dir * distances.y));
	}

	const float lon = atan(dir_from_cyl.x, -dir_from_cyl.z) / (2 * PI) + 0.5; // => [0, 1]
	// float lat = -asin(dir_from_cyl.y); // => [-π/2, π/2]
	// float y = tan(lat); // => [-inf, inf]
	// simplified: -y/sqrt(1 - y^2)
	const float y = -dir_from_cyl.y / sqrt(1 - (dir_from_cyl.y * dir_from_cyl.y)); // => [-inf, inf]

	vec4 out_color = vec4(0.f);

#ifdef DEBUG
	// Debug grid: red stripe at lon == 0, white grid lines every 0.05.
	const int lon_int = int(lon * 1000.f);
	const int y_int = int(y * 1000.f);

	if (lon < 0.001 && lon > -0.001) {
		out_color = vec4(1, 0, 0, 1);
	} else if (lon_int % 50 == 0) {
		out_color = vec4(1, 1, 1, 1);
	} else if (y_int % 50 == 0) {
		out_color = vec4(1, 1, 1, 1);
	} else {
		out_color = vec4(lon, y, 0, 1);
	}
#endif

	// Central angle normalized from [0, 2π] to [0, 1].
	const float chan = central_angle / (PI * 2.f);

	// height in radii, radius only matters for determining intersection
	const float height = central_angle * aspect_ratio;

	// Normalize [0, 2π] to [0, 1]
	const float uhan = 0.5 + chan / 2.f; // upper horizontal bound
	const float lhan = 0.5 - chan / 2.f; // lower horizontal bound

	const float ymin = -height / 2;
	const float ymax = height / 2;

	if (y < ymax && y > ymin && lon < uhan && lon > lhan) {
		// map configured display region to whole texture
		vec2 offset = vec2(lhan, ymin);
		vec2 extent = vec2(uhan - lhan, ymax - ymin);
		vec2 sample_point = (vec2(lon, y) - offset) / extent;

		// To deal with OpenGL flip and sub image view.
		vec2 uv_sub = fma(sample_point, ubo.post_transform[layer].zw, ubo.post_transform[layer].xy);

		uint index = ubo.images_samplers[layer].x;
#ifdef DEBUG
		out_color += texture(source[index], uv_sub) / 2.f;
#else

		out_color = texture(source[index], uv_sub);
#endif
	} else {
		// Outside of the configured cylinder region: transparent.
		out_color += vec4(0.f);
	}

	return out_color;
}

/*
 * Sample an equirect2 layer: shoot a ray through the target texel,
 * intersect it with the layer's sphere and sample the equirectangular
 * texture at the hit point. Returns vec4(0) on a miss.
 */
vec4 do_equirect2(vec2 view_uv, uint layer)
{
	// Get ray position in model space.
	const vec3 ray_origin = (ubo.mv_inverse[layer] * vec4(0, 0, 0, 1)).xyz;

	// [0 .. 1] to tangent lengths (at unit Z).
	const vec2 uv = fma(view_uv, ubo.pre_transform.zw, ubo.pre_transform.xy);

	// With Z at the unit plane and flip y for OpenXR coordinate system,
	// transform the ray into model space.
	const vec3 ray_dir = normalize((ubo.mv_inverse[layer] * vec4(uv.x, -uv.y, -1, 0)).xyz);

	const float radius = ubo.eq2_data[layer].x;
	const float central_horizontal_angle = ubo.eq2_data[layer].y;
	const float upper_vertical_angle = ubo.eq2_data[layer].z;
	const float lower_vertical_angle = ubo.eq2_data[layer].w;

	vec3 dir_from_sph;
	// CPU code will set +INFINITY to zero.
	if (radius == 0) {
		// Infinite radius: treat the ray direction itself as the
		// direction from the sphere centre.
		dir_from_sph = ray_dir;
	} else {
		// Find if the sphere intersects with the ray using Pythagoras'
		// theorem with a triangle formed by QC, H and the radius.
		// Inspired by Inigo Quilez
		// https://iquilezles.org/articles/intersectors/

		const float B = dot(ray_origin, ray_dir);
		// QC is the point where the ray passes closest
		const vec3 QC = ray_origin - B * ray_dir;
		// If the distance is farther than the radius, no hit
		float H = radius * radius - dot(QC, QC);
		if (H < 0.0) {
			// no intersection
			return vec4(0.f);
		}

		H = sqrt(H);

		vec2 distances = vec2(-B - H, -B + H);
		// Far hit behind the ray origin: sphere entirely behind us.
		if (distances.y < 0) {
			return vec4(0.f);
		}

		// Use the far hit — we render the inside of the sphere.
		dir_from_sph = normalize(ray_origin + (ray_dir * distances.y));
	}

	// Longitude in [0, 1], latitude in [0, 1] (0 at +Y pole).
	const float lon = atan(dir_from_sph.x, -dir_from_sph.z) / (2 * PI) + 0.5;
	const float lat = acos(dir_from_sph.y) / PI;

	vec4 out_color = vec4(0.f);

#ifdef DEBUG
	// Debug grid: red stripe at lon == 0, white grid lines every 0.05.
	const int lon_int = int(lon * 1000.f);
	const int lat_int = int(lat * 1000.f);

	if (lon < 0.001 && lon > -0.001) {
		out_color = vec4(1, 0, 0, 1);
	} else if (lon_int % 50 == 0) {
		out_color = vec4(1, 1, 1, 1);
	} else if (lat_int % 50 == 0) {
		out_color = vec4(1, 1, 1, 1);
	} else {
		out_color = vec4(lon, lat, 0, 1);
	}
#endif

	const float chan = central_horizontal_angle / (PI * 2.0f);

	// Normalize [0, 2π] to [0, 1]
	const float uhan = 0.5 + chan / 2.0f; // upper horizontal bound
	const float lhan = 0.5 - chan / 2.0f; // lower horizontal bound

	// Normalize [-π/2, π/2] to [0, 1]
	const float uvan = upper_vertical_angle / PI + 0.5f;
	const float lvan = lower_vertical_angle / PI + 0.5f;

	if (lat < uvan && lat > lvan && lon < uhan && lon > lhan) {
		// map configured display region to whole texture
		vec2 ll_offset = vec2(lhan, lvan);
		vec2 ll_extent = vec2(uhan - lhan, uvan - lvan);
		vec2 sample_point = (vec2(lon, lat) - ll_offset) / ll_extent;

		// To deal with OpenGL flip and sub image view.
		vec2 uv_sub = fma(sample_point, ubo.post_transform[layer].zw, ubo.post_transform[layer].xy);

		uint index = ubo.images_samplers[layer].x;
#ifdef DEBUG
		out_color += texture(source[index], uv_sub) / 2.0;
#else

		out_color = texture(source[index], uv_sub);
#endif
	} else {
		// Outside of the configured sphere region: transparent.
		out_color += vec4(0.f);
	}

	return out_color;
}

/*
 * Sample a projection layer, with or without timewarp depending on the
 * do_timewarp specialization constant.
 */
vec4 do_projection(vec2 view_uv, uint layer)
{
	uint source_image_index = ubo.images_samplers[layer].x;

	// Do any transformation needed.
	vec2 uv = transform_uv(view_uv, layer);

	// Sample the source.
	vec4 colour = vec4(texture(source[source_image_index], uv).rgba);

	return colour;
}

/*
 * Turn a [0, 1] view UV into a normalized view-space ray direction.
 */
vec3 get_direction(vec2 uv)
{
	// Skip the DIM/STRETCH/OFFSET stuff and go directly to values
	vec4 values = vec4(uv, -1, 1);

	// From uv to tan angle (tangent space).
	values.xy = fma(values.xy, ubo.pre_transform.zw, ubo.pre_transform.xy);
	values.y = -values.y; // Flip to OpenXR coordinate system.

	// This works because values.xy are now in tangent space, that is the
	// `tan(a)` on each of the x and y axis. That means values.xyz now
	// define a point on the plane that sits at Z -1 and has a normal that
	// runs parallel to the Z-axis. So if you run normalize you get a normal
	// that points at that point.
	vec3 direction = normalize(values.xyz);

	return direction;
}

/*
 * Sample a quad layer: intersect a view-space ray with the quad's plane and
 * sample the layer texture inside the quad's bounds. Returns transparent
 * black on a miss or when hitting the quad's backface.
 */
vec4 do_quad(vec2 view_uv, uint layer)
{
	uint source_image_index = ubo.images_samplers[layer].x;

	// center point of the plane in view space.
	vec3 quad_position = ubo.quad_position[layer].xyz;

	// normal vector of the plane.
	vec3 normal = ubo.quad_normal[layer].xyz;
	normal = normalize(normal);

	// coordinate system is the view space, therefore the camera/eye position is in the origin.
	vec3 camera = vec3(0.0, 0.0, 0.0);

	// default color white should never be visible
	vec4 colour = vec4(1.0, 1.0, 1.0, 1.0);

	//! @todo can we get better "pixel stuck" on projection layers with timewarp uv?
	// never use the timewarp uv here because it depends on the projection layer pose
	vec2 uv = view_uv;

	/*
	 * To fill in the view_uv texel on the target texture, an imaginary ray is shot through texels on the target
	 * texture. When this imaginary ray hits a quad layer, it means that when the respective color at the hit
	 * intersection is picked for the current view_uv texel, the final image as seen through the headset will
	 * show this view_uv texel at the respective location.
	 */
	vec3 direction = get_direction(uv);
	direction = normalize(direction);

	float denominator = dot(direction, normal);

	// denominator is negative when vectors point towards each other, 0 when perpendicular,
	// and positive when vectors point in a similar direction, i.e. direction vector faces quad backface, which we don't render.
	if (denominator < 0.00001) {
		// shortest distance between origin and plane defined by normal + quad_position
		float dist = dot(camera - quad_position, normal);

		// distance between origin and intersection point on the plane.
		float intersection_dist = (dot(camera, normal) + dist) / -denominator;

		// layer is behind camera as defined by direction vector
		if (intersection_dist < 0) {
			colour = vec4(0.0, 0.0, 0.0, 0.0);
			return colour;
		}

		vec3 intersection = camera + intersection_dist * direction;

		// ps for "plane space"
		vec2 intersection_ps = (ubo.inverse_quad_transform[layer] * vec4(intersection.xyz, 1.0)).xy;

		// The quad is centred on the plane-space origin, so the bounds
		// are +/- half the extent on each axis.
		bool in_plane_bounds =
		    intersection_ps.x >= - ubo.quad_extent[layer].x / 2. && //
		    intersection_ps.x <= ubo.quad_extent[layer].x / 2. && //
		    intersection_ps.y >= - ubo.quad_extent[layer].y / 2. && //
		    intersection_ps.y <= ubo.quad_extent[layer].y / 2.;

		if (in_plane_bounds) {
			// intersection_ps is in [-quad_extent .. quad_extent]. Transform to [0 .. quad_extent], then scale to [ 0 .. 1 ] for sampling
			vec2 plane_uv = (intersection_ps.xy + ubo.quad_extent[layer] / 2.) / ubo.quad_extent[layer];

			// sample on the desired subimage, not the entire texture
			plane_uv = fma(plane_uv, ubo.post_transform[layer].zw, ubo.post_transform[layer].xy);

			colour = texture(source[source_image_index], plane_uv);
		} else {
			// intersection on infinite plane outside of plane bounds
			colour = vec4(0.0, 0.0, 0.0, 0.0);
			return colour;
		}
	} else {
		// no intersection with front face of infinite plane or perpendicular
		colour = vec4(0.0, 0.0, 0.0, 0.0);
		return colour;
	}

	return vec4(colour);
}

/*
 * Composite all active layers for the given view UV, back to front,
 * using per-layer premultiplied or unpremultiplied alpha blending.
 */
vec4 do_layers(vec2 view_uv)
{
	vec4 accum = vec4(0, 0, 0, 0);

	int layer_count = ubo.layer_count.x;
	for (uint layer = 0; layer < layer_count; layer++) {
		vec4 rgba = vec4(0, 0, 0, 0);

		// Dispatch on the layer type; unsupported types (cube,
		// equirect1) fall through and contribute nothing.
		switch (ubo.layer_type_and_unpremultiplied[layer].x) {
		case XRT_LAYER_CYLINDER:
			rgba = do_cylinder(view_uv, layer);
			break;
		case XRT_LAYER_EQUIRECT2:
			rgba = do_equirect2(view_uv, layer);
			break;
		case XRT_LAYER_PROJECTION:
		case XRT_LAYER_PROJECTION_DEPTH:
			rgba = do_projection(view_uv, layer);
			break;
		case XRT_LAYER_QUAD:
			rgba = do_quad(view_uv, layer);
			break;
		default: break;
		}

		if (ubo.layer_type_and_unpremultiplied[layer].y != 0) {
			// Unpremultiplied blend factor of src.a.
			accum.rgb = mix(accum.rgb, rgba.rgb, rgba.a);
		} else {
			// Premultiplied blend factor of 1.
			accum.rgb = (accum.rgb * (1 - rgba.a)) + rgba.rgb;
		}
		// Standard "over" alpha accumulation, shared by both modes.
		accum.a = fma((1.f - rgba.a), accum.a, rgba.a);
	}

	return accum;
}

/*
 * Entry point: one invocation per target pixel. Composites all layers and
 * stores the result, optionally converting linear -> sRGB.
 */
void main()
{
	uint ix = gl_GlobalInvocationID.x;
	uint iy = gl_GlobalInvocationID.y;

	ivec2 offset = ivec2(ubo.view.xy);
	ivec2 extent = ivec2(ubo.view.zw);

	// Dispatch is rounded up to the 8x8 workgroup size, so invocations
	// outside the view extent must bail out.
	if (ix >= extent.x || iy >= extent.y) {
		return;
	}

	vec2 view_uv = position_to_view_uv(extent, ix, iy);

	vec4 colour = do_layers(view_uv);

	if (do_color_correction) {
		// Do colour correction here since there is no automatic conversion in hardware available.
		colour.rgb = from_linear_to_srgb(colour.rgb);
	}

	imageStore(target, ivec2(offset.x + ix, offset.y + iy), colour);
}