// The open source OpenXR runtime.
1// Copyright 2021-2022, Collabora Ltd.
2// Author: Jakob Bornecrantz <jakob@collabora.com>
3// SPDX-License-Identifier: BSL-1.0
4
5#version 460
6#extension GL_GOOGLE_include_directive : require
7
8#include "srgb.inc.glsl"
9
10
// The size of the distortion texture dimensions in texels.
layout(constant_id = 0) const int distortion_texel_count = 2;

// Should we do timewarp.
layout(constant_id = 1) const bool do_timewarp = false;

// One invocation per target pixel, 8x8 pixels per workgroup.
layout(local_size_x = 8, local_size_y = 8, local_size_z = 1) in;

// Source layer textures, indexed by view.
layout(set = 0, binding = 0) uniform sampler2D source[2];
// Per-channel distortion UV lookup textures, indexed [view + 2 * channel]
// so the layout is [r0, r1, g0, g1, b0, b1] — see main().
layout(set = 0, binding = 1) uniform sampler2D distortion[6];
// The target image both views are written into.
layout(set = 0, binding = 2) uniform writeonly restrict image2D target;
// Per-frame parameters, one array element per view.
layout(set = 0, binding = 3, std140) uniform restrict Config
{
	ivec4 views[2];                           // .xy offset and .zw extent of the view in target pixels.
	vec4 pre_transform[2];                    // UV to tan-angle transform, .xy offset and .zw scale.
	vec4 post_transform[2];                   // Sub image (and OpenGL flip) transform, .xy offset and .zw scale.
	mat4 transform_timewarp_scanout_begin[2]; // Timewarp transform at scanout begin.
	mat4 transform_timewarp_scanout_end[2];   // Timewarp transform at scanout end.
}ubo;
30
31
/*
 * Turn a target pixel position into a UV coordinate used to sample the
 * distortion textures, compensating for the distortion texture size so
 * that samples land on texel centers.
 *
 * Fix: the original used function-local #define macros (DIM, STRETCH,
 * OFFSET) that were never #undef'd and so leaked into the rest of the
 * file; replaced with typed const locals computing the same values.
 */
vec2 position_to_uv(ivec2 extent, uint ix, uint iy)
{
	// Turn the index into floating point.
	vec2 xy = vec2(float(ix), float(iy));

	// The inverse of the extent of the target image is the pixel size in [0 .. 1] space.
	vec2 extent_pixel_size = vec2(1.0 / float(extent.x), 1.0 / float(extent.y));

	// Per-target pixel we move the size of the pixels.
	vec2 dist_uv = xy * extent_pixel_size;

	// Emulate a triangle sample position by offset half target pixel size.
	dist_uv = dist_uv + extent_pixel_size / 2.0;


	// To correctly sample we need to put position (0, 0) in the
	// middle of the (0, 0) texel in the distortion textures. That's why we
	// offset with half the texel size, pushing all samples into the middle
	// of each texels, a kin to a vertex buffer. We need to put uv coord
	// (1, 1) in the middle of the last texel, that pixel is (size - 1)
	// texels away from the first texel. So we need to scale [0 .. 1] to
	// [0 .. size - 1].
	const float dim = float(distortion_texel_count);
	const float stretch = (dim - 1.0) / dim;
	const float offset = 1.0 / (dim * 2.0);

	dist_uv = (dist_uv * stretch) + offset;

	return dist_uv;
}
63
/*
 * Apply the per-view post transform to a UV coordinate, dealing with the
 * OpenGL flip and the sub image view (.zw is the scale, .xy the offset).
 */
vec2 transform_uv_subimage(vec2 uv, uint iz)
{
	return uv * ubo.post_transform[iz].zw + ubo.post_transform[iz].xy;
}
74
/*
 * Timewarp a UV coordinate: project it onto the z = -1 plane in tangent
 * space, reproject with the scanout-begin and scanout-end timewarp
 * matrices blended by scanline, then map back to [0, 1] UV space.
 */
vec2 transform_uv_timewarp(vec2 uv, uint iz)
{
	// Homogeneous position of the uv coordinate on the z = -1 plane.
	vec4 pos = vec4(uv, -1, 1);

	// From uv to tan angle (tangent space).
	pos.xy = pos.xy * ubo.pre_transform[iz].zw + ubo.pre_transform[iz].xy;
	pos.y = -pos.y; // Flip to OpenXR coordinate system.

	// Timewarp including scanline timewarp for rolling refresh panels:
	// blend the begin and end reprojections by the vertical position.
	vec4 warped_begin = ubo.transform_timewarp_scanout_begin[iz] * pos;
	vec4 warped_end = ubo.transform_timewarp_scanout_end[iz] * pos;
	pos = warped_begin * (1 - uv.y) + warped_end * uv.y;

	// Perspective divide, clamped to avoid dividing by zero.
	pos.xy = pos.xy * (1.0 / max(pos.w, 0.00001));

	// From [-1, 1] to [0, 1]
	pos.xy = pos.xy * 0.5 + 0.5;

	// Done.
	return pos.xy;
}
94
/*
 * Full UV transform chain: optional timewarp (selected at pipeline
 * compile time via the do_timewarp specialization constant) followed by
 * the always-applied sub image transform.
 */
vec2 transform_uv(vec2 uv, uint iz)
{
	vec2 warped = do_timewarp ? transform_uv_timewarp(uv, iz) : uv;

	return transform_uv_subimage(warped, iz);
}
103
/*
 * Per-pixel distortion correction: look up one distortion UV per colour
 * channel (chromatic aberration correction), transform each, sample the
 * source layer, convert to sRGB and store into the target image.
 */
void main()
{
	uvec3 pos = gl_GlobalInvocationID;
	uint view = pos.z;

	ivec2 offset = ivec2(ubo.views[view].xy);
	ivec2 extent = ivec2(ubo.views[view].zw);

	// Dispatches are rounded up to the workgroup size, skip excess invocations.
	if (pos.x >= extent.x || pos.y >= extent.y) {
		return;
	}

	vec2 dist_uv = position_to_uv(extent, pos.x, pos.y);

	// One lookup texture per channel and view, laid out [r0, r1, g0, g1, b0, b1].
	vec2 r_uv = texture(distortion[view + 0], dist_uv).xy;
	vec2 g_uv = texture(distortion[view + 2], dist_uv).xy;
	vec2 b_uv = texture(distortion[view + 4], dist_uv).xy;

	// Do any transformation needed.
	r_uv = transform_uv(r_uv, view);
	g_uv = transform_uv(g_uv, view);
	b_uv = transform_uv(b_uv, view);

	// Sample the source with distorted and chromatic-aberration corrected samples.
	vec4 colour = vec4(texture(source[view], r_uv).r, //
	                   texture(source[view], g_uv).g, //
	                   texture(source[view], b_uv).b, //
	                   1);

	// Do colour correction here since there are no automatic conversion in hardware available.
	colour = vec4(from_linear_to_srgb(colour.rgb), 1);

	imageStore(target, offset + ivec2(pos.xy), colour);
}
139}