// The open source OpenXR runtime.
1// Copyright 2020-2023, Collabora, Ltd.
2// Copyright 2020, Moshi Turner.
3// SPDX-License-Identifier: BSL-1.0
4/*!
5 * @file
6 * @brief SimulaVR driver code.
7 * @author Moshi Turner <moshiturner@protonmail.com>
8 * @ingroup drv_svr
9 */
10
11#include "math/m_mathinclude.h"
12
13#include <stdio.h>
14#include <stdlib.h>
15#include <string.h>
16#include <assert.h>
17
18#include "svr_interface.h"
19
20#include "xrt/xrt_defines.h"
21#include "xrt/xrt_device.h"
22
23#include "math/m_api.h"
24#include "math/m_space.h"
25#include "math/m_vec2.h"
26
27#include "os/os_time.h"
28#include "os/os_threading.h"
29
30
31#include "util/u_var.h"
32#include "util/u_debug.h"
33#include "util/u_device.h"
34#include "util/u_time.h"
35#include "util/u_json.h"
36#include "util/u_misc.h"
37#include "util/u_logging.h"
38#include "util/u_distortion_mesh.h"
39
40
// Log level control: read once from the SIMULA_LOG environment variable,
// defaulting to info.
DEBUG_GET_ONCE_LOG_OPTION(svr_log, "SIMULA_LOG", U_LOGGING_INFO)

// Per-device logging helpers; `d` must be a struct svr_hmd pointer so that
// &d->base and d->log_level resolve.
#define SVR_TRACE(d, ...) U_LOG_XDEV_IFL_T(&d->base, d->log_level, __VA_ARGS__)
#define SVR_DEBUG(d, ...) U_LOG_XDEV_IFL_D(&d->base, d->log_level, __VA_ARGS__)
#define SVR_INFO(d, ...) U_LOG_XDEV_IFL_I(&d->base, d->log_level, __VA_ARGS__)
#define SVR_WARN(d, ...) U_LOG_XDEV_IFL_W(&d->base, d->log_level, __VA_ARGS__)
#define SVR_ERROR(d, ...) U_LOG_XDEV_IFL_E(&d->base, d->log_level, __VA_ARGS__)
48
/*!
 * State for a SimulaVR HMD device.
 *
 * Embeds @ref xrt_device as its first member so that xrt_device pointers can
 * be cast back to this struct (see svr_hmd()).
 */
struct svr_hmd
{
	struct xrt_device base;

	// Per-eye polynomial distortion parameters, copied at create time.
	struct svr_two_displays_distortion distortion;

	// Log level, initialized from the SIMULA_LOG env var.
	enum u_logging_level log_level;
};
57
/*!
 * Cast helper: recover the containing @ref svr_hmd from its embedded
 * @ref xrt_device (which is the struct's first member).
 */
static inline struct svr_hmd *
svr_hmd(struct xrt_device *xdev)
{
	struct svr_hmd *svr = (struct svr_hmd *)xdev;
	return svr;
}
63
64static void
65svr_hmd_destroy(struct xrt_device *xdev)
66{
67 struct svr_hmd *ns = svr_hmd(xdev);
68
69 // Remove the variable tracking.
70 u_var_remove_root(ns);
71
72 u_device_free(&ns->base);
73}
74
75static xrt_result_t
76svr_hmd_get_tracked_pose(struct xrt_device *xdev,
77 enum xrt_input_name name,
78 int64_t at_timestamp_ns,
79 struct xrt_space_relation *out_relation)
80{
81 struct svr_hmd *ns = svr_hmd(xdev);
82
83 if (name != XRT_INPUT_GENERIC_HEAD_POSE) {
84 SVR_ERROR(ns, "unknown input name");
85 U_LOG_XDEV_UNSUPPORTED_INPUT(&ns->base, ns->log_level, name);
86 return XRT_ERROR_INPUT_UNSUPPORTED;
87 }
88
89
90 out_relation->angular_velocity = (struct xrt_vec3)XRT_VEC3_ZERO;
91 out_relation->linear_velocity = (struct xrt_vec3)XRT_VEC3_ZERO;
92 out_relation->pose =
93 (struct xrt_pose)XRT_POSE_IDENTITY; // This is so that tracking overrides/multi driver just transforms us by
94 // the tracker + offset from the tracker.
95 out_relation->relation_flags = XRT_SPACE_RELATION_BITMASK_ALL;
96
97 return XRT_SUCCESS;
98}
99
100static xrt_result_t
101svr_hmd_get_view_poses(struct xrt_device *xdev,
102 const struct xrt_vec3 *default_eye_relation,
103 int64_t at_timestamp_ns,
104 enum xrt_view_type view_type,
105 uint32_t view_count,
106 struct xrt_space_relation *out_head_relation,
107 struct xrt_fov *out_fovs,
108 struct xrt_pose *out_poses)
109{
110 //!@todo: default_eye_relation inherits from the env var OXR_DEBUG_IPD_MM / oxr_session.c
111 // probably needs a lot more attention
112
113 xrt_result_t xret = u_device_get_view_poses( //
114 xdev, //
115 default_eye_relation, //
116 at_timestamp_ns, //
117 view_type, //
118 view_count, //
119 out_head_relation, //
120 out_fovs, //
121 out_poses); //
122 if (xret != XRT_SUCCESS) {
123 return xret;
124 }
125
126 //!@todo you may need to invert this - I can't test locally
127 float turn_vals[2] = {5.0, -5.0};
128 for (uint32_t i = 0; i < view_count && i < 2; i++) {
129 struct xrt_vec3 y_up = (struct xrt_vec3)XRT_VEC3_UNIT_Y;
130 math_quat_from_angle_vector(DEG_TO_RAD(turn_vals[i]), &y_up, &out_poses[i].orientation);
131 }
132
133 return XRT_SUCCESS;
134}
135
136//!@todo: remove hard-coding and move to u_distortion_mesh
137static xrt_result_t
138svr_mesh_calc(struct xrt_device *xdev, uint32_t view, float u, float v, struct xrt_uv_triplet *result)
139{
140 struct svr_hmd *svr = svr_hmd(xdev);
141
142 struct svr_one_display_distortion *dist = &svr->distortion.views[view];
143
144 struct svr_display_distortion_polynomial_values *distortion_channels[3] = {&dist->red, &dist->green,
145 &dist->blue};
146
147
148 // Somewhere at the program (constants definition)
149 /* Display size in mm */
150 // note for people expecting everything to be in meters: no, really, this is millimeters and we don't need a
151 // scaling factor
152 // float _DispDimsX = 51.7752;
153 // float _DispDimsY = 51.7752;
154 /* Half of the horizontal field of view (in radians) fovH/2 */
155 float _FoVh_2 = dist->half_fov;
156 /* Field of view aspect ratio (fovH/fovV), equals to 1 if fovH = fovV */
157 float _aspect = 1.0f;
158
159
160 // Results r/g/b.
161 struct xrt_vec2 tc[3] = {{0, 0}, {0, 0}, {0, 0}};
162
163 // Dear compiler, please vectorize.
164 for (int i = 0; i < 3; i++) {
165
166 // Just before applying the polynomial (maybe before the loop or at the beginning of it)
167 // Denormalization: conversion from uv texture coordinates (origin at bottom left corner) to mm display
168 // coordinates
169 struct xrt_vec2 XoYo = {0, 0}; // Assuming (0,0) at the center of the display: -DispDimsX/2 <= XoYo.x <=
170 // DispDimsX/2; -DispDimsY <= XoYo.y <= DispDimsY
171 XoYo.x = dist->display_size_mm.x * (u - 0.5f);
172 XoYo.y = dist->display_size_mm.y * (v - 0.5f);
173
174 struct xrt_vec2 tanH_tanV = {
175 0, 0}; // Resulting angular coordinates (tan(H), tan(V)) of input image corresponding to the
176 // coordinates of the input texture whose color will be sampled
177
178 float r2 = m_vec2_dot(XoYo, XoYo);
179 float r = sqrtf(r2);
180
181 // 9 degree polynomial (only odd coefficients)
182 struct svr_display_distortion_polynomial_values *vals = distortion_channels[i];
183 float k1 = vals->k1;
184 float k3 = vals->k3;
185 float k5 = vals->k5;
186 float k7 = vals->k7;
187 float k9 = vals->k9;
188
189 float k = r * (k1 + r2 * (k3 + r2 * (k5 + r2 * (k7 + r2 * k9))));
190
191 // Avoid problems when r = 0
192 if (r > 0) {
193 tanH_tanV.x = (k * XoYo.x) / r;
194 tanH_tanV.y = (k * XoYo.y) / r;
195 } else {
196 tanH_tanV.x = 0;
197 tanH_tanV.y = 0;
198 }
199
200 // Normalization: Transformation from angular coordinates (tan(H), tan(V)) of input image to tc
201 // (normalized coordinates with origin at the bottom left corner)
202 tc[i].x = (tanH_tanV.x + tanf(_FoVh_2)) / (2 * tanf(_FoVh_2));
203 tc[i].y = ((tanH_tanV.y + tanf(_FoVh_2) / _aspect) / (2 * tanf(_FoVh_2))) * _aspect;
204
205 // SVR_TRACE(svr, "Distortion %f %f -> %i %f %f", u, v, i, tc[i].x, tc[i].y);
206 }
207 result->r = tc[0];
208 result->g = tc[1];
209 result->b = tc[2];
210
211 return XRT_SUCCESS;
212}
213
214
215/*
216 *
217 * Create function.
218 *
219 */
220
221struct xrt_device *
222svr_hmd_create(struct svr_two_displays_distortion *distortion)
223{
224 enum u_device_alloc_flags flags =
225 (enum u_device_alloc_flags)(U_DEVICE_ALLOC_HMD | U_DEVICE_ALLOC_TRACKING_NONE);
226 struct svr_hmd *svr = U_DEVICE_ALLOCATE(struct svr_hmd, flags, 1, 0);
227
228 // Slow copy. Could refcount it but who cares, this runs once.
229 svr->distortion = *distortion;
230
231 svr->log_level = debug_get_log_option_svr_log();
232
233
234
235 svr->base.update_inputs = u_device_noop_update_inputs;
236 svr->base.get_tracked_pose = svr_hmd_get_tracked_pose;
237 svr->base.get_view_poses = svr_hmd_get_view_poses;
238 svr->base.destroy = svr_hmd_destroy;
239 svr->base.name = XRT_DEVICE_GENERIC_HMD;
240
241 // Sorta a lie, we have to do this to make the state tracker happy. (Should multi.c override these?)
242 svr->base.supported.orientation_tracking = true;
243 svr->base.supported.position_tracking = true;
244
245 svr->base.device_type = XRT_DEVICE_TYPE_HMD;
246
247 svr->base.hmd->screens[0].nominal_frame_interval_ns = (uint64_t)time_s_to_ns(1.0f / 90.0f);
248 svr->base.hmd->screens[0].scanout_direction = XRT_SCANOUT_DIRECTION_NONE;
249 svr->base.hmd->screens[0].scanout_time_ns = 0;
250
251
252 // Print name.
253 snprintf(svr->base.str, XRT_DEVICE_NAME_LEN, "SimulaVR HMD");
254 snprintf(svr->base.serial, XRT_DEVICE_NAME_LEN, "0001");
255 // Setup input.
256 svr->base.inputs[0].name = XRT_INPUT_GENERIC_HEAD_POSE;
257
258 struct u_extents_2d exts;
259
260 // one screen is 2448px wide, but there are two of them.
261 exts.w_pixels = 2448 * 2;
262 // Both screens are 2448px tall
263 exts.h_pixels = 2448;
264
265 u_extents_2d_split_side_by_side(&svr->base, &exts);
266
267 for (int view = 0; view < 2; view++) {
268 svr->base.hmd->distortion.fov[view].angle_left = -svr->distortion.views[view].half_fov;
269 svr->base.hmd->distortion.fov[view].angle_right = svr->distortion.views[view].half_fov;
270 svr->base.hmd->distortion.fov[view].angle_up = svr->distortion.views[view].half_fov;
271 svr->base.hmd->distortion.fov[view].angle_down = -svr->distortion.views[view].half_fov;
272 }
273
274 u_distortion_mesh_set_none(&svr->base);
275 svr->base.hmd->distortion.models = XRT_DISTORTION_MODEL_COMPUTE;
276 svr->base.hmd->distortion.preferred = XRT_DISTORTION_MODEL_COMPUTE;
277 svr->base.compute_distortion = svr_mesh_calc;
278
279 // Setup variable tracker.
280 u_var_add_root(svr, "Simula HMD", true);
281 svr->base.supported.orientation_tracking = true;
282 svr->base.device_type = XRT_DEVICE_TYPE_HMD;
283
284 size_t idx = 0;
285
286 //!@todo these should be true for the final product iirc but possibly not for the demo unit
287 svr->base.hmd->blend_modes[idx++] = XRT_BLEND_MODE_ADDITIVE;
288 svr->base.hmd->blend_modes[idx++] = XRT_BLEND_MODE_OPAQUE;
289 svr->base.hmd->blend_modes[idx++] = XRT_BLEND_MODE_ALPHA_BLEND;
290
291 svr->base.hmd->blend_mode_count = idx;
292
293 uint64_t start;
294 uint64_t end;
295
296 start = os_monotonic_get_ns();
297 u_distortion_mesh_fill_in_compute(&svr->base);
298 end = os_monotonic_get_ns();
299
300 float diff = (end - start);
301 diff /= U_TIME_1MS_IN_NS;
302
303 SVR_DEBUG(svr, "Filling mesh took %f ms", diff);
304
305
306 return &svr->base;
307}