// The open source OpenXR runtime.
1// Copyright 2013, Fredrik Hultin.
2// Copyright 2013, Jakob Bornecrantz.
3// Copyright 2015, Joey Ferwerda.
4// Copyright 2020-2025, Collabora, Ltd.
5// SPDX-License-Identifier: BSL-1.0
6/*!
7 * @file
8 * @brief Android sensors driver code.
9 * @author Lubosz Sarnecki <lubosz.sarnecki@collabora.com>
10 * @author Korcan Hussein <korcan.hussein@collabora.com>
11 * @author Simon Zeni <simon.zeni@collabora.com>
12 * @ingroup drv_android
13 */
14
15#include "android_sensors.h"
16
17#include "util/u_debug.h"
18#include "util/u_device.h"
19#include "util/u_distortion_mesh.h"
20#include "util/u_var.h"
21#include "util/u_visibility_mask.h"
22
23#include "cardboard_device.pb.h"
24#include "pb_decode.h"
25
26#include "android/android_globals.h"
27#include "android/android_content.h"
28#include "android/android_custom_surface.h"
29
30#include <xrt/xrt_config_android.h>
31
// Workaround to avoid the inclusion of "android_native_app_glue.h".
33#ifndef LOOPER_ID_USER
34#define LOOPER_ID_USER 3
35#endif
36
// Default sensor poll interval: 60 events per second, expressed in
// microseconds. Fully parenthesized so the macro composes safely in
// arithmetic expressions (the old form broke under division/multiplication).
#define POLL_RATE_USEC ((1000L / 60) * 1000)
39
40
41DEBUG_GET_ONCE_LOG_OPTION(android_log, "ANDROID_SENSORS_LOG", U_LOGGING_WARN)
42
/*!
 * Cast helper from the generic @ref xrt_device to our android_device.
 */
static inline struct android_device *
android_device(struct xrt_device *xdev)
{
	struct android_device *ad = (struct android_device *)xdev;
	return ad;
}
48
49static bool
50read_file(pb_istream_t *stream, uint8_t *buf, size_t count)
51{
52 U_LOG_E("read_file callback");
53 FILE *file = (FILE *)stream->state;
54 if (buf == NULL) {
55 while (count-- && fgetc(file) != EOF)
56 ;
57 return count == 0;
58 }
59
60 bool status = (fread(buf, 1, count, file) == count);
61
62 if (feof(file)) {
63 stream->bytes_left = 0;
64 }
65
66 return status;
67}
68
69static bool
70read_buffer(pb_istream_t *stream, const pb_field_t *field, void **arg)
71{
72 U_LOG_E("read_file callback");
73 uint8_t *buffer = (uint8_t *)*arg;
74 return pb_read(stream, buffer, stream->bytes_left);
75}
76
77static bool
78load_cardboard_distortion(struct android_device *d,
79 struct xrt_android_display_metrics *metrics,
80 struct u_cardboard_distortion_arguments *args)
81{
82 char external_storage_dir[PATH_MAX] = {0};
83 if (!android_content_get_files_dir(android_globals_get_context(), external_storage_dir,
84 sizeof(external_storage_dir))) {
85 ANDROID_ERROR(d, "failed to access files dir");
86 return false;
87 }
88
89 /* TODO: put file in Cardboard folder */
90 char device_params_file[PATH_MAX] = {0};
91 snprintf(device_params_file, sizeof(device_params_file), "%s/current_device_params", external_storage_dir);
92
93 FILE *file = fopen(device_params_file, "rb");
94 if (file == NULL) {
95 ANDROID_ERROR(d, "failed to open calibration file '%s'", device_params_file);
96 return false;
97 }
98
99 pb_istream_t stream = {&read_file, file, SIZE_MAX, NULL};
100 cardboard_DeviceParams params = cardboard_DeviceParams_init_zero;
101
102 char vendor[64] = {0};
103 params.vendor.arg = vendor;
104 params.vendor.funcs.decode = read_buffer;
105
106 char model[64] = {0};
107 params.model.arg = model;
108 params.model.funcs.decode = read_buffer;
109
110 float angles[4] = {0};
111 params.left_eye_field_of_view_angles.arg = angles;
112 params.left_eye_field_of_view_angles.funcs.decode = read_buffer;
113
114 params.distortion_coefficients.arg = args->distortion_k;
115 params.distortion_coefficients.funcs.decode = read_buffer;
116
117 if (!pb_decode(&stream, cardboard_DeviceParams_fields, ¶ms)) {
118 ANDROID_ERROR(d, "failed to read calibration file: %s", PB_GET_ERROR(&stream));
119 return false;
120 }
121
122 if (params.has_vertical_alignment) {
123 args->vertical_alignment = (enum u_cardboard_vertical_alignment)params.vertical_alignment;
124 }
125
126 if (params.has_inter_lens_distance) {
127 args->inter_lens_distance_meters = params.inter_lens_distance;
128 }
129 if (params.has_screen_to_lens_distance) {
130 args->screen_to_lens_distance_meters = params.screen_to_lens_distance;
131 }
132 if (params.has_tray_to_lens_distance) {
133 args->tray_to_lens_distance_meters = params.tray_to_lens_distance;
134 }
135
136#define DEG_TO_RAD(x) (float)(x * M_PI / 180.0)
137 args->fov = (struct xrt_fov){.angle_left = -DEG_TO_RAD(angles[0]),
138 .angle_right = DEG_TO_RAD(angles[1]),
139 .angle_down = -DEG_TO_RAD(angles[2]),
140 .angle_up = DEG_TO_RAD(angles[3])};
141#undef DEG_TO_RAD
142
143 ANDROID_INFO(d, "loaded calibration for device %s (%s)", model, vendor);
144
145 return true;
146}
147
148// Callback for the Android sensor event queue
149static int
150android_sensor_callback(ASensorEvent *event, struct android_device *d)
151{
152 struct xrt_vec3 gyro;
153 struct xrt_vec3 accel;
154
155 switch (event->type) {
156 case ASENSOR_TYPE_ACCELEROMETER: {
157 accel.x = event->acceleration.y;
158 accel.y = -event->acceleration.x;
159 accel.z = event->acceleration.z;
160
161 ANDROID_TRACE(d, "accel %" PRId64 " %.2f %.2f %.2f", event->timestamp, accel.x, accel.y, accel.z);
162 break;
163 }
164 case ASENSOR_TYPE_GYROSCOPE: {
165 gyro.x = -event->data[1];
166 gyro.y = event->data[0];
167 gyro.z = event->data[2];
168
169 ANDROID_TRACE(d, "gyro %" PRId64 " %.2f %.2f %.2f", event->timestamp, gyro.x, gyro.y, gyro.z);
170
171 // TODO: Make filter handle accelerometer
172 struct xrt_vec3 null_accel;
173
174 // Lock last and the fusion.
175 os_mutex_lock(&d->lock);
176
177 m_imu_3dof_update(&d->fusion, event->timestamp, &null_accel, &gyro);
178
179 // Now done.
180 os_mutex_unlock(&d->lock);
181 }
182 default: ANDROID_TRACE(d, "Unhandled event type %d", event->type);
183 }
184
185 return 1;
186}
187
188static inline int32_t
189android_get_sensor_poll_rate(const struct android_device *d)
190{
191 const float freq_multiplier = 1.0f / 3.0f;
192 return (d == NULL) ? POLL_RATE_USEC
193 : (int32_t)(d->base.hmd->screens[0].nominal_frame_interval_ns * freq_multiplier * 0.001f);
194}
195
196static void *
197android_run_thread(void *ptr)
198{
199 struct android_device *d = (struct android_device *)ptr;
200 const int32_t poll_rate_usec = android_get_sensor_poll_rate(d);
201 // Maximum waiting time for sensor events.
202 static const int max_wait_milliseconds = 100;
203 ASensorManager *sensor_manager = NULL;
204 const ASensor *accelerometer = NULL;
205 const ASensor *gyroscope = NULL;
206 ASensorEventQueue *event_queue = NULL;
207#if __ANDROID_API__ >= 26
208 sensor_manager = ASensorManager_getInstanceForPackage(XRT_ANDROID_PACKAGE);
209#else
210 sensor_manager = ASensorManager_getInstance();
211#endif
212
213 accelerometer = ASensorManager_getDefaultSensor(sensor_manager, ASENSOR_TYPE_ACCELEROMETER);
214 gyroscope = ASensorManager_getDefaultSensor(sensor_manager, ASENSOR_TYPE_GYROSCOPE);
215
216 ALooper *event_looper = ALooper_forThread();
217 if (event_looper == NULL) {
218 event_looper = ALooper_prepare(ALOOPER_PREPARE_ALLOW_NON_CALLBACKS);
219 ANDROID_INFO(d,
220 "Created new event event_looper for "
221 "sensor capture thread.");
222 }
223
224 event_queue = ASensorManager_createEventQueue(sensor_manager, event_looper, LOOPER_ID_USER, NULL, (void *)d);
225
226
227 /*
228 * Start sensors in case this was not done already.
229 *
230 * On some Android devices, such as Pixel 4 and Meizu 20 series, running
231 * apps was not smooth due to the failure in setting the sensor's event
232 * rate. This was caused by the calculated sensor's event rate based on
233 * the screen refresh rate, which could be smaller than the sensor's
234 * minimum delay value. Make sure to set it to a valid value.
235 */
236 if (accelerometer != NULL) {
237 int32_t accelerometer_min_delay = ASensor_getMinDelay(accelerometer);
238 int32_t accelerometer_poll_rate_usec = MAX(poll_rate_usec, accelerometer_min_delay);
239
240 ASensorEventQueue_enableSensor(event_queue, accelerometer);
241 ASensorEventQueue_setEventRate(event_queue, accelerometer, accelerometer_poll_rate_usec);
242 }
243 if (gyroscope != NULL) {
244 int32_t gyroscope_min_delay = ASensor_getMinDelay(gyroscope);
245 int32_t gyroscope_poll_rate_usec = MAX(poll_rate_usec, gyroscope_min_delay);
246
247 ASensorEventQueue_enableSensor(event_queue, gyroscope);
248 ASensorEventQueue_setEventRate(event_queue, gyroscope, gyroscope_poll_rate_usec);
249 }
250
251 while (os_thread_helper_is_running(&d->oth)) {
252 int num_events = 0;
253 const int looper_id = ALooper_pollOnce(max_wait_milliseconds, NULL, &num_events, NULL);
254 // The device may have enabled a power-saving policy, causing the sensor to sleep and return
255 // ALOOPER_POLL_ERROR. However, we want to continue reading data when it wakes up.
256 if (looper_id != LOOPER_ID_USER) {
257 ANDROID_ERROR(d, "ALooper_pollAll failed with looper_id: %d", looper_id);
258 continue;
259 }
260 if (num_events <= 0) {
261 ANDROID_ERROR(d, "ALooper_pollAll returned zero events");
262 continue;
263 }
264 // read event
265 ASensorEvent event;
266 while (ASensorEventQueue_getEvents(event_queue, &event, 1) > 0) {
267 android_sensor_callback(&event, d);
268 }
269 }
270 // Disable sensors.
271 if (accelerometer != NULL) {
272 ASensorEventQueue_disableSensor(event_queue, accelerometer);
273 }
274 if (gyroscope != NULL) {
275 ASensorEventQueue_disableSensor(event_queue, gyroscope);
276 }
277 // Destroy the event queue.
278 ASensorManager_destroyEventQueue(sensor_manager, event_queue);
279 ANDROID_INFO(d, "android_run_thread exit");
280 return NULL;
281}
282
283
284/*
285 *
286 * Device functions.
287 *
288 */
289
290static void
291android_device_destroy(struct xrt_device *xdev)
292{
293 struct android_device *android = android_device(xdev);
294
295 // Destroy the thread object.
296 os_thread_helper_destroy(&android->oth);
297
298 // Now that the thread is not running we can destroy the lock.
299 os_mutex_destroy(&android->lock);
300
301 // Destroy the fusion.
302 m_imu_3dof_close(&android->fusion);
303
304 // Remove the variable tracking.
305 u_var_remove_root(android);
306
307 free(android);
308}
309
310static xrt_result_t
311android_device_get_tracked_pose(struct xrt_device *xdev,
312 enum xrt_input_name name,
313 int64_t at_timestamp_ns,
314 struct xrt_space_relation *out_relation)
315{
316 (void)at_timestamp_ns;
317
318 struct android_device *d = android_device(xdev);
319
320 struct xrt_space_relation new_relation = XRT_SPACE_RELATION_ZERO;
321 new_relation.pose.orientation = d->fusion.rot;
322
323 //! @todo assuming that orientation is actually currently tracked.
324 new_relation.relation_flags = (enum xrt_space_relation_flags)(XRT_SPACE_RELATION_ORIENTATION_VALID_BIT |
325 XRT_SPACE_RELATION_ORIENTATION_TRACKED_BIT |
326 XRT_SPACE_RELATION_POSITION_VALID_BIT);
327
328 *out_relation = new_relation;
329 return XRT_SUCCESS;
330}
331
332
333/*
334 *
335 * Prober functions.
336 *
337 */
338
//! Per-view distortion callback; delegates to the precomputed cardboard
//! distortion values. `view` is presumably 0 (left) or 1 (right) — it is not
//! bounds-checked here, the caller is trusted.
static xrt_result_t
android_device_compute_distortion(
    struct xrt_device *xdev, uint32_t view, float u, float v, struct xrt_uv_triplet *result)
{
	struct android_device *d = android_device(xdev);
	u_compute_distortion_cardboard(&d->cardboard.values[view], u, v, result);
	return XRT_SUCCESS;
}
347
348
349struct android_device *
350android_device_create(void)
351{
352 enum u_device_alloc_flags flags =
353 (enum u_device_alloc_flags)(U_DEVICE_ALLOC_HMD | U_DEVICE_ALLOC_TRACKING_NONE);
354 struct android_device *d = U_DEVICE_ALLOCATE(struct android_device, flags, 1, 0);
355
356 d->base.name = XRT_DEVICE_GENERIC_HMD;
357 d->base.destroy = android_device_destroy;
358 d->base.update_inputs = u_device_noop_update_inputs;
359 d->base.set_output = u_device_ni_set_output;
360 d->base.get_tracked_pose = android_device_get_tracked_pose;
361 d->base.get_view_poses = u_device_get_view_poses;
362 d->base.get_visibility_mask = u_device_get_visibility_mask;
363 d->base.compute_distortion = android_device_compute_distortion;
364 d->base.inputs[0].name = XRT_INPUT_GENERIC_HEAD_POSE;
365 d->base.device_type = XRT_DEVICE_TYPE_HMD;
366 snprintf(d->base.str, XRT_DEVICE_NAME_LEN, "Android Sensors");
367 snprintf(d->base.serial, XRT_DEVICE_NAME_LEN, "Android Sensors");
368
369 d->log_level = debug_get_log_option_android_log();
370
371 m_imu_3dof_init(&d->fusion, M_IMU_3DOF_USE_GRAVITY_DUR_20MS);
372
373 int ret = os_mutex_init(&d->lock);
374 if (ret != 0) {
375 U_LOG_E("Failed to init mutex!");
376 android_device_destroy(&d->base);
377 return 0;
378 }
379
380 struct xrt_android_display_metrics metrics;
381 if (!android_custom_surface_get_display_metrics(android_globals_get_vm(), android_globals_get_context(),
382 &metrics)) {
383 U_LOG_E("Could not get Android display metrics.");
384 /* Fallback to default values (Pixel 3) */
385 metrics.width_pixels = 2960;
386 metrics.height_pixels = 1440;
387 metrics.density_dpi = 572;
388 metrics.refresh_rate = 60.0f;
389 }
390
391 d->base.hmd->screens[0].nominal_frame_interval_ns = time_s_to_ns(1.0f / metrics.refresh_rate);
392 d->base.hmd->screens[0].scanout_direction = XRT_SCANOUT_DIRECTION_NONE;
393 d->base.hmd->screens[0].scanout_time_ns = 0;
394
395 const uint32_t w_pixels = metrics.width_pixels;
396 const uint32_t h_pixels = metrics.height_pixels;
397
398 const float angle = 45 * M_PI / 180.0; // 0.698132; // 40Deg in rads
399 const float w_meters = ((float)w_pixels / (float)metrics.xdpi) * 0.0254f;
400 const float h_meters = ((float)h_pixels / (float)metrics.ydpi) * 0.0254f;
401
402 const struct u_cardboard_distortion_arguments cardboard_v1_distortion_args = {
403 .distortion_k = {0.441f, 0.156f, 0.f, 0.f, 0.f},
404 .screen =
405 {
406 .w_pixels = w_pixels,
407 .h_pixels = h_pixels,
408 .w_meters = w_meters,
409 .h_meters = h_meters,
410 },
411 .inter_lens_distance_meters = 0.06f,
412 .screen_to_lens_distance_meters = 0.042f,
413 .tray_to_lens_distance_meters = 0.035f,
414 .fov =
415 {
416 .angle_left = -angle,
417 .angle_right = angle,
418 .angle_up = angle,
419 .angle_down = -angle,
420 },
421 .vertical_alignment = U_CARDBOARD_VERTICAL_ALIGNMENT_BOTTOM,
422 };
423 struct u_cardboard_distortion_arguments args = cardboard_v1_distortion_args;
424 if (!load_cardboard_distortion(d, &metrics, &args)) {
425 ANDROID_WARN(
426 d, "Failed to load cardboard calibration file, falling back to Cardboard V1 distortion values");
427 args = cardboard_v1_distortion_args;
428 }
429
430 u_distortion_cardboard_calculate(&args, d->base.hmd, &d->cardboard);
431
432 // Distortion information.
433 u_distortion_mesh_fill_in_compute(&d->base);
434
435 // Everything done, finally start the thread.
436 os_thread_helper_init(&d->oth);
437 ret = os_thread_helper_start(&d->oth, android_run_thread, d);
438 if (ret != 0) {
439 ANDROID_ERROR(d, "Failed to start thread!");
440 android_device_destroy(&d->base);
441 return NULL;
442 }
443
444 u_var_add_root(d, "Android phone", true);
445 u_var_add_log_level(d, &d->log_level, "log_level");
446 u_var_add_ro_vec3_f32(d, &d->fusion.last.accel, "last.accel");
447 u_var_add_ro_vec3_f32(d, &d->fusion.last.gyro, "last.gyro");
448
449 d->base.supported.orientation_tracking = true;
450 d->base.supported.position_tracking = false;
451
452 ANDROID_DEBUG(d, "Created device!");
453
454 return d;
455}