// Monado - The open source OpenXR runtime
1// Copyright 2021-2023, Collabora, Ltd.
2// SPDX-License-Identifier: BSL-1.0
3/*!
4 * @file
5 * @brief RealSense device tracked with host-SLAM.
6 * @author Mateo de Mayo <mateo.demayo@collabora.com>
7 * @ingroup drv_realsense
8 *
9 * Originally created and tried on the D455 model but should work in any
10 * RealSense device that has video and IMU streams.
11 *
12 * Be aware that you need to properly set the SLAM_CONFIG file to match
13 * your camera specifics (stereo/mono, intrinsics, extrinsics, etc).
14 */
15
16#include "xrt/xrt_device.h"
17#include "xrt/xrt_frameserver.h"
18#include "xrt/xrt_frame.h"
19#include "xrt/xrt_prober.h"
20#include "xrt/xrt_config_have.h"
21#include "xrt/xrt_config_build.h"
22
23#include "math/m_filter_fifo.h"
24#include "math/m_space.h"
25
26#include "os/os_time.h"
27
28#include "util/u_device.h"
29#include "util/u_logging.h"
30#include "util/u_debug.h"
31#include "util/u_var.h"
32#include "util/u_sink.h"
33#include "util/u_config_json.h"
34#include "util/u_format.h"
35
36#include "tracking/t_tracking.h"
37
38#include "rs_driver.h"
39
#include <math.h>
#include <stdio.h>
#include <assert.h>
#include <inttypes.h>
#include <librealsense2/rs.h>
#include <librealsense2/h/rs_pipeline.h>
45
46
47// These defaults come from a D455 camera, they might not work for other devices
48#define DEFAULT_STEREO true
49#define DEFAULT_XRT_VIDEO_FORMAT XRT_FORMAT_L8
50#define DEFAULT_VIDEO_FORMAT RS2_FORMAT_Y8
51#define DEFAULT_VIDEO_WIDTH 640
52#define DEFAULT_VIDEO_HEIGHT 360
53#define DEFAULT_VIDEO_FPS 30
54#define DEFAULT_VIDEO_CHANGE_EXPOSURE true
55#define DEFAULT_VIDEO_AUTOEXPOSURE false
56#define DEFAULT_VIDEO_EXPOSURE 6000
57#define DEFAULT_VIDEO_GAIN 127
58#define DEFAULT_GYRO_FPS 200
59#define DEFAULT_ACCEL_FPS 250
60#define DEFAULT_STREAM_TYPE RS2_STREAM_INFRARED
61#define DEFAULT_STREAM1_INDEX 1
62#define DEFAULT_STREAM2_INDEX 2
63
64#define RS_DEVICE_STR "Intel RealSense Host-SLAM"
65#define RS_SOURCE_STR "RealSense Source"
66#define RS_HOST_SLAM_TRACKER_STR "Host SLAM Tracker for RealSense"
67
68#define RS_TRACE(r, ...) U_LOG_IFL_T(r->log_level, __VA_ARGS__)
69#define RS_DEBUG(r, ...) U_LOG_IFL_D(r->log_level, __VA_ARGS__)
70#define RS_INFO(r, ...) U_LOG_IFL_I(r->log_level, __VA_ARGS__)
71#define RS_WARN(r, ...) U_LOG_IFL_W(r->log_level, __VA_ARGS__)
72#define RS_ERROR(r, ...) U_LOG_IFL_E(r->log_level, __VA_ARGS__)
//! Abort the program with a message when @p predicate is false.
//! The `do { ... } while (false)` must NOT end with a semicolon: the caller
//! writes `RS_ASSERT(p, "...");`, and a trailing semicolon inside the macro
//! would produce an extra empty statement that breaks `if (x) RS_ASSERT(...); else`.
#define RS_ASSERT(predicate, ...)                                                                                      \
	do {                                                                                                           \
		bool p = predicate;                                                                                    \
		if (!p) {                                                                                              \
			U_LOG(U_LOGGING_ERROR, __VA_ARGS__);                                                           \
			assert(false && "RS_ASSERT failed: " #predicate);                                              \
			exit(EXIT_FAILURE);                                                                            \
		}                                                                                                      \
	} while (false)
#define RS_ASSERT_(predicate) RS_ASSERT(predicate, "Assertion failed " #predicate)
83
84// Debug assertions, not vital but useful for finding errors
85#ifdef NDEBUG
86#define RS_DASSERT(predicate, ...) (void)(predicate)
87#define RS_DASSERT_(predicate) (void)(predicate)
88#else
89#define RS_DASSERT(predicate, ...) RS_ASSERT(predicate, __VA_ARGS__)
90#define RS_DASSERT_(predicate) RS_ASSERT_(predicate)
91#endif
92
93//! Utility for realsense API calls that can produce errors
94#define DO(call, ...) \
95 call(__VA_ARGS__, &rs->rsc.error_status); \
96 check_error(rs, rs->rsc.error_status, __FILE__, __LINE__)
97
98//! Alternative to DO() with no arguments
99#define DO_(call) \
100 call(&rs->rsc.error_status); \
101 check_error(rs, rs->rsc.error_status, __FILE__, __LINE__)
102
103//! @todo Use one RS_LOG option for the entire driver
104DEBUG_GET_ONCE_LOG_OPTION(rs_log, "RS_HDEV_LOG", U_LOGGING_WARN)
105
106// Forward declarations
107static void
108receive_left_frame(struct xrt_frame_sink *sink, struct xrt_frame *);
109static void
110receive_right_frame(struct xrt_frame_sink *sink, struct xrt_frame *);
111static void
112receive_imu_sample(struct xrt_imu_sink *sink, struct xrt_imu_sample *);
113static void
114rs_source_node_break_apart(struct xrt_frame_node *);
115static void
116rs_source_node_destroy(struct xrt_frame_node *);
117
118/*!
119 * Host-SLAM tracked RealSense device (any RealSense device with camera and IMU streams).
120 *
121 * @implements xrt_device
122 */
123struct rs_hdev
124{
125 struct xrt_device xdev;
126 struct xrt_tracked_slam *slam;
127 struct xrt_pose pose; //!< Device pose
128 struct xrt_pose offset; //!< Additional offset to apply to `pose`
129 enum u_logging_level log_level; //!< Log level
130};
131
132/*!
133 * RealSense source of camera and IMU data.
134 *
135 * @implements xrt_fs
136 * @implements xrt_frame_node
137 */
138struct rs_source
139{
140 struct xrt_fs xfs;
141 struct xrt_frame_node node;
142 enum u_logging_level log_level; //!< Log level
143
144 // Sinks
145 struct xrt_frame_sink left_sink; //!< Intermediate sink for left camera frames
146 struct xrt_frame_sink right_sink; //!< Intermediate sink for right camera frames
147 struct xrt_imu_sink imu_sink; //!< Intermediate sink for IMU samples
148 struct xrt_slam_sinks in_sinks; //!< Pointers to intermediate sinks
149 struct xrt_slam_sinks out_sinks; //!< Pointers to downstream sinks
150
151 // UI Sinks
152 struct u_sink_debug ui_left_sink; //!< Sink to display left frames in UI
153 struct u_sink_debug ui_right_sink; //!< Sink to display right frames in UI
154 struct m_ff_vec3_f32 *gyro_ff; //!< Queue of gyroscope data to display in UI
155 struct m_ff_vec3_f32 *accel_ff; //!< Queue of accelerometer data to display in UI
156
157 // UI Exposure/Gain settings
158 bool ui_autoexposure; //!< Autoexposure value to set
159 struct u_var_draggable_f32 ui_exposure; //!< Exposure value to set
160 struct u_var_draggable_f32 ui_gain; //!< Gain value to set
161 struct u_var_button ui_btn_apply; //!< Apply changes button
162
163 struct rs_container rsc; //!< Container of RealSense API objects
164
165 // Properties loaded from json file and used when configuring the realsense pipeline
166 bool stereo; //!< Indicates whether to use one or two cameras
167 rs2_format video_format; //!< Indicates desired frame color format
168 enum xrt_format xrt_video_format; //!< corresponding format for video_format
169 int video_width; //!< Indicates desired frame width
170 int video_height; //!< Indicates desired frame height
171 int video_fps; //!< Indicates desired fps
172 bool video_change_exposure; //!< Indicates whether to overwrite external device exposure settings
173 bool video_autoexposure; //!< Indicates whether to enable autoexposure or use provided values
174 int video_exposure; //!< Indicates desired exposure time in microseconds
175 int video_gain; //!< Indicates desired gain (16-248)
176 int gyro_fps; //!< Indicates desired gyroscope samples per second
177 int accel_fps; //!< Indicates desired accelerometer samples per second
178 rs2_stream stream_type; //!< Indicates desired stream type for the cameras
179 int stream1_index; //!< Indicates desired stream index for first stream
180 int stream2_index; //!< Indicates desired stream index for second stream
181
182 bool is_running; //!< Whether the device is streaming
183
184 //! Very simple struct to merge the two acc/gyr streams into one IMU stream.
185 //! It just pushes on every gyro sample and reuses the latest acc sample.
186 struct
187 {
188 struct os_mutex mutex; //!< Gyro and accel come from separate threads
189 struct xrt_vec3 accel; //!< Last received accelerometer values
190 struct xrt_vec3 gyro; //!< Last received gyroscope values
191 } partial_imu_sample;
192};
193
194//! @todo Unify check_error() and DO() usage thorough the driver.
195static bool
196check_error(struct rs_source *rs, rs2_error *e, const char *file, int line)
197{
198 if (e == NULL) {
199 return false; // No errors
200 }
201
202 RS_ERROR(rs, "rs_error was raised when calling %s(%s):", rs2_get_failed_function(e), rs2_get_failed_args(e));
203 RS_ERROR(rs, "%s:%d: %s", file, line, rs2_get_error_message(e));
204 exit(EXIT_FAILURE);
205}
206
207
208/*
209 *
210 * Device functionality
211 *
212 */
213
214static inline struct rs_hdev *
215rs_hdev_from_xdev(struct xrt_device *xdev)
216{
217 struct rs_hdev *rh = container_of(xdev, struct rs_hdev, xdev);
218 return rh;
219}
220
221//! Specific pose corrections for Kimera and the D455 camera
222XRT_MAYBE_UNUSED static inline struct xrt_pose
223rs_hdev_correct_pose_from_kimera(struct xrt_pose pose)
224{
225 // Correct swapped axes
226 struct xrt_pose swapped = {0};
227 swapped.position.x = -pose.position.y;
228 swapped.position.y = -pose.position.z;
229 swapped.position.z = pose.position.x;
230 swapped.orientation.x = -pose.orientation.y;
231 swapped.orientation.y = -pose.orientation.z;
232 swapped.orientation.z = pose.orientation.x;
233 swapped.orientation.w = pose.orientation.w;
234
235 // Correct orientation
236 //! @todo Encode this transformation into constants
237 struct xrt_space_relation out_relation;
238 struct xrt_relation_chain relation_chain = {0};
239 struct xrt_pose pre_correction = {{-0.5, -0.5, -0.5, 0.5}, {0, 0, 0}}; // euler(90, 90, 0)
240 float sin45 = 0.7071067811865475;
241 struct xrt_pose pos_correction = {{sin45, 0, sin45, 0}, {0, 0, 0}}; // euler(180, 90, 0)
242 m_relation_chain_push_pose(&relation_chain, &pre_correction);
243 m_relation_chain_push_pose(&relation_chain, &swapped);
244 m_relation_chain_push_pose(&relation_chain, &pos_correction);
245 m_relation_chain_resolve(&relation_chain, &out_relation);
246 return out_relation.pose;
247}
248
249//! Specific pose corrections for Basalt and the D455 camera
250XRT_MAYBE_UNUSED static inline struct xrt_pose
251rs_hdev_correct_pose_from_basalt(struct xrt_pose pose)
252{
253 // Correct swapped axes
254 struct xrt_pose swapped = {0};
255 swapped.position.x = pose.position.x;
256 swapped.position.y = -pose.position.y;
257 swapped.position.z = -pose.position.z;
258 swapped.orientation.x = pose.orientation.x;
259 swapped.orientation.y = -pose.orientation.y;
260 swapped.orientation.z = -pose.orientation.z;
261 swapped.orientation.w = pose.orientation.w;
262
263 // Correct orientation
264 //! @todo Encode this transformation into constants
265 struct xrt_space_relation out_relation;
266 struct xrt_relation_chain relation_chain = {0};
267 const float sin45 = 0.7071067811865475;
268 struct xrt_pose pos_correction = {{sin45, 0, 0, sin45}, {0, 0, 0}}; // euler(90, 0, 0)
269
270 m_relation_chain_push_pose(&relation_chain, &swapped);
271 m_relation_chain_push_pose(&relation_chain, &pos_correction);
272 m_relation_chain_resolve(&relation_chain, &out_relation);
273 return out_relation.pose;
274}
275
/*!
 * Returns the device pose: queries the SLAM tracker, stores the result in
 * `rh->pose` only when the tracker reports it as tracked (so an untracked
 * report keeps the last good pose instead of jumping), then applies
 * `rh->offset` on top.
 */
static xrt_result_t
rs_hdev_get_tracked_pose(struct xrt_device *xdev,
                         enum xrt_input_name name,
                         int64_t at_timestamp_ns,
                         struct xrt_space_relation *out_relation)
{
	struct rs_hdev *rh = rs_hdev_from_xdev(xdev);
	RS_ASSERT_(rh->slam != NULL);
	RS_ASSERT_(at_timestamp_ns < INT64_MAX);

	xrt_tracked_slam_get_tracked_pose(rh->slam, at_timestamp_ns, out_relation);

	// Only cache the pose when both orientation and position are tracked
	int pose_bits = XRT_SPACE_RELATION_ORIENTATION_TRACKED_BIT | XRT_SPACE_RELATION_POSITION_TRACKED_BIT;
	bool pose_tracked = out_relation->relation_flags & pose_bits;

	if (pose_tracked) {
#ifdef XRT_FEATURE_SLAM
		//! @todo Correct pose depending on the VIT system in use, this should be done in the system itself.
		// For now, assume that we are using Basalt.
		rh->pose = rs_hdev_correct_pose_from_basalt(out_relation->pose);
#else
		rh->pose = out_relation->pose;
#endif
	}

	// out_relation = offset * pose (last tracked pose, possibly stale)
	struct xrt_relation_chain relation_chain = {0};
	m_relation_chain_push_pose(&relation_chain, &rh->pose);
	m_relation_chain_push_pose(&relation_chain, &rh->offset);
	m_relation_chain_resolve(&relation_chain, out_relation);
	out_relation->relation_flags = (enum xrt_space_relation_flags)(
	    XRT_SPACE_RELATION_ORIENTATION_VALID_BIT | XRT_SPACE_RELATION_POSITION_VALID_BIT |
	    XRT_SPACE_RELATION_ORIENTATION_TRACKED_BIT | XRT_SPACE_RELATION_POSITION_TRACKED_BIT);

	return XRT_SUCCESS;
}
311
312static void
313rs_hdev_destroy(struct xrt_device *xdev)
314{
315 struct rs_hdev *rh = rs_hdev_from_xdev(xdev);
316 RS_INFO(rh, "Destroying rs_hdev");
317 u_var_remove_root(rh);
318 u_device_free(&rh->xdev);
319}
320
321
322/*
323 *
324 * JSON functionality
325 *
326 */
327
328#define JSON_CONFIG_FIELD_NAME "config_realsense_hdev"
329
330//! Helper function for loading an int field from a json container and printing useful
331//! messages along it. *out is expected to come preloaded with a default value.
332static void
333json_int(struct rs_source *rs, const cJSON *json, const char *field, int *out)
334{
335 if (!u_json_get_int(u_json_get(json, field), out)) {
336 // This is a warning because we want the user to set these config fields
337 RS_WARN(rs, "Using default %s.%s=%d", JSON_CONFIG_FIELD_NAME, field, *out);
338 } else {
339 RS_DEBUG(rs, "Using %s.%s=%d", JSON_CONFIG_FIELD_NAME, field, *out);
340 }
341}
342
343//! Similar to @ref json_int but for bools.
344static void
345json_bool(struct rs_source *rs, const cJSON *json, const char *field, bool *out)
346{
347 if (!u_json_get_bool(u_json_get(json, field), out)) {
348 // This is a warning because we want the user to set these config fields
349 RS_WARN(rs, "Using default %s.%s=%s", JSON_CONFIG_FIELD_NAME, field, *out ? "true" : "false");
350 } else {
351 RS_DEBUG(rs, "Using %s.%s=%s", JSON_CONFIG_FIELD_NAME, field, *out ? "true" : "false");
352 }
353}
354
355//! Similar to @ref json_int but for a video rs2_format, also sets the
356//! equivalent xrt_format if any.
357static void
358json_rs2_format(
359 struct rs_source *rs, const cJSON *json, const char *field, rs2_format *out_rformat, enum xrt_format *out_xformat)
360{
361 int format_int = (int)*out_rformat;
362 bool valid_field = u_json_get_int(u_json_get(json, field), &format_int);
363 if (!valid_field) {
364 RS_WARN(rs, "Using default %s.%s=%d (%d)", JSON_CONFIG_FIELD_NAME, field, *out_rformat, *out_xformat);
365 return;
366 }
367
368 rs2_format rformat = (rs2_format)format_int;
369 enum xrt_format xformat;
370 if (rformat == RS2_FORMAT_Y8) {
371 xformat = XRT_FORMAT_L8;
372 } else if (rformat == RS2_FORMAT_RGB8 || rformat == RS2_FORMAT_BGR8) {
373 xformat = XRT_FORMAT_R8G8B8;
374 } else {
375 RS_ERROR(rs, "Invalid %s.%s=%d", JSON_CONFIG_FIELD_NAME, field, format_int);
376 RS_ERROR(rs, "Valid values: %d, %d, %d", RS2_FORMAT_Y8, RS2_FORMAT_RGB8, RS2_FORMAT_BGR8);
377 RS_ERROR(rs, "Using default %s.%s=%d (%d)", JSON_CONFIG_FIELD_NAME, field, *out_rformat, *out_xformat);
378
379 // Reaching this doesn't mean that a matching xrt_format doesn't exist, just
380 // that I didn't need it. Feel free to add it.
381
382 return;
383 }
384
385 *out_rformat = rformat;
386 *out_xformat = xformat;
387 RS_DEBUG(rs, "Using %s.%s=%d (xrt_format=%d)", JSON_CONFIG_FIELD_NAME, field, *out_rformat, *out_xformat);
388}
389
390//! Similar to @ref json_int but for a rs2_stream type.
391static void
392json_rs2_stream(struct rs_source *rs, const cJSON *json, const char *field, rs2_stream *out_stream)
393{
394 int stream_int = (int)*out_stream;
395 bool valid_field = u_json_get_int(u_json_get(json, field), &stream_int);
396 if (!valid_field) {
397 RS_WARN(rs, "Using default %s.%s=%d", JSON_CONFIG_FIELD_NAME, field, *out_stream);
398 return;
399 }
400
401 rs2_stream rstream = (rs2_stream)stream_int;
402 if (rstream != RS2_STREAM_COLOR && rstream != RS2_STREAM_INFRARED && rstream != RS2_STREAM_FISHEYE) {
403 RS_ERROR(rs, "Invalid %s.%s=%d", JSON_CONFIG_FIELD_NAME, field, stream_int);
404 RS_ERROR(rs, "Valid values: %d, %d, %d", RS2_STREAM_COLOR, RS2_STREAM_INFRARED, RS2_STREAM_FISHEYE);
405 RS_ERROR(rs, "Using default %s.%s=%d", JSON_CONFIG_FIELD_NAME, field, *out_stream);
406 return;
407 }
408
409 *out_stream = rstream;
410 RS_DEBUG(rs, "Using %s.%s=%d", JSON_CONFIG_FIELD_NAME, field, *out_stream);
411}
412
//! Load the stream configuration from the JSON_CONFIG_FIELD_NAME section of
//! the main config json file. Fields that are missing keep their defaults.
static void
rs_source_load_stream_options_from_json(struct rs_source *rs)
{
	// Set default values
	rs->stereo = DEFAULT_STEREO;
	rs->xrt_video_format = DEFAULT_XRT_VIDEO_FORMAT;
	rs->video_format = DEFAULT_VIDEO_FORMAT;
	rs->video_width = DEFAULT_VIDEO_WIDTH;
	rs->video_height = DEFAULT_VIDEO_HEIGHT;
	rs->video_fps = DEFAULT_VIDEO_FPS;
	rs->video_change_exposure = DEFAULT_VIDEO_CHANGE_EXPOSURE;
	rs->video_autoexposure = DEFAULT_VIDEO_AUTOEXPOSURE;
	rs->video_exposure = DEFAULT_VIDEO_EXPOSURE;
	rs->video_gain = DEFAULT_VIDEO_GAIN;
	rs->gyro_fps = DEFAULT_GYRO_FPS;
	rs->accel_fps = DEFAULT_ACCEL_FPS;
	rs->stream_type = DEFAULT_STREAM_TYPE;
	rs->stream1_index = DEFAULT_STREAM1_INDEX;
	rs->stream2_index = DEFAULT_STREAM2_INDEX;

	struct u_config_json config = {0};
	u_config_json_open_or_create_main_file(&config);
	if (!config.file_loaded) {
		RS_WARN(rs, "Unable to load config file, will use default stream values");
		cJSON_Delete(config.root);
		return;
	}

	const cJSON *hdev_config = u_json_get(config.root, JSON_CONFIG_FIELD_NAME);
	if (hdev_config == NULL) {
		RS_WARN(rs, "Field '%s' not present in json file, will use defaults", JSON_CONFIG_FIELD_NAME);
	}

	// NOTE(review): the helpers below are still called with hdev_config == NULL;
	// presumably u_json_get(NULL, ...) is NULL-safe and they fall back to the
	// defaults set above — confirm against u_json_get.
	json_bool(rs, hdev_config, "stereo", &rs->stereo);
	json_rs2_format(rs, hdev_config, "video_format", &rs->video_format, &rs->xrt_video_format);
	json_int(rs, hdev_config, "video_width", &rs->video_width);
	json_int(rs, hdev_config, "video_height", &rs->video_height);
	json_int(rs, hdev_config, "video_fps", &rs->video_fps);
	json_bool(rs, hdev_config, "video_change_exposure", &rs->video_change_exposure);
	json_bool(rs, hdev_config, "video_autoexposure", &rs->video_autoexposure);
	json_int(rs, hdev_config, "video_exposure", &rs->video_exposure);
	json_int(rs, hdev_config, "video_gain", &rs->video_gain);
	json_int(rs, hdev_config, "gyro_fps", &rs->gyro_fps);
	json_int(rs, hdev_config, "accel_fps", &rs->accel_fps);
	json_rs2_stream(rs, hdev_config, "stream_type", &rs->stream_type);
	json_int(rs, hdev_config, "stream1_index", &rs->stream1_index);
	json_int(rs, hdev_config, "stream2_index", &rs->stream2_index);

	cJSON_Delete(config.root);
}
463
464
465/*
466 *
467 * Realsense functionality
468 *
469 */
470
//! Set an option for all sensors. Return whether it was set for any.
//! Each DO() call aborts the process on a librealsense error.
static bool
set_option_in_all_sensors(struct rs_source *rs, enum rs2_option option, float value)
{
	struct rs_container *rsc = &rs->rsc;
	rs2_sensor_list *sensors = DO(rs2_query_sensors, rsc->device);
	int sensors_count = DO(rs2_get_sensors_count, sensors);
	bool any_set = false;
	for (int i = 0; i < sensors_count; i++) {
		rs2_sensor *sensor = DO(rs2_create_sensor, sensors, i);
		// A rs2_sensor handle is used directly as its rs2_options handle
		rs2_options *sensor_options = (rs2_options *)sensor;
		bool has_option = DO(rs2_supports_option, sensor_options, option);
		if (has_option) {
			float min, max, step, def;
			DO(rs2_get_option_range, sensor_options, option, &min, &max, &step, &def);
			bool valid_value = value >= min && value <= max;
			if (valid_value) {
				DO(rs2_set_option, sensor_options, option, value);
				any_set = true;
			} else {
				// NOTE(review): NULL error pointer here (best-effort read for the
				// warning), unlike the DO() calls which abort — confirm intended.
				float cur = rs2_get_option(sensor_options, option, NULL);
				const char *option_desc = DO(rs2_get_option_description, sensor_options, option);
				const char *option_name = DO(rs2_get_option_name, sensor_options, option);
				const char *sensor_name = DO(rs2_get_sensor_info, sensor, RS2_CAMERA_INFO_NAME);
				RS_WARN(rs, "Unable to update sensor %s", sensor_name);
				RS_WARN(rs, "Invalid value=%f for option %s ('%s')", value, option_name, option_desc);
				RS_WARN(rs, "Min=%f Max=%f Step=%f Current=%f", min, max, step, cur);
			}
		}
		rs2_delete_sensor(sensor);
	}
	rs2_delete_sensor_list(sensors);

	return any_set;
}
506
507//! Submit changes to supported options to the device
508static void
509update_options(struct rs_source *rs)
510{
511 set_option_in_all_sensors(rs, RS2_OPTION_EXPOSURE, rs->video_exposure);
512 set_option_in_all_sensors(rs, RS2_OPTION_GAIN, rs->video_gain);
513 set_option_in_all_sensors(rs, RS2_OPTION_ENABLE_AUTO_EXPOSURE, rs->video_autoexposure);
514}
515
516/*
517 *
518 * Stream functionality
519 *
520 */
521
522static void
523rs_source_frame_destroy(struct xrt_frame *xf)
524{
525 rs2_frame *rframe = (rs2_frame *)xf->owner;
526 rs2_release_frame(rframe);
527 free(xf);
528}
//! Convert a frame's device timestamp (realtime-clock based when using
//! GLOBAL_TIME) into the monotonic clock, given samples of both clocks
//! taken "now" by the caller.
static inline timepoint_ns
get_frame_monotonic_ts_from(struct rs_source *rs, rs2_frame *frame, uint64_t unow_monotonic, uint64_t unow_realtime)
{
	RS_DASSERT_(unow_monotonic < INT64_MAX && unow_realtime < INT64_MAX);
	timepoint_ns now_monotonic = unow_monotonic;
	timepoint_ns now_realtime = unow_realtime;

	double timestamp_ms = DO(rs2_get_frame_timestamp, frame);
	timepoint_ns device_ts = timestamp_ms * U_TIME_1MS_IN_NS;

	// Assertion commented because GLOBAL_TIME makes ts be a bit in the future
	// RS_DASSERT_((timepoint_ns)now_realtime > ts);

	// Shift the device (realtime-domain) timestamp by the current offset
	// between the two clocks
	timepoint_ns monotonic_ts = now_monotonic - (now_realtime - device_ts);
	return monotonic_ts;
}
545
546
547//! Convert device_ts to monotonic clock.
548//! Assumes now_realtime and device_ts are in the same clock domain (GLOBAL_TIME).
549static inline timepoint_ns
550get_frame_monotonic_ts(struct rs_source *rs, rs2_frame *frame)
551{
552 uint64_t now_monotonic = os_monotonic_get_ns();
553 uint64_t now_realtime = os_realtime_get_ns();
554 return get_frame_monotonic_ts_from(rs, frame, now_monotonic, now_realtime);
555}
556
557
//! Wrap a RealSense video frame into a newly allocated xrt_frame.
//! The xrt_frame takes ownership of @p rframe (released by
//! rs_source_frame_destroy), starts with refcount 1, and deliberately leaves
//! `timestamp` unset — the caller must fill it with a monotonic time later.
static void
rs2xrt_frame(struct rs_source *rs, rs2_frame *rframe, struct xrt_frame **out_xframe)
{
	RS_ASSERT_(*out_xframe == NULL);

	uint64_t number = DO(rs2_get_frame_number, rframe);
	double timestamp_ms = DO(rs2_get_frame_timestamp, rframe);
	uint8_t *data = (uint8_t *)DO(rs2_get_frame_data, rframe);
	int bytes_per_pixel = u_format_block_size(rs->xrt_video_format);
	int stride = rs->video_width * bytes_per_pixel;

#ifndef NDEBUG // Debug only: check that the realsense stream is behaving as expected
	bool is_video_frame = DO(rs2_is_frame_extendable_to, rframe, RS2_EXTENSION_VIDEO_FRAME);
	int rs_bits_per_pixel = DO(rs2_get_frame_bits_per_pixel, rframe);
	int rs_width = DO(rs2_get_frame_width, rframe);
	int rs_height = DO(rs2_get_frame_height, rframe);
	int rs_stride = DO(rs2_get_frame_stride_in_bytes, rframe);
	RS_DASSERT_(is_video_frame);
	RS_DASSERT_(rs_bits_per_pixel == bytes_per_pixel * 8);
	RS_DASSERT(rs_width == rs->video_width, "%d != %d", rs_width, rs->video_width);
	RS_DASSERT(rs_height == rs->video_height, "%d != %d", rs_height, rs->video_height);
	RS_DASSERT(rs_stride == stride, "%d != %d", rs_stride, stride);
#endif

	struct xrt_frame *xf = U_TYPED_CALLOC(struct xrt_frame);
	xf->reference.count = 1;
	xf->destroy = rs_source_frame_destroy;
	xf->owner = rframe;
	xf->width = rs->video_width;
	xf->height = rs->video_height;
	xf->stride = stride;
	xf->size = rs->video_height * stride;
	xf->data = data; // Points into rframe's buffer, valid while rframe is held

	xf->format = rs->xrt_video_format;
	xf->stereo_format = XRT_STEREO_FORMAT_NONE; //!< @todo Use a stereo xrt_format

	uint64_t timestamp_ns = timestamp_ms * U_TIME_1MS_IN_NS;
	// Don't set timestamp here, timestamp_ns is in realtime clock but we need
	// monotonic. The user of this function should do it later.
	// xf->timestamp = timestamp_ns;
	xf->source_timestamp = timestamp_ns;
	xf->source_sequence = number;
	xf->source_id = rs->xfs.source_id;

	*out_xframe = xf;
}
605
606static void
607handle_frameset(struct rs_source *rs, rs2_frame *frames)
608{
609 // Check number of frames on debug builds
610 int num_of_frames = DO(rs2_embedded_frames_count, frames);
611 if (rs->stereo) {
612 RS_DASSERT(num_of_frames == 2, "Stereo frameset contains %d (!= 2) frames", num_of_frames);
613 } else {
614 RS_DASSERT(num_of_frames == 1, "Non-stereo frameset contains %d (!= 1) frames", num_of_frames);
615 }
616
617 // Left frame
618 rs2_frame *rframe_left = DO(rs2_extract_frame, frames, 0);
619 struct xrt_frame *xf_left = NULL;
620 rs2xrt_frame(rs, rframe_left, &xf_left);
621
622 if (rs->stereo) {
623
624 // Right frame
625 rs2_frame *rframe_right = DO(rs2_extract_frame, frames, 1);
626 struct xrt_frame *xf_right = NULL;
627 rs2xrt_frame(rs, rframe_right, &xf_right);
628
629 if (xf_left->timestamp == xf_right->timestamp) {
630
631 // Correct timestamps to same monotonic time
632 uint64_t now_monotonic = os_monotonic_get_ns();
633 uint64_t now_realtime = os_realtime_get_ns();
634 timepoint_ns ts = get_frame_monotonic_ts_from(rs, frames, now_monotonic, now_realtime);
635 xf_left->timestamp = ts;
636 xf_right->timestamp = ts;
637
638 xrt_sink_push_frame(rs->in_sinks.cams[0], xf_left);
639 xrt_sink_push_frame(rs->in_sinks.cams[1], xf_right);
640 } else {
641 // This usually happens only once at start and never again
642 RS_WARN(rs, "Realsense device sent left and right frames with different timestamps %ld != %ld",
643 xf_left->timestamp, xf_right->timestamp);
644 }
645
646 xrt_frame_reference(&xf_right, NULL);
647 } else {
648 xrt_sink_push_frame(rs->in_sinks.cams[0], xf_left);
649 }
650
651 xrt_frame_reference(&xf_left, NULL);
652
653 // Release frameset but individual frames will be released on xrt_frame destruction
654 rs2_release_frame(frames);
655}
656
657//! Decides when to submit the full IMU sample out of separate
658//! gyroscope/accelerometer samples.
659static void
660partial_imu_sample_push(struct rs_source *rs, timepoint_ns ts, struct xrt_vec3 vals, bool is_gyro)
661{
662 os_mutex_lock(&rs->partial_imu_sample.mutex);
663
664 if (is_gyro) {
665 rs->partial_imu_sample.gyro = vals;
666 } else {
667 rs->partial_imu_sample.accel = vals;
668 }
669 struct xrt_vec3 gyro = rs->partial_imu_sample.gyro;
670 struct xrt_vec3 accel = rs->partial_imu_sample.accel;
671
672 // Push IMU sample from fastest motion sensor arrives, reuse latest data from the other sensor (or zero)
673 bool should_submit = (rs->gyro_fps > rs->accel_fps) == is_gyro;
674 if (should_submit) {
675 struct xrt_imu_sample sample = {ts, {accel.x, accel.y, accel.z}, {gyro.x, gyro.y, gyro.z}};
676 xrt_sink_push_imu(rs->in_sinks.imu, &sample);
677 }
678
679 os_mutex_unlock(&rs->partial_imu_sample.mutex);
680}
681
682static void
683handle_gyro_frame(struct rs_source *rs, rs2_frame *frame)
684{
685 const float *data = DO(rs2_get_frame_data, frame);
686
687#ifndef NDEBUG
688 int data_size = DO(rs2_get_frame_data_size, frame);
689 RS_DASSERT(data_size == 3 * sizeof(float) || data_size == 4 * sizeof(float), "Unexpected size=%d", data_size);
690 RS_DASSERT_(data_size != 4 || data[3] == 0);
691#endif
692 timepoint_ns timestamp_ns = get_frame_monotonic_ts(rs, frame);
693 struct xrt_vec3 gyro = {data[0], data[1], data[2]};
694 RS_TRACE(rs, "gyro t=%ld x=%f y=%f z=%f", timestamp_ns, gyro.x, gyro.y, gyro.z);
695 partial_imu_sample_push(rs, timestamp_ns, gyro, true);
696 rs2_release_frame(frame);
697}
698
699static void
700handle_accel_frame(struct rs_source *rs, rs2_frame *frame)
701{
702 const float *data = DO(rs2_get_frame_data, frame);
703
704#ifndef NDEBUG
705 int data_size = DO(rs2_get_frame_data_size, frame);
706 // For some strange reason data_size is 4 for samples that can use hardware
707 // timestamps. And that last element data[3] seems to always be zero.
708 RS_DASSERT(data_size == 3 * sizeof(float) || data_size == 4 * sizeof(float), "Unexpected size=%d", data_size);
709 RS_DASSERT_(data_size != 4 || data[3] == 0);
710#endif
711 timepoint_ns timestamp_ns = get_frame_monotonic_ts(rs, frame);
712 struct xrt_vec3 accel = {data[0], data[1], data[2]};
713 RS_TRACE(rs, "accel t=%ld x=%f y=%f z=%f", timestamp_ns, accel.x, accel.y, accel.z);
714 partial_imu_sample_push(rs, timestamp_ns, accel, false);
715 rs2_release_frame(frame);
716}
717
//! Checks that the timestamp domain of the realsense sample (the frame) is in
//! global time or, at the very least, in another domain that we support.
//! Debug-build-only check: compiles to nothing with NDEBUG.
static inline void
check_global_time(struct rs_source *rs, rs2_frame *frame, rs2_stream stream_type)
{

#ifndef NDEBUG // Check valid timestamp domains only on debug builds
	rs2_timestamp_domain ts_domain = DO(rs2_get_frame_timestamp_domain, frame);
	bool using_global_time = ts_domain == RS2_TIMESTAMP_DOMAIN_GLOBAL_TIME;
	bool acceptable_timestamp_domain = using_global_time;

	//! @note We should be ensuring that we have the same timestamp domains in all
	//! sensors. But the user might have a newer kernel versions that is not
	//! supported by the RealSense DKMS package that allows GLOBAL_TIME for all
	//! sensors. From my experience and based on other users' reports, the only
	//! affected sensor without GLOBAL_TIME is the gyroscope, which is ~30ms off.
	//! See https://github.com/IntelRealSense/librealsense/issues/5710

	bool is_accel = stream_type == RS2_STREAM_ACCEL;
	bool is_gyro = stream_type == RS2_STREAM_GYRO;
	bool is_motion_sensor = is_accel || is_gyro;

	if (is_motion_sensor) {
		bool is_gyro_slower = rs->gyro_fps < rs->accel_fps;
		bool is_slower_motion_sensor = is_gyro_slower == is_gyro;

		// We allow different domains for the slower sensor because partial_imu_sample
		// discards those timestamps
		acceptable_timestamp_domain |= is_slower_motion_sensor;
	}

	if (!acceptable_timestamp_domain) {
		RS_ERROR(rs, "Invalid ts_domain=%s", rs2_timestamp_domain_to_string(ts_domain));
		RS_ERROR(rs, "One of your RealSense sensors is not using GLOBAL_TIME domain for its timestamps.");
		RS_ERROR(rs, "This should be solved by applying the kernel patch required by the RealSense SDK.");
		if (is_motion_sensor) {
			const char *a = is_accel ? "accelerometer" : "gyroscope";
			const char *b = is_accel ? "gyroscope" : "accelerometer";
			RS_ERROR(rs, "As an alternative, set %s frequency to be greater than %s frequency.", b, a);
		}
		RS_DASSERT(false, "Unacceptable timestamp domain %s", rs2_timestamp_domain_to_string(ts_domain));
	}
#endif
}
762
//! librealsense frame callback: identifies the stream the sample belongs to
//! and dispatches it to the matching handler. Each handler takes ownership of
//! @p frame and releases it.
static void
on_frame(rs2_frame *frame, void *ptr)
{
	struct rs_source *rs = (struct rs_source *)ptr;

	const rs2_stream_profile *stream = DO(rs2_get_frame_stream_profile, frame);
	rs2_stream stream_type;
	rs2_format format;
	int index, unique_id, framerate;
	DO(rs2_get_stream_profile_data, stream, &stream_type, &format, &index, &unique_id, &framerate);

	bool is_frameset = DO(rs2_is_frame_extendable_to, frame, RS2_EXTENSION_COMPOSITE_FRAME);
	bool is_motion_frame = DO(rs2_is_frame_extendable_to, frame, RS2_EXTENSION_MOTION_FRAME);
	check_global_time(rs, frame, stream_type);

	// Debug-assert that each stream matches the configured format/index/rate
	if (stream_type == rs->stream_type) {
		RS_DASSERT_(is_frameset && format == rs->video_format &&
		            (index == rs->stream1_index || index == rs->stream2_index) && framerate == rs->video_fps);
		handle_frameset(rs, frame);
	} else if (stream_type == RS2_STREAM_GYRO) {
		RS_DASSERT_(is_motion_frame && format == RS2_FORMAT_MOTION_XYZ32F && framerate == rs->gyro_fps);
		handle_gyro_frame(rs, frame);
	} else if (stream_type == RS2_STREAM_ACCEL) {
		RS_DASSERT_(is_motion_frame && format == RS2_FORMAT_MOTION_XYZ32F && framerate == rs->accel_fps);
		handle_accel_frame(rs, frame);
	} else {
		RS_ASSERT(false, "Unexpected stream");
	}
}
792
793static void
794rs_source_apply_changes_ctn_cb(void *ptr)
795{
796 struct rs_source *rs = (struct rs_source *)ptr;
797 rs->video_autoexposure = rs->ui_autoexposure;
798 rs->video_exposure = rs->ui_exposure.val;
799 rs->video_gain = rs->ui_gain.val;
800 update_options(rs);
801}
802
803/*
804 *
805 * Frameserver functionality
806 *
807 */
808
809static inline struct rs_source *
810rs_source_from_xfs(struct xrt_fs *xfs)
811{
812 struct rs_source *rs = container_of(xfs, struct rs_source, xfs);
813 return rs;
814}
815
816static bool
817rs_source_enumerate_modes(struct xrt_fs *xfs, struct xrt_fs_mode **out_modes, uint32_t *out_count)
818{
819 struct rs_source *rs = container_of(xfs, struct rs_source, xfs);
820 struct xrt_fs_mode *modes = U_TYPED_ARRAY_CALLOC(struct xrt_fs_mode, 1);
821 RS_ASSERT(modes != NULL, "Unable to calloc rs_source playback modes");
822
823 //! @todo only exposing the one stream configuration the user provided through
824 //! the json configuration but we could show all possible stream setups.
825 modes[0] = (struct xrt_fs_mode){.width = rs->video_width,
826 .height = rs->video_height,
827 .format = rs->xrt_video_format,
828 //! @todo The stereo_format being NONE is incorrect but one that supports
829 //! frames in different memory regions does not exist yet.
830 .stereo_format = XRT_STEREO_FORMAT_NONE};
831
832 *out_modes = modes;
833 *out_count = 1;
834
835 return true;
836}
837
838static bool
839rs_source_configure_capture(struct xrt_fs *xfs, struct xrt_fs_capture_parameters *cp)
840{
841 //! @todo implement
842 RS_ASSERT(false, "Not Implemented");
843 return false;
844}
845
846static bool
847rs_source_stream_stop(struct xrt_fs *xfs)
848{
849 struct rs_source *rs = rs_source_from_xfs(xfs);
850 if (rs->is_running) {
851 DO(rs2_pipeline_stop, rs->rsc.pipeline);
852 rs->is_running = false;
853 }
854 return true;
855}
856
857static bool
858rs_source_is_running(struct xrt_fs *xfs)
859{
860 struct rs_source *rs = rs_source_from_xfs(xfs);
861 return rs->is_running;
862}
863
864static bool
865rs_source_stream_start(struct xrt_fs *xfs,
866 struct xrt_frame_sink *xs,
867 enum xrt_fs_capture_type capture_type,
868 uint32_t descriptor_index)
869{
870 struct rs_source *rs = rs_source_from_xfs(xfs);
871 if (xs == NULL && capture_type == XRT_FS_CAPTURE_TYPE_TRACKING) {
872 RS_ASSERT(rs->out_sinks.cams[0] != NULL, "No left sink provided");
873 RS_INFO(rs, "Starting RealSense stream in tracking mode");
874 } else if (xs != NULL && capture_type == XRT_FS_CAPTURE_TYPE_CALIBRATION) {
875 RS_INFO(rs, "Starting RealSense stream in calibration mode, will stream only left frames");
876 rs->out_sinks.cam_count = 1;
877 rs->out_sinks.cams[0] = xs;
878 } else {
879 RS_ASSERT(false, "Unsupported stream configuration xs=%p capture_type=%d", (void *)xs, capture_type);
880 return false;
881 }
882
883 struct rs_container *rsc = &rs->rsc;
884 rsc->profile = DO(rs2_pipeline_start_with_config_and_callback, rsc->pipeline, rsc->config, on_frame, rs);
885
886 set_option_in_all_sensors(rs, RS2_OPTION_EMITTER_ENABLED, 0); // Lasers are bad for SLAM
887 if (rs->video_change_exposure) {
888 update_options(rs);
889 }
890
891 rs->is_running = true;
892 return rs->is_running;
893}
894
895static bool
896rs_source_slam_stream_start(struct xrt_fs *xfs, struct xrt_slam_sinks *sinks)
897{
898 struct rs_source *rs = rs_source_from_xfs(xfs);
899 rs->out_sinks = *sinks;
900 return rs_source_stream_start(xfs, NULL, XRT_FS_CAPTURE_TYPE_TRACKING, 0);
901}
902
903
904/*
905 *
906 * Sinks functionality
907 *
908 */
909
910static void
911receive_left_frame(struct xrt_frame_sink *sink, struct xrt_frame *xf)
912{
913 struct rs_source *rs = container_of(sink, struct rs_source, left_sink);
914 RS_TRACE(rs, "left img t=%ld source_t=%ld", xf->timestamp, xf->source_timestamp);
915 u_sink_debug_push_frame(&rs->ui_left_sink, xf);
916 if (rs->out_sinks.cams[0]) {
917 xrt_sink_push_frame(rs->out_sinks.cams[0], xf);
918 }
919}
920
921static void
922receive_right_frame(struct xrt_frame_sink *sink, struct xrt_frame *xf)
923{
924 struct rs_source *rs = container_of(sink, struct rs_source, right_sink);
925 RS_TRACE(rs, "right img t=%ld source_t=%ld", xf->timestamp, xf->source_timestamp);
926 u_sink_debug_push_frame(&rs->ui_right_sink, xf);
927 if (rs->out_sinks.cams[1]) {
928 xrt_sink_push_frame(rs->out_sinks.cams[1], xf);
929 }
930}
931
932static void
933receive_imu_sample(struct xrt_imu_sink *sink, struct xrt_imu_sample *s)
934{
935 struct rs_source *rs = container_of(sink, struct rs_source, imu_sink);
936
937 timepoint_ns ts = s->timestamp_ns;
938 struct xrt_vec3_f64 a = s->accel_m_s2;
939 struct xrt_vec3_f64 w = s->gyro_rad_secs;
940 RS_TRACE(rs, "imu t=%ld a=(%f %f %f) w=(%f %f %f)", ts, a.x, a.y, a.z, w.x, w.y, w.z);
941
942 // Push to debug UI
943
944 struct xrt_vec3 gyro = {(float)w.x, (float)w.y, (float)w.z};
945 struct xrt_vec3 accel = {(float)a.x, (float)a.y, (float)a.z};
946 m_ff_vec3_f32_push(rs->gyro_ff, &gyro, ts);
947 m_ff_vec3_f32_push(rs->accel_ff, &accel, ts);
948
949 if (rs->out_sinks.imu) {
950 xrt_sink_push_imu(rs->out_sinks.imu, s);
951 }
952}
953
954
955/*
956 *
957 * Frame node functionality
958 *
959 */
960
961static void
962rs_source_node_break_apart(struct xrt_frame_node *node)
963{
964 struct rs_source *rs = container_of(node, struct rs_source, node);
965 rs_source_stream_stop(&rs->xfs);
966}
967
//! xrt_frame_node callback: release every resource owned by the source.
//! Runs after break_apart, so the pipeline has already been stopped.
static void
rs_source_node_destroy(struct xrt_frame_node *node)
{
	struct rs_source *rs = container_of(node, struct rs_source, node);
	RS_INFO(rs, "Destroying RealSense source");
	os_mutex_destroy(&rs->partial_imu_sample.mutex);
	// Detach the debug UI before destroying the sinks and fifos it displays.
	u_var_remove_root(rs);
	u_sink_debug_destroy(&rs->ui_left_sink);
	u_sink_debug_destroy(&rs->ui_right_sink);
	m_ff_vec3_f32_free(&rs->gyro_ff);
	m_ff_vec3_f32_free(&rs->accel_ff);
	// Releases the librealsense handles (config, pipeline, device, context).
	rs_container_cleanup(&rs->rsc);
	free(rs);
}
982
983
984/*
985 *
986 * Exported functions
987 *
988 */
989
//! Create an xrt_device for a RealSense camera tracked by host SLAM.
//!
//! Sets up the device struct, its debug UI, and requests a SLAM tracker from
//! the prober's tracking factory. Returns NULL (after destroying the
//! partially-built device) if the SLAM tracker cannot be created.
//!
//! @param xp Prober whose tracking factory provides the SLAM tracker.
//! @param device_idx Index of the RealSense device.
//!        NOTE(review): unused in this function; presumably the stream source
//!        created elsewhere consumes it — confirm against callers.
struct xrt_device *
rs_hdev_create(struct xrt_prober *xp, int device_idx)
{
	struct rs_hdev *rh = U_DEVICE_ALLOCATE(struct rs_hdev, U_DEVICE_ALLOC_TRACKING_NONE, 1, 0);
	rh->log_level = debug_get_log_option_rs_log();
	// Identity poses: SLAM pose and user-adjustable offset start at origin.
	rh->pose = (struct xrt_pose){{0, 0, 0, 1}, {0, 0, 0}};
	rh->offset = (struct xrt_pose){{0, 0, 0, 1}, {0, 0, 0}};

	struct xrt_device *xd = &rh->xdev;
	xd->name = XRT_DEVICE_REALSENSE;
	xd->device_type = XRT_DEVICE_TYPE_GENERIC_TRACKER;

	snprintf(xd->str, XRT_DEVICE_NAME_LEN, "%s", RS_DEVICE_STR);
	snprintf(xd->serial, XRT_DEVICE_NAME_LEN, "%s", RS_DEVICE_STR);

	snprintf(xd->tracking_origin->name, XRT_TRACKING_NAME_LEN, "%s", RS_HOST_SLAM_TRACKER_STR);
	xd->tracking_origin->type = XRT_TRACKING_TYPE_EXTERNAL_SLAM;

	xd->inputs[0].name = XRT_INPUT_GENERIC_TRACKER_POSE;

	xd->supported.orientation_tracking = true;
	xd->supported.position_tracking = true;

	xd->update_inputs = u_device_noop_update_inputs;
	xd->get_tracked_pose = rs_hdev_get_tracked_pose;
	xd->destroy = rs_hdev_destroy;

	// Setup UI
	u_var_add_root(rh, "RealSense Device", false);
	u_var_add_ro_text(rh, "Host SLAM", "Tracked by");
	u_var_add_log_level(rh, &rh->log_level, "Log Level");
	u_var_add_pose(rh, &rh->pose, "SLAM Pose");
	u_var_add_pose(rh, &rh->offset, "Offset Pose");

	// Request a SLAM tracker; negative return means it could not be set up.
	bool tracked = xp->tracking->create_tracked_slam(xp->tracking, &rh->slam) >= 0;
	if (!tracked) {
		RS_WARN(rh, "Unable to setup the SLAM tracker");
		// rs_hdev_destroy also removes the u_var root added above.
		rs_hdev_destroy(xd);
		return NULL;
	}

	RS_DEBUG(rh, "Host-SLAM RealSense device created");

	return xd;
}
1035
//! Create and open the frame server for IMU/camera streaming.
//!
//! Builds the xrt_fs interface, opens the librealsense pipeline for the
//! device at @p device_idx, enables the gyro/accel/video streams configured
//! via json, wires the internal sinks, and registers the source in the frame
//! context so it is torn down with it. Errors from librealsense calls are
//! handled inside the DO() macro.
//!
//! @param xfctx Frame context that will own (and eventually destroy) the source.
//! @param device_idx Index into librealsense's device list.
struct xrt_fs *
rs_source_create(struct xrt_frame_context *xfctx, int device_idx)
{
	struct rs_source *rs = U_TYPED_CALLOC(struct rs_source);
	rs->log_level = debug_get_log_option_rs_log();

	// Setup xrt_fs
	struct xrt_fs *xfs = &rs->xfs;
	xfs->enumerate_modes = rs_source_enumerate_modes;
	xfs->configure_capture = rs_source_configure_capture;
	xfs->stream_start = rs_source_stream_start;
	xfs->slam_stream_start = rs_source_slam_stream_start;
	xfs->stream_stop = rs_source_stream_stop;
	xfs->is_running = rs_source_is_running;
	snprintf(xfs->name, sizeof(xfs->name), RS_SOURCE_STR);
	snprintf(xfs->product, sizeof(xfs->product), RS_SOURCE_STR " Product");
	snprintf(xfs->manufacturer, sizeof(xfs->manufacturer), RS_SOURCE_STR " Manufacturer");
	snprintf(xfs->serial, sizeof(xfs->serial), RS_SOURCE_STR " Serial");
	xfs->source_id = 0x2EA15E115E;

	// Setup realsense pipeline data
	struct rs_container *rsc = &rs->rsc;
	rsc->error_status = NULL;
	rsc->context = DO(rs2_create_context, RS2_API_VERSION);
	rsc->device_list = DO(rs2_query_devices, rsc->context);
	rsc->device_count = DO(rs2_get_device_count, rsc->device_list);
	rsc->device_idx = device_idx;
	rsc->device = DO(rs2_create_device, rsc->device_list, rsc->device_idx);
	rsc->pipeline = DO(rs2_create_pipeline, rsc->context);
	rsc->config = DO_(rs2_create_config);

	// Set the pipeline to start specifically on the realsense device the prober selected
	bool hdev_has_serial = DO(rs2_supports_device_info, rsc->device, RS2_CAMERA_INFO_SERIAL_NUMBER);
	if (hdev_has_serial) {
		const char *hdev_serial = DO(rs2_get_device_info, rsc->device, RS2_CAMERA_INFO_SERIAL_NUMBER);
		DO(rs2_config_enable_device, rsc->config, hdev_serial);
	} else {
		RS_WARN(rs, "Unexpected, the realsense device in use does not provide a serial number.");
	}

	// Load RealSense pipeline options from json
	rs_source_load_stream_options_from_json(rs);

	// Enable RealSense pipeline streams
	rs2_stream stream_type = rs->stream_type;
	int width = rs->video_width;
	int height = rs->video_height;
	int fps = rs->video_fps;
	rs2_format format = rs->video_format;
	DO(rs2_config_enable_stream, rsc->config, RS2_STREAM_GYRO, 0, 0, 0, RS2_FORMAT_MOTION_XYZ32F, rs->gyro_fps);
	DO(rs2_config_enable_stream, rsc->config, RS2_STREAM_ACCEL, 0, 0, 0, RS2_FORMAT_MOTION_XYZ32F, rs->accel_fps);
	DO(rs2_config_enable_stream, rsc->config, stream_type, rs->stream1_index, width, height, format, fps);
	if (rs->stereo) {
		// Second (right) video stream only when configured as stereo.
		DO(rs2_config_enable_stream, rsc->config, stream_type, rs->stream2_index, width, height, format, fps);
	}

	// Setup sinks
	rs->left_sink.push_frame = receive_left_frame;
	rs->right_sink.push_frame = receive_right_frame;
	rs->imu_sink.push_imu = receive_imu_sample;
	rs->in_sinks.cam_count = 2;
	rs->in_sinks.cams[0] = &rs->left_sink;
	rs->in_sinks.cams[1] = &rs->right_sink;
	rs->in_sinks.imu = &rs->imu_sink;

	// Prepare UI
	u_sink_debug_init(&rs->ui_left_sink);
	u_sink_debug_init(&rs->ui_right_sink);
	m_ff_vec3_f32_alloc(&rs->gyro_ff, 1000);
	m_ff_vec3_f32_alloc(&rs->accel_ff, 1000);
	rs->ui_autoexposure = rs->video_autoexposure;
	// Exposure slider: cap at one frame period (usec), rounded down to a
	// multiple of estep, minus an empirically-observed shutter delay.
	float estep = 50;
	float shutter_delay = 1500; // Approximated value, obtained by playing with the realsense-viewer
	float emax = 1000 * 1000 / fps - fmod(1000 * 1000 / fps, estep) - shutter_delay;
	rs->ui_exposure = (struct u_var_draggable_f32){.val = rs->video_exposure, .min = 0, .max = emax, .step = estep};
	rs->ui_gain = (struct u_var_draggable_f32){.val = rs->video_gain, .min = 16, .max = 248, .step = 1};
	rs->ui_btn_apply = (struct u_var_button){.cb = rs_source_apply_changes_ctn_cb, .ptr = rs};

	// Setup UI
	u_var_add_root(rs, "RealSense Source", false);
	u_var_add_log_level(rs, &rs->log_level, "Log Level");
	u_var_add_ro_ff_vec3_f32(rs, rs->gyro_ff, "Gyroscope");
	u_var_add_ro_ff_vec3_f32(rs, rs->accel_ff, "Accelerometer");
	u_var_add_sink_debug(rs, &rs->ui_left_sink, "Left Camera");
	u_var_add_sink_debug(rs, &rs->ui_right_sink, "Right Camera");
	if (rs->video_change_exposure) {
		u_var_add_gui_header(rs, NULL, "Stream options");
		u_var_add_bool(rs, &rs->ui_autoexposure, "Enable autoexposure");
		u_var_add_draggable_f32(rs, &rs->ui_exposure, "Exposure (usec)");
		u_var_add_draggable_f32(rs, &rs->ui_gain, "Gain");
		u_var_add_button(rs, &rs->ui_btn_apply, "Apply");
	}

	// Setup node
	struct xrt_frame_node *xfn = &rs->node;
	xfn->break_apart = rs_source_node_break_apart;
	xfn->destroy = rs_source_node_destroy;
	xrt_frame_context_add(xfctx, &rs->node);

	// Setup IMU synchronizer lock
	os_mutex_init(&rs->partial_imu_sample.mutex);

	return xfs;
}