The open source OpenXR runtime

xrt: adapt drivers to new get_view_poses API change

Part-of: <https://gitlab.freedesktop.org/monado/monado/-/merge_requests/2365>

+140 -156
+11 -4
src/xrt/auxiliary/util/u_device.c
··· 451 451 * 452 452 */ 453 453 454 - void 454 + xrt_result_t 455 455 u_device_get_view_poses(struct xrt_device *xdev, 456 456 const struct xrt_vec3 *default_eye_relation, 457 457 int64_t at_timestamp_ns, ··· 460 460 struct xrt_fov *out_fovs, 461 461 struct xrt_pose *out_poses) 462 462 { 463 - xrt_device_get_tracked_pose(xdev, XRT_INPUT_GENERIC_HEAD_POSE, at_timestamp_ns, out_head_relation); 463 + xrt_result_t xret = 464 + xrt_device_get_tracked_pose(xdev, XRT_INPUT_GENERIC_HEAD_POSE, at_timestamp_ns, out_head_relation); 465 + if (xret != XRT_SUCCESS) { 466 + return xret; 467 + } 464 468 465 469 for (uint32_t i = 0; i < view_count && i < ARRAY_SIZE(xdev->hmd->views); i++) { 466 470 out_fovs[i] = xdev->hmd->distortion.fov[i]; ··· 469 473 for (uint32_t i = 0; i < view_count; i++) { 470 474 u_device_get_view_pose(default_eye_relation, i, &out_poses[i]); 471 475 } 476 + 477 + return XRT_SUCCESS; 472 478 } 473 479 474 480 xrt_result_t ··· 521 527 E(get_hand_tracking); 522 528 } 523 529 524 - void 530 + xrt_result_t 525 531 u_device_ni_get_view_poses(struct xrt_device *xdev, 526 532 const struct xrt_vec3 *default_eye_relation, 527 533 int64_t at_timestamp_ns, ··· 530 536 struct xrt_fov *out_fovs, 531 537 struct xrt_pose *out_poses) 532 538 { 533 - E(get_hand_tracking); 539 + E(get_view_poses); 540 + return XRT_ERROR_NOT_IMPLEMENTED; 534 541 } 535 542 536 543 bool
+2 -2
src/xrt/auxiliary/util/u_device.h
··· 166 166 * 167 167 * The field @ref xrt_device::hmd needs to be set and valid. 168 168 */ 169 - void 169 + xrt_result_t 170 170 u_device_get_view_poses(struct xrt_device *xdev, 171 171 const struct xrt_vec3 *default_eye_relation, 172 172 int64_t at_timestamp_ns, ··· 233 233 * 234 234 * @ingroup aux_util 235 235 */ 236 - void 236 + xrt_result_t 237 237 u_device_ni_get_view_poses(struct xrt_device *xdev, 238 238 const struct xrt_vec3 *default_eye_relation, 239 239 int64_t at_timestamp_ns,
+7 -4
src/xrt/drivers/multi_wrapper/multi.c
··· 193 193 xrt_device_set_output(target, name, value); 194 194 } 195 195 196 - static void 196 + static xrt_result_t 197 197 get_view_poses(struct xrt_device *xdev, 198 198 const struct xrt_vec3 *default_eye_relation, 199 199 int64_t at_timestamp_ns, ··· 204 204 { 205 205 struct multi_device *d = (struct multi_device *)xdev; 206 206 struct xrt_device *target = d->tracking_override.target; 207 - xrt_device_get_view_poses(target, default_eye_relation, at_timestamp_ns, view_count, out_head_relation, 208 - out_fovs, out_poses); 207 + xrt_result_t xret = xrt_device_get_view_poses(target, default_eye_relation, at_timestamp_ns, view_count, 208 + out_head_relation, out_fovs, out_poses); 209 + if (xret != XRT_SUCCESS) { 210 + return xret; 211 + } 209 212 210 213 /* 211 214 * Use xrt_device_ function to be sure it is exactly 212 215 * like if the state-tracker called this function. 213 216 */ 214 - xrt_device_get_tracked_pose(xdev, XRT_INPUT_GENERIC_HEAD_POSE, at_timestamp_ns, out_head_relation); 217 + return xrt_device_get_tracked_pose(xdev, XRT_INPUT_GENERIC_HEAD_POSE, at_timestamp_ns, out_head_relation); 215 218 } 216 219 217 220 static bool
+14 -9
src/xrt/drivers/north_star/ns_hmd.c
··· 385 385 return XRT_SUCCESS; 386 386 } 387 387 388 - static void 388 + static xrt_result_t 389 389 ns_hmd_get_view_poses(struct xrt_device *xdev, 390 390 const struct xrt_vec3 *default_eye_relation, 391 391 int64_t at_timestamp_ns, ··· 398 398 NS_DEBUG(ns, "Called!"); 399 399 400 400 // Use this to take care of most stuff, then fix up below. 401 - u_device_get_view_poses( // 402 - xdev, // 403 - default_eye_relation, // 404 - at_timestamp_ns, // 405 - view_count, // 406 - out_head_relation, // 407 - out_fovs, // 408 - out_poses); // 401 + xrt_result_t xret = u_device_get_view_poses( // 402 + xdev, // 403 + default_eye_relation, // 404 + at_timestamp_ns, // 405 + view_count, // 406 + out_head_relation, // 407 + out_fovs, // 408 + out_poses); // 409 + if (xret != XRT_SUCCESS) { 410 + return xret; 411 + } 409 412 410 413 // Fix fix. 411 414 for (uint32_t i = 0; i < view_count && i < ARRAY_SIZE(ns->config.head_pose_to_eye); i++) { 412 415 out_poses[i] = ns->config.head_pose_to_eye[i]; 413 416 } 417 + 418 + return XRT_SUCCESS; 414 419 } 415 420 416 421 bool
+1 -13
src/xrt/drivers/realsense/rs_ddev.c
··· 406 406 } 407 407 408 408 static void 409 - rs_ddev_get_view_poses(struct xrt_device *xdev, 410 - const struct xrt_vec3 *default_eye_relation, 411 - int64_t at_timestamp_ns, 412 - uint32_t view_count, 413 - struct xrt_space_relation *out_head_relation, 414 - struct xrt_fov *out_fovs, 415 - struct xrt_pose *out_poses) 416 - { 417 - assert(false); 418 - } 419 - 420 - static void 421 409 rs_ddev_destroy(struct xrt_device *xdev) 422 410 { 423 411 struct rs_ddev *rs = rs_ddev(xdev); ··· 462 450 rs->enable_relocalization, rs->enable_pose_prediction, rs->enable_pose_filtering); 463 451 rs->base.update_inputs = u_device_noop_update_inputs; 464 452 rs->base.get_tracked_pose = rs_ddev_get_tracked_pose; 465 - rs->base.get_view_poses = rs_ddev_get_view_poses; 453 + rs->base.get_view_poses = u_device_ni_get_view_poses; 466 454 rs->base.destroy = rs_ddev_destroy; 467 455 rs->base.name = XRT_DEVICE_REALSENSE; 468 456 rs->base.tracking_origin->type = XRT_TRACKING_TYPE_EXTERNAL_SLAM;
+1 -13
src/xrt/drivers/remote/r_device.c
··· 180 180 } 181 181 182 182 static void 183 - r_device_get_view_poses(struct xrt_device *xdev, 184 - const struct xrt_vec3 *default_eye_relation, 185 - int64_t at_timestamp_ns, 186 - uint32_t view_count, 187 - struct xrt_space_relation *out_head_relation, 188 - struct xrt_fov *out_fovs, 189 - struct xrt_pose *out_poses) 190 - { 191 - assert(false); 192 - } 193 - 194 - static void 195 183 r_device_set_output(struct xrt_device *xdev, enum xrt_output_name name, const struct xrt_output_value *value) 196 184 { 197 185 struct r_device *rd = r_device(xdev); ··· 215 203 rd->base.update_inputs = r_device_update_inputs; 216 204 rd->base.get_tracked_pose = r_device_get_tracked_pose; 217 205 rd->base.get_hand_tracking = r_device_get_hand_tracking; 218 - rd->base.get_view_poses = r_device_get_view_poses; 206 + rd->base.get_view_poses = u_device_ni_get_view_poses; 219 207 rd->base.set_output = r_device_set_output; 220 208 rd->base.destroy = r_device_destroy; 221 209 rd->base.tracking_origin = &r->origin;
+12 -16
src/xrt/drivers/remote/r_hmd.c
··· 74 74 return XRT_SUCCESS; 75 75 } 76 76 77 - static void 77 + static xrt_result_t 78 78 r_hmd_get_view_poses(struct xrt_device *xdev, 79 79 const struct xrt_vec3 *default_eye_relation, 80 80 int64_t at_timestamp_ns, ··· 86 86 struct r_hmd *rh = r_hmd(xdev); 87 87 88 88 if (!rh->r->latest.head.per_view_data_valid) { 89 - u_device_get_view_poses( // 90 - xdev, // 91 - default_eye_relation, // 92 - at_timestamp_ns, // 93 - view_count, // 94 - out_head_relation, // 95 - out_fovs, // 96 - out_poses); // 97 - 98 - // Done now 99 - return; 89 + return u_device_get_view_poses( // 90 + xdev, // 91 + default_eye_relation, // 92 + at_timestamp_ns, // 93 + view_count, // 94 + out_head_relation, // 95 + out_fovs, // 96 + out_poses); // 100 97 } 101 98 102 - if (view_count > ARRAY_SIZE(rh->r->latest.head.views)) { 103 - U_LOG_E("Asking for too many views!"); 104 - return; 105 - } 99 + assert(view_count <= ARRAY_SIZE(rh->r->latest.head.views)); 106 100 107 101 copy_head_center_to_relation(rh, out_head_relation); 108 102 ··· 110 104 out_poses[i] = rh->r->latest.head.views[i].pose; 111 105 out_fovs[i] = rh->r->latest.head.views[i].fov; 112 106 } 107 + 108 + return XRT_SUCCESS; 113 109 } 114 110 115 111 static void
+9 -9
src/xrt/drivers/sample/sample_hmd.c
··· 128 128 return XRT_SUCCESS; 129 129 } 130 130 131 - static void 131 + static xrt_result_t 132 132 sample_hmd_get_view_poses(struct xrt_device *xdev, 133 133 const struct xrt_vec3 *default_eye_relation, 134 134 int64_t at_timestamp_ns, ··· 141 141 * For HMDs you can call this function or directly set 142 142 * the `get_view_poses` function on the device to it. 143 143 */ 144 - u_device_get_view_poses( // 145 - xdev, // 146 - default_eye_relation, // 147 - at_timestamp_ns, // 148 - view_count, // 149 - out_head_relation, // 150 - out_fovs, // 151 - out_poses); // 144 + return u_device_get_view_poses( // 145 + xdev, // 146 + default_eye_relation, // 147 + at_timestamp_ns, // 148 + view_count, // 149 + out_head_relation, // 150 + out_fovs, // 151 + out_poses); // 152 152 } 153 153 154 154 static xrt_result_t
+14 -9
src/xrt/drivers/simula/svr_hmd.c
··· 99 99 100 100 #define DEG_TO_RAD(DEG) (DEG * M_PI / 180.) 101 101 102 - static void 102 + static xrt_result_t 103 103 svr_hmd_get_view_poses(struct xrt_device *xdev, 104 104 const struct xrt_vec3 *default_eye_relation, 105 105 int64_t at_timestamp_ns, ··· 111 111 //!@todo: default_eye_relation inherits from the env var OXR_DEBUG_IPD_MM / oxr_session.c 112 112 // probably needs a lot more attention 113 113 114 - u_device_get_view_poses( // 115 - xdev, // 116 - default_eye_relation, // 117 - at_timestamp_ns, // 118 - view_count, // 119 - out_head_relation, // 120 - out_fovs, // 121 - out_poses); // 114 + xrt_result_t xret = u_device_get_view_poses( // 115 + xdev, // 116 + default_eye_relation, // 117 + at_timestamp_ns, // 118 + view_count, // 119 + out_head_relation, // 120 + out_fovs, // 121 + out_poses); // 122 + if (xret != XRT_SUCCESS) { 123 + return xret; 124 + } 122 125 123 126 //!@todo you may need to invert this - I can't test locally 124 127 float turn_vals[2] = {5.0, -5.0}; ··· 126 129 struct xrt_vec3 y_up = (struct xrt_vec3)XRT_VEC3_UNIT_Y; 127 130 math_quat_from_angle_vector(DEG_TO_RAD(turn_vals[i]), &y_up, &out_poses[i].orientation); 128 131 } 132 + 133 + return XRT_SUCCESS; 129 134 } 130 135 131 136 //!@todo: remove hard-coding and move to u_distortion_mesh
+1 -13
src/xrt/drivers/simulated/simulated_controller.c
··· 167 167 } 168 168 169 169 static void 170 - simulated_device_get_view_poses(struct xrt_device *xdev, 171 - const struct xrt_vec3 *default_eye_relation, 172 - int64_t at_timestamp_ns, 173 - uint32_t view_count, 174 - struct xrt_space_relation *out_head_relation, 175 - struct xrt_fov *out_fovs, 176 - struct xrt_pose *out_poses) 177 - { 178 - assert(false); 179 - } 180 - 181 - static void 182 170 simulated_device_set_output(struct xrt_device *xdev, enum xrt_output_name name, const struct xrt_output_value *value) 183 171 { 184 172 struct simulated_device *sd = simulated_device(xdev); ··· 369 357 sd->base.update_inputs = simulated_device_update_inputs; 370 358 sd->base.get_tracked_pose = simulated_device_get_tracked_pose; 371 359 sd->base.get_hand_tracking = u_device_ni_get_hand_tracking; 372 - sd->base.get_view_poses = simulated_device_get_view_poses; 360 + sd->base.get_view_poses = u_device_ni_get_view_poses; 373 361 sd->base.set_output = simulated_device_set_output; 374 362 sd->base.destroy = simulated_device_destroy; 375 363 sd->base.tracking_origin = origin;
+15 -10
src/xrt/drivers/steamvr_lh/device.cpp
··· 174 174 this->inputs = inputs_vec.data(); 175 175 this->input_count = inputs_vec.size(); 176 176 177 + this->xrt_device::get_view_poses = &device_bouncer<HmdDevice, &HmdDevice::get_view_poses, xrt_result_t>; 177 178 #define SETUP_MEMBER_FUNC(name) this->xrt_device::name = &device_bouncer<HmdDevice, &HmdDevice::name> 178 - SETUP_MEMBER_FUNC(get_view_poses); 179 179 SETUP_MEMBER_FUNC(compute_distortion); 180 180 #undef SETUP_MEMBER_FUNC 181 181 } ··· 515 515 this->eye[1].position = rightEye_postquat.position; 516 516 } 517 517 518 - void 518 + xrt_result_t 519 519 HmdDevice::get_view_poses(const xrt_vec3 *default_eye_relation, 520 520 uint64_t at_timestamp_ns, 521 521 uint32_t view_count, ··· 526 526 struct xrt_vec3 eye_relation = *default_eye_relation; 527 527 eye_relation.x = ipd; 528 528 529 - u_device_get_view_poses( // 530 - this, // 531 - &eye_relation, // 532 - at_timestamp_ns, // 533 - view_count, // 534 - out_head_relation, // 535 - out_fovs, // 536 - out_poses); // 529 + xrt_result_t xret = u_device_get_view_poses( // 530 + this, // 531 + &eye_relation, // 532 + at_timestamp_ns, // 533 + view_count, // 534 + out_head_relation, // 535 + out_fovs, // 536 + out_poses); // 537 + if (xret != XRT_SUCCESS) { 538 + return xret; 539 + } 537 540 538 541 out_poses[0].orientation = this->eye[0].orientation; 539 542 out_poses[0].position.z = this->eye[0].position.z; ··· 541 544 out_poses[1].orientation = this->eye[1].orientation; 542 545 out_poses[1].position.z = this->eye[1].position.z; 543 546 out_poses[1].position.y = this->eye[1].position.y; 547 + 548 + return XRT_SUCCESS; 544 549 } 545 550 546 551 bool
+1 -1
src/xrt/drivers/steamvr_lh/device.hpp
··· 124 124 const vr::HmdMatrix34_t &eyeToHeadLeft, 125 125 const vr::HmdMatrix34_t &eyeToHeadRight); 126 126 127 - void 127 + xrt_result_t 128 128 get_view_poses(const xrt_vec3 *default_eye_relation, 129 129 uint64_t at_timestamp_ns, 130 130 uint32_t view_count,
+14 -9
src/xrt/drivers/survive/survive_driver.c
··· 523 523 return XRT_SUCCESS; 524 524 } 525 525 526 - static void 526 + static xrt_result_t 527 527 survive_device_get_view_poses(struct xrt_device *xdev, 528 528 const struct xrt_vec3 *default_eye_relation, 529 529 int64_t at_timestamp_ns, ··· 547 547 eye_relation.x = survive->hmd.ipd; 548 548 } 549 549 550 - u_device_get_view_poses( // 551 - xdev, // 552 - &eye_relation, // 553 - at_timestamp_ns, // 554 - view_count, // 555 - out_head_relation, // 556 - out_fovs, // 557 - out_poses); // 550 + xrt_result_t xret = u_device_get_view_poses( // 551 + xdev, // 552 + &eye_relation, // 553 + at_timestamp_ns, // 554 + view_count, // 555 + out_head_relation, // 556 + out_fovs, // 557 + out_poses); // 558 + if (xret != XRT_SUCCESS) { 559 + return xret; 560 + } 558 561 559 562 // This is for the Index' canted displays, on the Vive [Pro] they are identity. 560 563 for (uint32_t i = 0; i < view_count && i < ARRAY_SIZE(survive->hmd.config.display.rot); i++) { 561 564 out_poses[i].orientation = survive->hmd.config.display.rot[i]; 562 565 } 566 + 567 + return XRT_SUCCESS; 563 568 } 564 569 565 570 enum InputComponent
+1 -13
src/xrt/drivers/twrap/twrap_slam.c
··· 136 136 } 137 137 138 138 static void 139 - twrap_slam_get_view_poses(struct xrt_device *xdev, 140 - const struct xrt_vec3 *default_eye_relation, 141 - int64_t at_timestamp_ns, 142 - uint32_t view_count, 143 - struct xrt_space_relation *out_head_relation, 144 - struct xrt_fov *out_fovs, 145 - struct xrt_pose *out_poses) 146 - { 147 - assert(false); 148 - } 149 - 150 - static void 151 139 twrap_slam_destroy(struct xrt_device *xdev) 152 140 { 153 141 struct slam_device *dx = slam_device(xdev); ··· 173 161 174 162 dx->base.update_inputs = u_device_noop_update_inputs; 175 163 dx->base.get_tracked_pose = twrap_slam_get_tracked_pose; 176 - dx->base.get_view_poses = twrap_slam_get_view_poses; 164 + dx->base.get_view_poses = u_device_ni_get_view_poses; 177 165 dx->base.destroy = twrap_slam_destroy; 178 166 dx->base.name = name; 179 167 dx->base.tracking_origin->type = XRT_TRACKING_TYPE_OTHER;
+14 -9
src/xrt/drivers/vive/vive_device.c
··· 208 208 return xret; 209 209 } 210 210 211 - static void 211 + static xrt_result_t 212 212 vive_device_get_view_poses(struct xrt_device *xdev, 213 213 const struct xrt_vec3 *default_eye_relation, 214 214 int64_t at_timestamp_ns, ··· 222 222 // Only supports two views. 223 223 assert(view_count <= 2); 224 224 225 - u_device_get_view_poses( // 226 - xdev, // 227 - default_eye_relation, // 228 - at_timestamp_ns, // 229 - view_count, // 230 - out_head_relation, // 231 - out_fovs, // 232 - out_poses); // 225 + xrt_result_t xret = u_device_get_view_poses( // 226 + xdev, // 227 + default_eye_relation, // 228 + at_timestamp_ns, // 229 + view_count, // 230 + out_head_relation, // 231 + out_fovs, // 232 + out_poses); // 233 + if (xret != XRT_SUCCESS) { 234 + return xret; 235 + } 233 236 234 237 // This is for the Index' canted displays, on the Vive [Pro] they are identity. 235 238 struct vive_device *d = vive_device(xdev); 236 239 for (uint32_t i = 0; i < view_count && i < ARRAY_SIZE(d->config.display.rot); i++) { 237 240 out_poses[i].orientation = d->config.display.rot[i]; 238 241 } 242 + 243 + return XRT_SUCCESS; 239 244 } 240 245 241 246 static int
+9 -10
src/xrt/include/xrt/xrt_device.h
··· 535 535 * (Caution: Even if you have eye tracking, you 536 536 * won't use eye orientation here!) 537 537 */ 538 - void (*get_view_poses)(struct xrt_device *xdev, 539 - const struct xrt_vec3 *default_eye_relation, 540 - int64_t at_timestamp_ns, 541 - uint32_t view_count, 542 - struct xrt_space_relation *out_head_relation, 543 - struct xrt_fov *out_fovs, 544 - struct xrt_pose *out_poses); 538 + xrt_result_t (*get_view_poses)(struct xrt_device *xdev, 539 + const struct xrt_vec3 *default_eye_relation, 540 + int64_t at_timestamp_ns, 541 + uint32_t view_count, 542 + struct xrt_space_relation *out_head_relation, 543 + struct xrt_fov *out_fovs, 544 + struct xrt_pose *out_poses); 545 545 546 546 /** 547 547 * Compute the distortion at a single point. ··· 823 823 struct xrt_fov *out_fovs, 824 824 struct xrt_pose *out_poses) 825 825 { 826 - xdev->get_view_poses(xdev, default_eye_relation, at_timestamp_ns, view_count, out_head_relation, out_fovs, 827 - out_poses); 828 - return XRT_SUCCESS; 826 + return xdev->get_view_poses(xdev, default_eye_relation, at_timestamp_ns, view_count, out_head_relation, 827 + out_fovs, out_poses); 829 828 } 830 829 831 830 /*!
+14 -12
src/xrt/ipc/client/ipc_client_hmd.c
··· 62 62 return (ipc_client_hmd_t *)xdev; 63 63 } 64 64 65 - static void 65 + static xrt_result_t 66 66 call_get_view_poses_raw(ipc_client_hmd_t *ich, 67 67 const struct xrt_vec3 *default_eye_relation, 68 68 int64_t at_timestamp_ns, ··· 117 117 118 118 out: 119 119 ipc_client_connection_unlock(ipc_c); 120 + return xret; 120 121 } 121 122 122 123 ··· 126 127 * 127 128 */ 128 129 129 - static void 130 + static xrt_result_t 130 131 ipc_client_hmd_get_view_poses(struct xrt_device *xdev, 131 132 const struct xrt_vec3 *default_eye_relation, 132 133 int64_t at_timestamp_ns, ··· 149 150 at_timestamp_ns, // 150 151 view_count, // 151 152 &info); // 152 - IPC_CHK_ONLY_PRINT(ich->ipc_c, xret, "ipc_call_device_get_view_poses_2"); 153 + IPC_CHK_AND_RET(ich->ipc_c, xret, "ipc_call_device_get_view_poses_2"); 153 154 154 155 *out_head_relation = info.head_relation; 155 156 for (int i = 0; i < 2; i++) { ··· 159 160 160 161 } else if (view_count <= IPC_MAX_RAW_VIEWS) { 161 162 // Artificial limit. 162 - 163 - call_get_view_poses_raw( // 164 - ich, // 165 - default_eye_relation, // 166 - at_timestamp_ns, // 167 - view_count, // 168 - out_head_relation, // 169 - out_fovs, // 170 - out_poses); // 163 + xret = call_get_view_poses_raw( // 164 + ich, // 165 + default_eye_relation, // 166 + at_timestamp_ns, // 167 + view_count, // 168 + out_head_relation, // 169 + out_fovs, // 170 + out_poses); // 171 171 } else { 172 172 IPC_ERROR(ich->ipc_c, "Cannot handle %u view_count, %u or less supported.", view_count, 173 173 (uint32_t)IPC_MAX_RAW_VIEWS); 174 174 assert(false && !"Too large view_count!"); 175 175 } 176 + 177 + return xret; 176 178 } 177 179 178 180 static bool