The open source OpenXR runtime

c/util: Improve layer squasher CS shader UBO

Don't use XRT_MAX_VIEWS here, since it is the wrong constant for this purpose: the padding and index counts in the layer squasher's UBO are fixed by the std140 layout rules, not by the number of views.

Part-of: <https://gitlab.freedesktop.org/monado/monado/-/merge_requests/2619>

authored by

Jakob Bornecrantz and committed by
Marge Bot
67dc3556 c5ed968e

+61 -27
+23 -10
src/xrt/compositor/render/render_interface.h
··· 1 1 // Copyright 2019-2023, Collabora, Ltd. 2 + // Copyright 2025, NVIDIA CORPORATION. 2 3 // SPDX-License-Identifier: BSL-1.0 3 4 /*! 4 5 * @file ··· 1207 1208 struct 1208 1209 { 1209 1210 uint32_t value; 1210 - uint32_t padding[3]; 1211 + uint32_t padding[3]; // Padding up to a vec4. 1211 1212 } layer_count; 1212 1213 1213 1214 struct xrt_normalized_rect pre_transform; 1214 1215 struct xrt_normalized_rect post_transforms[RENDER_MAX_LAYERS]; 1215 1216 1216 - //! std140 uvec2, corresponds to enum xrt_layer_type and unpremultiplied alpha. 1217 + /*! 1218 + * Corresponds to enum xrt_layer_type and unpremultiplied alpha. 1219 + * 1220 + * std140 uvec2, because it is an array it gets padded to vec4. 1221 + */ 1217 1222 struct 1218 1223 { 1219 - uint32_t val; 1220 - uint32_t unpremultiplied; 1221 - uint32_t padding[XRT_MAX_VIEWS]; 1222 - } layer_type[RENDER_MAX_LAYERS]; 1224 + uint32_t layer_type; 1225 + uint32_t unpremultiplied_alpha; 1226 + uint32_t _padding0; 1227 + uint32_t _padding1; 1228 + } layer_data[RENDER_MAX_LAYERS]; 1223 1229 1224 - //! Which image/sampler(s) correspond to each layer. 1230 + /*! 1231 + * Which image/sampler(s) correspond to each layer. 1232 + * 1233 + * std140 uvec2, because it is an array it gets padded to vec4. 1234 + */ 1225 1235 struct 1226 1236 { 1227 - uint32_t images[XRT_MAX_VIEWS]; 1237 + uint32_t color_image_index; 1238 + uint32_t depth_image_index; 1239 + 1228 1240 //! @todo Implement separated samplers and images (and change to samplers[2]) 1229 - uint32_t padding[XRT_MAX_VIEWS]; 1230 - } images_samplers[RENDER_MAX_LAYERS]; 1241 + uint32_t _padding0; 1242 + uint32_t _padding1; 1243 + } image_info[RENDER_MAX_LAYERS]; 1231 1244 1232 1245 //! Shared between cylinder and equirect2. 1233 1246 struct xrt_matrix_4x4 mv_inverse[RENDER_MAX_LAYERS];
+30 -9
src/xrt/compositor/shaders/layer.comp
··· 11 11 #include "layer_defines.inc.glsl" 12 12 13 13 14 + struct layer_data 15 + { 16 + uint layer_type; 17 + uint unpremultiplied_alpha; 18 + 19 + // This struct is used in an array, gets padded to vec4. 20 + uint _padding0; 21 + uint _padding1; 22 + }; 23 + 24 + struct image_info 25 + { 26 + uint color_image_index; 27 + uint depth_image_index; 28 + 29 + // This struct is used in an array, gets padded to vec4. 30 + uint _padding0; 31 + uint _padding1; 32 + }; 33 + 34 + 14 35 const float PI = acos(-1); 15 36 16 37 // Should we do timewarp. ··· 34 55 vec4 pre_transform; 35 56 vec4 post_transform[RENDER_MAX_LAYERS]; 36 57 37 - // corresponds to enum xrt_layer_type 38 - uvec2 layer_type_and_unpremultiplied[RENDER_MAX_LAYERS]; 58 + // Per-layer data. 59 + layer_data layer_data[RENDER_MAX_LAYERS]; 39 60 40 61 // which image/sampler(s) correspond to each layer 41 - ivec2 images_samplers[RENDER_MAX_LAYERS]; 62 + image_info image_info[RENDER_MAX_LAYERS]; 42 63 43 64 // shared between cylinder and equirect2 44 65 mat4 mv_inverse[RENDER_MAX_LAYERS]; ··· 220 241 221 242 vec2 uv_sub = fma(sample_point, ubo.post_transform[layer].zw, ubo.post_transform[layer].xy); 222 243 223 - uint index = ubo.images_samplers[layer].x; 244 + uint index = ubo.image_info[layer].color_image_index; 224 245 #ifdef DEBUG 225 246 out_color += texture(source[index], uv_sub) / 2.f; 226 247 #else ··· 319 340 320 341 vec2 uv_sub = fma(sample_point, ubo.post_transform[layer].zw, ubo.post_transform[layer].xy); 321 342 322 - uint index = ubo.images_samplers[layer].x; 343 + uint index = ubo.image_info[layer].color_image_index; 323 344 #ifdef DEBUG 324 345 out_color += texture(source[index], uv_sub) / 2.0; 325 346 #else ··· 335 356 336 357 vec4 do_projection(vec2 view_uv, uint layer) 337 358 { 338 - uint source_image_index = ubo.images_samplers[layer].x; 359 + uint source_image_index = ubo.image_info[layer].color_image_index; 339 360 340 361 // Do any transformation needed. 
341 362 vec2 uv = transform_uv(view_uv, layer); ··· 367 388 368 389 vec4 do_quad(vec2 view_uv, uint layer) 369 390 { 370 - uint source_image_index = ubo.images_samplers[layer].x; 391 + uint source_image_index = ubo.image_info[layer].color_image_index; 371 392 372 393 // center point of the plane in view space. 373 394 vec3 quad_position = ubo.quad_position[layer].xyz; ··· 453 474 for (uint layer = 0; layer < layer_count; layer++) { 454 475 vec4 rgba = vec4(0, 0, 0, 0); 455 476 456 - switch (ubo.layer_type_and_unpremultiplied[layer].x) { 477 + switch (ubo.layer_data[layer].layer_type) { 457 478 case LAYER_COMP_TYPE_QUAD: 458 479 rgba = do_quad(view_uv, layer); 459 480 break; ··· 469 490 default: break; 470 491 } 471 492 472 - if (ubo.layer_type_and_unpremultiplied[layer].y != 0) { 493 + if (ubo.layer_data[layer].unpremultiplied_alpha != 0) { 473 494 // Unpremultipled blend factor of src.a. 474 495 accum.rgb = mix(accum.rgb, rgba.rgb, rgba.a); 475 496 } else {
+8 -8
src/xrt/compositor/util/comp_render_cs.c
··· 136 136 ubo_data->cylinder_data[cur_layer].central_angle = c->central_angle; 137 137 ubo_data->cylinder_data[cur_layer].aspect_ratio = c->aspect_ratio; 138 138 139 - ubo_data->images_samplers[cur_layer].images[0] = cur_image; 139 + ubo_data->image_info[cur_layer].color_image_index = cur_image; 140 140 cur_image++; 141 141 142 142 *out_cur_image = cur_image; ··· 199 199 ubo_data->eq2_data[cur_layer].upper_vertical_angle = eq2->upper_vertical_angle; 200 200 ubo_data->eq2_data[cur_layer].lower_vertical_angle = eq2->lower_vertical_angle; 201 201 202 - ubo_data->images_samplers[cur_layer].images[0] = cur_image; 202 + ubo_data->image_info[cur_layer].color_image_index = cur_image; 203 203 cur_image++; 204 204 205 205 *out_cur_image = cur_image; ··· 237 237 // Color 238 238 src_samplers[cur_image] = clamp_to_border_black; 239 239 src_image_views[cur_image] = get_image_view(image, layer_data->flags, array_index); 240 - ubo_data->images_samplers[cur_layer + 0].images[0] = cur_image++; 240 + ubo_data->image_info[cur_layer + 0].color_image_index = cur_image++; 241 241 242 242 // Depth 243 243 if (layer_data->type == XRT_LAYER_PROJECTION_DEPTH) { ··· 247 247 248 248 src_samplers[cur_image] = clamp_to_edge; // Edge to keep depth stable at edges. 
249 249 src_image_views[cur_image] = get_image_view(d_image, layer_data->flags, d_array_index); 250 - ubo_data->images_samplers[cur_layer + 0].images[1] = cur_image++; 250 + ubo_data->image_info[cur_layer + 0].depth_image_index = cur_image++; 251 251 } 252 252 253 253 set_post_transform_rect( // ··· 346 346 ubo_data->quad_position[cur_layer].val = quad_position; 347 347 ubo_data->quad_normal[cur_layer].val = normal_view_space; 348 348 ubo_data->inverse_quad_transform[cur_layer] = inverse_quad_transform; 349 - ubo_data->images_samplers[cur_layer].images[0] = cur_image; 349 + ubo_data->image_info[cur_layer].color_image_index = cur_image; 350 350 cur_image++; 351 351 352 352 *out_cur_image = cur_image; ··· 683 683 continue; 684 684 } 685 685 686 - ubo_data->layer_type[cur_layer].val = xrt_layer_to_cs_layer_type(data); 687 - ubo_data->layer_type[cur_layer].unpremultiplied = is_layer_unpremultiplied(data); 686 + ubo_data->layer_data[cur_layer].layer_type = xrt_layer_to_cs_layer_type(data); 687 + ubo_data->layer_data[cur_layer].unpremultiplied_alpha = is_layer_unpremultiplied(data); 688 688 689 689 // Finally okay to increment the current layer. 690 690 cur_layer++; ··· 694 694 ubo_data->layer_count.value = cur_layer; 695 695 696 696 for (uint32_t i = cur_layer; i < RENDER_MAX_LAYERS; i++) { 697 - ubo_data->layer_type[i].val = LAYER_COMP_TYPE_NOOP; // Explicit no-op. 697 + ubo_data->layer_data[i].layer_type = LAYER_COMP_TYPE_NOOP; // Explicit no-op. 698 698 } 699 699 700 700 //! @todo: If Vulkan 1.2, use VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT and skip this