// Monado - The open source OpenXR runtime
1// Copyright 2020-2025, Collabora, Ltd.
2// SPDX-License-Identifier: BSL-1.0
3/*!
4 * @file
5 * @brief AHardwareBuffer backed image buffer allocator.
6 * @author Rylie Pavlik <rylie.pavlik@collabora.com>
7 * @author Simon Zeni <simon.zeni@collabora.com>
8 * @ingroup aux_android
9 */
10
11#include "android_ahardwarebuffer_allocator.h"
12
13#include "util/u_misc.h"
14#include "util/u_logging.h"
15#include "util/u_debug.h"
16#include "util/u_handles.h"
17
18#include "xrt/xrt_vulkan_includes.h"
19
20#ifdef XRT_GRAPHICS_BUFFER_HANDLE_IS_AHARDWAREBUFFER
21#include <android/hardware_buffer.h>
22
23DEBUG_GET_ONCE_LOG_OPTION(ahardwarebuffer_log, "AHARDWAREBUFFER_LOG", U_LOGGING_WARN)
24#define AHB_TRACE(...) U_LOG_IFL_T(debug_get_log_option_ahardwarebuffer_log(), __VA_ARGS__)
25#define AHB_DEBUG(...) U_LOG_IFL_D(debug_get_log_option_ahardwarebuffer_log(), __VA_ARGS__)
26#define AHB_INFO(...) U_LOG_IFL_I(debug_get_log_option_ahardwarebuffer_log(), __VA_ARGS__)
27#define AHB_WARN(...) U_LOG_IFL_W(debug_get_log_option_ahardwarebuffer_log(), __VA_ARGS__)
28#define AHB_ERROR(...) U_LOG_IFL_E(debug_get_log_option_ahardwarebuffer_log(), __VA_ARGS__)
29
30static inline enum AHardwareBuffer_Format
31vk_format_to_ahardwarebuffer(uint64_t format)
32{
33 switch (format) {
34 case VK_FORMAT_X8_D24_UNORM_PACK32: return AHARDWAREBUFFER_FORMAT_D24_UNORM;
35 case VK_FORMAT_D24_UNORM_S8_UINT: return AHARDWAREBUFFER_FORMAT_D24_UNORM_S8_UINT;
36 case VK_FORMAT_R5G6B5_UNORM_PACK16: return AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM;
37 case VK_FORMAT_D16_UNORM: return AHARDWAREBUFFER_FORMAT_D16_UNORM;
38 case VK_FORMAT_R8G8B8_UNORM: return AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM;
39 case VK_FORMAT_D32_SFLOAT_S8_UINT: return AHARDWAREBUFFER_FORMAT_D32_FLOAT_S8_UINT;
40 case VK_FORMAT_A2B10G10R10_UNORM_PACK32: return AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM;
41 case VK_FORMAT_S8_UINT: return AHARDWAREBUFFER_FORMAT_S8_UINT;
42 case VK_FORMAT_D32_SFLOAT: return AHARDWAREBUFFER_FORMAT_D32_FLOAT;
43 case VK_FORMAT_R16G16B16A16_SFLOAT: return AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT;
44 case VK_FORMAT_R8G8B8A8_SRGB:
45 /* apply EGL_GL_COLORSPACE_KHR, EGL_GL_COLORSPACE_SRGB_KHR! */
46 return AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
47 case VK_FORMAT_R8G8B8A8_UNORM: return AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
48 default: return 0;
49 }
50}
51
52static uint64_t
53swapchain_usage_to_ahardwarebuffer(enum xrt_swapchain_usage_bits bits)
54{
55 uint64_t ahb_usage = 0;
56 if (bits & (XRT_SWAPCHAIN_USAGE_SAMPLED | XRT_SWAPCHAIN_USAGE_INPUT_ATTACHMENT)) {
57 ahb_usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
58 }
59
60 if (bits & (XRT_SWAPCHAIN_USAGE_COLOR | XRT_SWAPCHAIN_USAGE_DEPTH_STENCIL)) {
61 ahb_usage |= AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER;
62 }
63
64 if (bits & XRT_SWAPCHAIN_CREATE_PROTECTED_CONTENT) {
65 ahb_usage |= AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT;
66 }
67
68 // Fallback if no bits are set
69 if (ahb_usage == 0) {
70 ahb_usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
71 }
72
73 return ahb_usage;
74}
75
76bool
77ahardwarebuffer_is_supported(uint64_t format, enum xrt_swapchain_usage_bits xbits)
78{
79 // Minimal buffer description to probe support
80 AHardwareBuffer_Desc desc = {
81 .width = 16,
82 .height = 16,
83 .layers = 1,
84 .format = vk_format_to_ahardwarebuffer(format),
85 .usage = swapchain_usage_to_ahardwarebuffer(xbits),
86 };
87
88#if __ANDROID_API__ >= 29
89 return AHardwareBuffer_isSupported(&desc);
90#else
91 AHardwareBuffer *buffer;
92 int ret = AHardwareBuffer_allocate(&desc, &buffer);
93 if (ret) {
94 return false;
95 }
96
97 AHardwareBuffer_release(buffer);
98 return true;
99#endif
100}
101
102xrt_result_t
103ahardwarebuffer_image_allocate(const struct xrt_swapchain_create_info *xsci, xrt_graphics_buffer_handle_t *out_image)
104{
105 AHardwareBuffer_Desc desc;
106 U_ZERO(&desc);
107 enum AHardwareBuffer_Format ahb_format = vk_format_to_ahardwarebuffer(xsci->format);
108 if (ahb_format == 0) {
109 AHB_ERROR("Could not convert %04" PRIx64 " to AHardwareBuffer_Format!", (uint64_t)xsci->format);
110 return XRT_ERROR_ALLOCATION;
111 }
112 desc.height = xsci->height;
113 desc.width = xsci->width;
114 desc.format = ahb_format;
115 desc.layers = xsci->array_size;
116 if (xsci->face_count == 6) {
117 desc.usage |= AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP;
118 desc.layers *= 6;
119 }
120 if (0 != (xsci->bits & (XRT_SWAPCHAIN_USAGE_COLOR | XRT_SWAPCHAIN_USAGE_DEPTH_STENCIL))) {
121 desc.usage |= AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER;
122 }
123
124 // The compositor always needs to sample the buffer, add the flag.
125 desc.usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
126
127 // Here if the above changes.
128 if (0 != (xsci->bits & XRT_SWAPCHAIN_USAGE_SAMPLED)) {
129 desc.usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
130 }
131 if (0 != (xsci->create & XRT_SWAPCHAIN_CREATE_PROTECTED_CONTENT)) {
132 desc.usage |= AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT;
133 }
134
135#if __ANDROID_API__ >= 29
136 if (0 == AHardwareBuffer_isSupported(&desc)) {
137 AHB_ERROR("Computed AHardwareBuffer_Desc is not supported.");
138 return XRT_ERROR_ALLOCATION;
139 }
140#endif
141
142 int ret = AHardwareBuffer_allocate(&desc, out_image);
143 if (ret != 0) {
144 AHB_ERROR("Failed allocating image.");
145 return XRT_ERROR_ALLOCATION;
146 }
147
148 return XRT_SUCCESS;
149}
150
/*!
 * xrt_image_native_allocator::images_allocate implementation: allocates
 * @p image_count AHardwareBuffers from one shared description built from
 * @p xsci. All-or-nothing: on any failure every handle is released and
 * XRT_ERROR_ALLOCATION is returned.
 *
 * @param xina        Allocator instance (unused here).
 * @param xsci        Swapchain create info (format, size, layers, usage).
 * @param image_count Number of images to allocate.
 * @param out_images  Array of @p image_count entries, zeroed on entry.
 */
static xrt_result_t
ahardwarebuffer_images_allocate(struct xrt_image_native_allocator *xina,
                                const struct xrt_swapchain_create_info *xsci,
                                size_t image_count,
                                struct xrt_image_native *out_images)
{
	AHardwareBuffer_Desc desc;
	U_ZERO(&desc);
	// A zero return means the Vulkan format has no AHB equivalent.
	enum AHardwareBuffer_Format ahb_format = vk_format_to_ahardwarebuffer(xsci->format);
	if (ahb_format == 0) {
		AHB_ERROR("Could not convert %04" PRIx64 " to AHardwareBuffer_Format!", (uint64_t)xsci->format);
		return XRT_ERROR_ALLOCATION;
	}
	desc.height = xsci->height;
	desc.width = xsci->width;
	desc.format = ahb_format;
	desc.layers = xsci->array_size;
	// Monado always samples layers
	desc.usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
	// Cube maps carry six faces per array layer and need the dedicated flag.
	if (xsci->face_count == 6) {
		desc.usage |= AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP;
		desc.layers *= 6;
	}
	// Rendering into the buffer needs the framebuffer usage.
	if (0 != (xsci->bits & (XRT_SWAPCHAIN_USAGE_COLOR | XRT_SWAPCHAIN_USAGE_DEPTH_STENCIL))) {
		desc.usage |= AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER;
	}
	if (0 != (xsci->create & XRT_SWAPCHAIN_CREATE_PROTECTED_CONTENT)) {
		desc.usage |= AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT;
	}

#if __ANDROID_API__ >= 29
	// Ask the platform up front instead of relying on allocation failure.
	if (0 == AHardwareBuffer_isSupported(&desc)) {
		AHB_ERROR("Computed AHardwareBuffer_Desc is not supported.");
		return XRT_ERROR_ALLOCATION;
	}
#endif

	// Zero all handles first so the failure path below can safely unref
	// entries that were never reached by the allocation loop.
	memset(out_images, 0, sizeof(*out_images) * image_count);
	bool failed = false;
	for (size_t i = 0; i < image_count; ++i) {
		int ret = AHardwareBuffer_allocate(&desc, &(out_images[i].handle));
		if (ret != 0) {
			AHB_ERROR("Failed allocating image %d.", (int)i);
			failed = true;
			break;
		}
	}
	if (failed) {
		// Unref every slot, including untouched (zeroed) ones —
		// assumes u_graphics_buffer_unref is a no-op on a zeroed
		// handle; TODO confirm against u_handles.h.
		for (size_t i = 0; i < image_count; ++i) {
			u_graphics_buffer_unref(&(out_images[i].handle));
		}
		return XRT_ERROR_ALLOCATION;
	}
	return XRT_SUCCESS;
}
206
207static xrt_result_t
208ahardwarebuffer_images_free(struct xrt_image_native_allocator *xina,
209 size_t image_count,
210 struct xrt_image_native *images)
211{
212 for (size_t i = 0; i < image_count; ++i) {
213 u_graphics_buffer_unref(&(images[i].handle));
214 }
215 return XRT_SUCCESS;
216}
/*!
 * xrt_image_native_allocator::destroy implementation.
 *
 * @param xina Allocator to destroy, may be NULL.
 */
static void
ahardwarebuffer_destroy(struct xrt_image_native_allocator *xina)
{
	// free(NULL) is a defined no-op, so no NULL guard is needed.
	free(xina);
}
224
225struct xrt_image_native_allocator *
226android_ahardwarebuffer_allocator_create(void)
227{
228 struct xrt_image_native_allocator *xina = U_TYPED_CALLOC(struct xrt_image_native_allocator);
229 xina->images_allocate = ahardwarebuffer_images_allocate;
230 xina->images_free = ahardwarebuffer_images_free;
231 xina->destroy = ahardwarebuffer_destroy;
232 return xina;
233}
234
235#endif // XRT_GRAPHICS_BUFFER_HANDLE_IS_AHARDWAREBUFFER