A modern Music Player Daemon based on Rockbox, the open-source, high-quality audio player.
libadwaita
audio
rust
zig
deno
mpris
rockbox
mpd
1/***************************************************************************
2 * __________ __ ___.
3 * Open \______ \ ____ ____ | | _\_ |__ _______ ___
4 * Source | _// _ \_/ ___\| |/ /| __ \ / _ \ \/ /
5 * Jukebox | | ( <_> ) \___| < | \_\ ( <_> > < <
6 * Firmware |____|_ /\____/ \___ >__|_ \|___ /\____/__/\_ \
7 * \/ \/ \/ \/ \/
8 * $Id$
9 *
10 * Copyright (C) 2002 by Alan Korr
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation; either version 2
15 * of the License, or (at your option) any later version.
16 *
17 * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
18 * KIND, either express or implied.
19 *
20 ****************************************************************************/
21#ifndef _RBENDIAN_H_
22#define _RBENDIAN_H_
23
24#include "config.h"
25
26#ifdef OS_USE_BYTESWAP_H
27#include <byteswap.h>
28#endif
29
30#ifndef __MINGW32__
31#if defined(__APPLE__) || defined(CTRU)
32#include <sys/types.h>
33#else
34#include <endian.h>
35#endif
36#endif
37
/* The system headers included above may already provide these names as
 * macros or functions; clear them out since we redefine them below to be
 * truly constant-expression compatible. */
#undef swap16
#undef swap32
#undef swap64

#undef letoh16
#undef letoh32
#undef letoh64
#undef htole16
#undef htole32
#undef htole64
#undef betoh16
#undef betoh32
#undef betoh64
#undef htobe16
#undef htobe32
#undef htobe64
55
/* static/generic endianness conversion: pure shift-and-mask macros that
 * remain usable in constant expressions (e.g. static initializers).
 * NOTE: each macro evaluates its argument more than once, so only pass
 * side-effect-free expressions. */

/* Reverse the two bytes of a 16-bit value. */
#define SWAP16_CONST(x) \
  ((typeof(x))( ((uint16_t)(x) >> 8) | ((uint16_t)(x) << 8) ))

/* Reverse the four bytes of a 32-bit value. */
#define SWAP32_CONST(x) \
  ((typeof(x))( (((uint32_t)(x) & 0xff000000) >> 24) | \
                (((uint32_t)(x) & 0x00ff0000) >> 8) | \
                (((uint32_t)(x) & 0x0000ff00) << 8) | \
                (((uint32_t)(x) & 0x000000ff) << 24) ))

/* Reverse the eight bytes of a 64-bit value. */
#define SWAP64_CONST(x) \
  ((typeof(x))( (((uint64_t)(x) & 0xff00000000000000ull) >> 56) | \
                (((uint64_t)(x) & 0x00ff000000000000ull) >> 40) | \
                (((uint64_t)(x) & 0x0000ff0000000000ull) >> 24) | \
                (((uint64_t)(x) & 0x000000ff00000000ull) >> 8) | \
                (((uint64_t)(x) & 0x00000000ff000000ull) << 8) | \
                (((uint64_t)(x) & 0x0000000000ff0000ull) << 24) | \
                (((uint64_t)(x) & 0x000000000000ff00ull) << 40) | \
                (((uint64_t)(x) & 0x00000000000000ffull) << 56) ))

/* Swap the two bytes within each 16-bit half of a 32-bit value. */
#define SWAP_ODD_EVEN32_CONST(x) \
  ((typeof(x))( ((uint32_t)SWAP16_CONST((uint32_t)(x) >> 16) << 16) | \
                SWAP16_CONST((uint32_t)(x))) )

/* Swap the upper and lower 16-bit halves of a 32-bit value. */
#define SWAW32_CONST(x) \
  ((typeof(x))( ((uint32_t)(x) << 16) | ((uint32_t)(x) >> 16) ))
83
#ifndef __ENDIAN_H_NATIVE_RB

/* Map __swapXX_os() onto whatever byteswap primitive the OS/compiler
 * headers provide, trying the common spellings in turn; fall back to the
 * generic shift-and-mask macros when none is available. */
#if defined (__bswap_16)
  #define __swap16_os(x) __bswap_16(x)
  #define __swap32_os(x) __bswap_32(x)
  #define __swap64_os(x) __bswap_64(x)
#elif defined (bswap_16)
  #define __swap16_os(x) bswap_16(x)
  #define __swap32_os(x) bswap_32(x)
  #define __swap64_os(x) bswap_64(x)
#elif defined (__swap16)
  #define __swap16_os(x) __swap16(x)
  #define __swap32_os(x) __swap32(x)
  #define __swap64_os(x) __swap64(x)
#elif defined (swap16)
  #define __swap16_os(x) swap16(x)
  #define __swap32_os(x) swap32(x)
  #define __swap64_os(x) swap64(x)
#else
  /* kinda hacky but works: no OS byteswap found, use the generic macros */
  #define __swap16_os(x) SWAP16_CONST(x)
  #define __swap32_os(x) SWAP32_CONST(x)
  #define __swap64_os(x) SWAP64_CONST(x)
#endif

/* Wrap the OS byteswaps in real functions because the macro forms aren't
 * always compatible with compound initializers. */
static FORCE_INLINE uint16_t swap16_hw(uint16_t x)
  { return __swap16_os(x); }
static FORCE_INLINE uint32_t swap32_hw(uint32_t x)
  { return __swap32_os(x); }
static FORCE_INLINE uint64_t swap64_hw(uint64_t x)
  { return __swap64_os(x); }

#endif /* __ENDIAN_H_NATIVE_RB */
118
#if defined(NEED_GENERIC_BYTESWAPS) || !defined(__ENDIAN_H_NATIVE_RB)
/* These two operations seem to be unique to Rockbox; no OS fallback. */

/*
 * Swap the two bytes within each 16-bit half of a 32-bit word:
 * result[31..24],[15.. 8] = value[23..16],[ 7.. 0]
 * result[23..16],[ 7.. 0] = value[31..24],[15.. 8]
 */
static inline uint32_t swap_odd_even32_hw(uint32_t value)
{
    uint32_t odd  = (value & 0xff00ff00) >> 8; /* bytes 3,1 moved down */
    uint32_t even = (value & 0x00ff00ff) << 8; /* bytes 2,0 moved up */
    return odd | even;
}

/*
 * Swap the upper and lower 16-bit halves of a 32-bit word:
 * result[31..16] = value[15.. 0];
 * result[15.. 0] = value[31..16];
 */
static inline uint32_t swaw32_hw(uint32_t value)
{
    uint32_t upper = value >> 16;
    uint32_t lower = value << 16;
    return lower | upper;
}
#endif /* Generic */
140
/* Select the best method based upon whether x is a constant expression:
 * constants go through the pure-macro *_CONST forms (so the whole result
 * stays a constant expression), runtime values go through the inline _hw
 * wrappers.  x may be expanded more than once in the constant branch, but
 * constant expressions have no side effects, so that is harmless. */
#define swap16(x) \
  ( __builtin_constant_p(x) ? SWAP16_CONST(x) : (typeof(x))swap16_hw(x) )

#define swap32(x) \
  ( __builtin_constant_p(x) ? SWAP32_CONST(x) : (typeof(x))swap32_hw(x) )

#define swap64(x) \
  ( __builtin_constant_p(x) ? SWAP64_CONST(x) : (typeof(x))swap64_hw(x) )

#define swap_odd_even32(x) \
  ( __builtin_constant_p(x) ? SWAP_ODD_EVEN32_CONST(x) : (typeof(x))swap_odd_even32_hw(x) )

#define swaw32(x) \
  ( __builtin_constant_p(x) ? SWAW32_CONST(x) : (typeof(x))swaw32_hw(x) )
156
/* Host <-> little/big endian conversions, selected by the target byte
 * order (ROCKBOX_LITTLE_ENDIAN / ROCKBOX_BIG_ENDIAN, set via config.h).
 * Naming: xetohN converts x-endian to host order, htoxN converts host
 * order to x-endian.  Same-order conversions are no-ops; opposite-order
 * conversions byteswap. */
#if defined(ROCKBOX_LITTLE_ENDIAN)
  #define letoh16(x) (x)
  #define letoh32(x) (x)
  #define letoh64(x) (x)
  #define htole16(x) (x)
  #define htole32(x) (x)
  #define htole64(x) (x)
  #define betoh16(x) swap16(x)
  #define betoh32(x) swap32(x)
  #define betoh64(x) swap64(x)
  #define htobe16(x) swap16(x)
  #define htobe32(x) swap32(x)
  #define htobe64(x) swap64(x)
  #define swap_odd_even_be32(x) (x)
  #define swap_odd_even_le32(x) swap_odd_even32(x)
#elif defined(ROCKBOX_BIG_ENDIAN)
  #define letoh16(x) swap16(x)
  #define letoh32(x) swap32(x)
  #define letoh64(x) swap64(x)
  #define htole16(x) swap16(x)
  #define htole32(x) swap32(x)
  #define htole64(x) swap64(x)
  #define betoh16(x) (x)
  #define betoh32(x) (x)
  #define betoh64(x) (x)
  #define htobe16(x) (x)
  #define htobe32(x) (x)
  #define htobe64(x) (x)
  #define swap_odd_even_be32(x) swap_odd_even32(x)
  #define swap_odd_even_le32(x) (x)
#else
  #error "Unknown endianness!"
#endif
190
/*
 * Generic unaligned loads and stores (byte-by-byte, no alignment required)
 */
/* Read a 16-bit little-endian value byte-by-byte; p may be unaligned. */
static inline uint16_t _generic_load_le16(const void* p)
{
    const uint8_t* bytes = p;
    uint16_t lo = bytes[0];
    uint16_t hi = bytes[1];
    return (uint16_t)(lo | (hi << 8));
}
199
/* Read a 32-bit little-endian value byte-by-byte; p may be unaligned.
 *
 * Fix: d[3] is promoted to (signed) int before shifting, so for bytes
 * >= 0x80 the expression `d[3] << 24` shifts a bit into the sign
 * position -- undefined behavior in C (CERT INT34-C).  Widen the top
 * byte to uint32_t before shifting so the shift is done unsigned. */
static inline uint32_t _generic_load_le32(const void* p)
{
    const uint8_t* d = p;
    return d[0] | (d[1] << 8) | (d[2] << 16) | ((uint32_t)d[3] << 24);
}
205
/* Read a 64-bit little-endian value byte-by-byte; p may be unaligned. */
static inline uint64_t _generic_load_le64(const void* p)
{
    const uint8_t* bytes = p;
    uint64_t result = 0;
    /* accumulate from the most significant byte (index 7) downwards */
    for (int i = 7; i >= 0; i--)
        result = (result << 8) | bytes[i];
    return result;
}
214
/* Read a 16-bit big-endian value byte-by-byte; p may be unaligned. */
static inline uint16_t _generic_load_be16(const void* p)
{
    const uint8_t* bytes = p;
    uint16_t hi = bytes[0];
    uint16_t lo = bytes[1];
    return (uint16_t)((hi << 8) | lo);
}
220
/* Read a 32-bit big-endian value byte-by-byte; p may be unaligned.
 *
 * Fix: d[0] is promoted to (signed) int before shifting, so for bytes
 * >= 0x80 the expression `d[0] << 24` shifts a bit into the sign
 * position -- undefined behavior in C (CERT INT34-C).  Widen the top
 * byte to uint32_t before shifting so the shift is done unsigned. */
static inline uint32_t _generic_load_be32(const void* p)
{
    const uint8_t* d = p;
    return ((uint32_t)d[0] << 24) | (d[1] << 16) | (d[2] << 8) | d[3];
}
226
/* Read a 64-bit big-endian value byte-by-byte; p may be unaligned. */
static inline uint64_t _generic_load_be64(const void* p)
{
    const uint8_t* bytes = p;
    uint64_t result = 0;
    /* most significant byte first */
    for (int i = 0; i < 8; i++)
        result = (result << 8) | bytes[i];
    return result;
}
235
/* Write a 16-bit value as little-endian bytes; p may be unaligned. */
static inline void _generic_store_le16(void* p, uint16_t val)
{
    uint8_t* out = p;
    out[0] = (uint8_t)(val >> 0);
    out[1] = (uint8_t)(val >> 8);
}
242
/* Write a 32-bit value as little-endian bytes; p may be unaligned. */
static inline void _generic_store_le32(void* p, uint32_t val)
{
    uint8_t* out = p;
    for (int i = 0; i < 4; i++)
        out[i] = (uint8_t)(val >> (8 * i));
}
251
/* Write a 64-bit value as little-endian bytes; p may be unaligned. */
static inline void _generic_store_le64(void* p, uint64_t val)
{
    uint8_t* out = p;
    for (int i = 0; i < 8; i++)
        out[i] = (uint8_t)(val >> (8 * i));
}
264
/* Write a 16-bit value as big-endian bytes; p may be unaligned. */
static inline void _generic_store_be16(void* p, uint16_t val)
{
    uint8_t* out = p;
    out[0] = (uint8_t)(val >> 8);
    out[1] = (uint8_t)(val >> 0);
}
271
/* Write a 32-bit value as big-endian bytes; p may be unaligned. */
static inline void _generic_store_be32(void* p, uint32_t val)
{
    uint8_t* out = p;
    for (int i = 0; i < 4; i++)
        out[i] = (uint8_t)(val >> (24 - 8 * i));
}
280
/* Write a 64-bit value as big-endian bytes; p may be unaligned. */
static inline void _generic_store_be64(void* p, uint64_t val)
{
    uint8_t* out = p;
    for (int i = 0; i < 8; i++)
        out[i] = (uint8_t)(val >> (56 - 8 * i));
}
293
#if !defined(HAVE_UNALIGNED_LOAD_STORE)

/* Use generic unaligned loads: the architecture cannot do unaligned
 * accesses directly, so every fixed-order accessor goes byte-by-byte. */
#define load_le16 _generic_load_le16
#define load_le32 _generic_load_le32
#define load_le64 _generic_load_le64
#define load_be16 _generic_load_be16
#define load_be32 _generic_load_be32
#define load_be64 _generic_load_be64
#define store_le16 _generic_store_le16
#define store_le32 _generic_store_le32
#define store_le64 _generic_store_le64
#define store_be16 _generic_store_be16
#define store_be32 _generic_store_be32
#define store_be64 _generic_store_be64

/* Define host byte order unaligned load/store as aliases of whichever
 * fixed-order accessor matches the target's endianness. */
#if defined(ROCKBOX_LITTLE_ENDIAN)
# define load_h16 load_le16
# define load_h32 load_le32
# define load_h64 load_le64
# define store_h16 store_le16
# define store_h32 store_le32
# define store_h64 store_le64
#elif defined(ROCKBOX_BIG_ENDIAN)
# define load_h16 load_be16
# define load_h32 load_be32
# define load_h64 load_be64
# define store_h16 store_be16
# define store_h32 store_be32
# define store_h64 store_be64
#else
# error
#endif

#else /* HAVE_UNALIGNED_LOAD_STORE */

/* The arch should define unaligned loads in host byte order; derive the
 * fixed-order accessors from them, adding a byteswap for the order that
 * differs from the host. */
#if defined(ROCKBOX_LITTLE_ENDIAN)
# define load_le16 load_h16
# define load_le32 load_h32
# define load_le64 load_h64
# define load_be16(p) swap16(load_h16((p)))
# define load_be32(p) swap32(load_h32((p)))
# define load_be64(p) swap64(load_h64((p)))
# define store_le16 store_h16
# define store_le32 store_h32
# define store_le64 store_h64
# define store_be16(p,v) store_h16((p),swap16((v)))
# define store_be32(p,v) store_h32((p),swap32((v)))
# define store_be64(p,v) store_h64((p),swap64((v)))
#elif defined(ROCKBOX_BIG_ENDIAN)
# define load_le16(p) swap16(load_h16((p)))
# define load_le32(p) swap32(load_h32((p)))
# define load_le64(p) swap64(load_h64((p)))
# define load_be16 load_h16
# define load_be32 load_h32
# define load_be64 load_h64
# define store_le16(p,v) store_h16((p),swap16((v)))
# define store_le32(p,v) store_h32((p),swap32((v)))
# define store_le64(p,v) store_h64((p),swap64((v)))
# define store_be16 store_h16
# define store_be32 store_h32
# define store_be64 store_h64
#else
# error
#endif

#endif /* HAVE_UNALIGNED_LOAD_STORE */
363
/*
 * Aligned loads and stores
 */
367
/* Load a host-order uint16_t; p must be suitably aligned for uint16_t. */
static inline uint16_t load_h16_aligned(const void* p)
{
    const uint16_t* addr = p;
    return *addr;
}
372
/* Load a host-order uint32_t; p must be suitably aligned for uint32_t. */
static inline uint32_t load_h32_aligned(const void* p)
{
    const uint32_t* addr = p;
    return *addr;
}
377
/* Load a host-order uint64_t; p must be suitably aligned for uint64_t. */
static inline uint64_t load_h64_aligned(const void* p)
{
    const uint64_t* addr = p;
    return *addr;
}
382
/* Store a host-order uint16_t; p must be suitably aligned for uint16_t. */
static inline void store_h16_aligned(void* p, uint16_t val)
{
    uint16_t* addr = p;
    *addr = val;
}
387
/* Store a host-order uint32_t; p must be suitably aligned for uint32_t. */
static inline void store_h32_aligned(void* p, uint32_t val)
{
    uint32_t* addr = p;
    *addr = val;
}
392
/* Store a host-order uint64_t; p must be suitably aligned for uint64_t. */
static inline void store_h64_aligned(void* p, uint64_t val)
{
    uint64_t* addr = p;
    *addr = val;
}
397
/* Fixed-order aligned load/store wrappers: accessors matching the host's
 * byte order alias the host-order functions directly; the opposite order
 * adds a byteswap around the aligned access. */
#if defined(ROCKBOX_LITTLE_ENDIAN)
# define load_le16_aligned load_h16_aligned
# define load_le32_aligned load_h32_aligned
# define load_le64_aligned load_h64_aligned
# define load_be16_aligned(p) swap16(load_h16_aligned((p)))
# define load_be32_aligned(p) swap32(load_h32_aligned((p)))
# define load_be64_aligned(p) swap64(load_h64_aligned((p)))
# define store_le16_aligned store_h16_aligned
# define store_le32_aligned store_h32_aligned
# define store_le64_aligned store_h64_aligned
# define store_be16_aligned(p,v) store_h16_aligned((p),swap16((v)))
# define store_be32_aligned(p,v) store_h32_aligned((p),swap32((v)))
# define store_be64_aligned(p,v) store_h64_aligned((p),swap64((v)))
#elif defined(ROCKBOX_BIG_ENDIAN)
# define load_le16_aligned(p) swap16(load_h16_aligned((p)))
# define load_le32_aligned(p) swap32(load_h32_aligned((p)))
# define load_le64_aligned(p) swap64(load_h64_aligned((p)))
# define load_be16_aligned load_h16_aligned
# define load_be32_aligned load_h32_aligned
# define load_be64_aligned load_h64_aligned
# define store_le16_aligned(p,v) store_h16_aligned((p),swap16((v)))
# define store_le32_aligned(p,v) store_h32_aligned((p),swap32((v)))
# define store_le64_aligned(p,v) store_h64_aligned((p),swap64((v)))
# define store_be16_aligned store_h16_aligned
# define store_be32_aligned store_h32_aligned
# define store_be64_aligned store_h64_aligned
#else
# error "Unknown endian!"
#endif
427
428#endif /* _RBENDIAN_H_ */