···227 const auto res = GetModuleFileNameA( mod[i], name, 1021 );
228 if( res > 0 )
229 {
0000230 auto ptr = name + res;
231 while( ptr > name && *ptr != '\\' && *ptr != '/' ) ptr--;
232 if( ptr > name ) ptr++;
···227 const auto res = GetModuleFileNameA( mod[i], name, 1021 );
228 if( res > 0 )
229 {
230+ // This may be a new module loaded since our call to SymInitialize.
231+ // Just in case, force DbgHelp to load its pdb !
232+ SymLoadModuleEx(proc, NULL, name, NULL, (DWORD64)info.lpBaseOfDll, info.SizeOfImage, NULL, 0);
233+234 auto ptr = name + res;
235 while( ptr > name && *ptr != '\\' && *ptr != '/' ) ptr--;
236 if( ptr > name ) ptr++;
···210 }
211 };
212213-#ifdef _MSC_VER
214-#pragma warning(push)
215-#pragma warning(disable: 4554)
216-#endif
217 template<typename T>
218 static inline bool circular_less_than(T a, T b)
219 {
220 static_assert(std::is_integral<T>::value && !std::numeric_limits<T>::is_signed, "circular_less_than is intended to be used only with unsigned integer types");
221- return static_cast<T>(a - b) > (static_cast<T>(static_cast<T>(1) << static_cast<T>(sizeof(T) * CHAR_BIT - 1)));
00222 }
223-#ifdef _MSC_VER
224-#pragma warning(pop)
225-#endif
226227 template<typename U>
228 static inline char* align_for(char* ptr)
···210 }
211 };
2120000213 template<typename T>
214 static inline bool circular_less_than(T a, T b)
215 {
216 static_assert(std::is_integral<T>::value && !std::numeric_limits<T>::is_signed, "circular_less_than is intended to be used only with unsigned integer types");
217+ return static_cast<T>(a - b) > static_cast<T>(static_cast<T>(1) << (static_cast<T>(sizeof(T) * CHAR_BIT - 1)));
218+ // Note: extra parens around rhs of operator<< is MSVC bug: https://developercommunity2.visualstudio.com/t/C4554-triggers-when-both-lhs-and-rhs-is/10034931
219+ // silencing the bug requires #pragma warning(disable: 4554) around the calling code and has no effect when done here.
220 }
000221222 template<typename U>
223 static inline char* align_for(char* ptr)
···193#undef STT_FUNC
194#undef NT_GNU_BUILD_ID
195#undef ELFCOMPRESS_ZLIB
0196197/* Basic types. */
198···350#endif /* BACKTRACE_ELF_SIZE != 32 */
351352#define ELFCOMPRESS_ZLIB 1
0353354/* Names of sections, indexed by enum dwarf_section in internal.h. */
355···1130 on error. */
11311132static int
1133-elf_zlib_fetch (const unsigned char **ppin, const unsigned char *pinend,
1134 uint64_t *pval, unsigned int *pbits)
1135{
1136 unsigned int bits;
···1177 return 1;
1178}
117900000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001180/* Huffman code tables, like the rest of the zlib format, are defined
1181 by RFC 1951. We store a Huffman code table as a series of tables
1182 stored sequentially in memory. Each entry in a table is 16 bits.
···1211/* Number of entries we allocate to for one code table. We get a page
1212 for the two code tables we need. */
12131214-#define HUFFMAN_TABLE_SIZE (1024)
12151216/* Bit masks and shifts for the values in the table. */
12171218-#define HUFFMAN_VALUE_MASK 0x01ff
1219-#define HUFFMAN_BITS_SHIFT 9
1220-#define HUFFMAN_BITS_MASK 0x7
1221-#define HUFFMAN_SECONDARY_SHIFT 12
12221223/* For working memory while inflating we need two code tables, we need
1224 an array of code lengths (max value 15, so we use unsigned char),
···1226 latter two arrays must be large enough to hold the maximum number
1227 of code lengths, which RFC 1951 defines as 286 + 30. */
12281229-#define ZDEBUG_TABLE_SIZE \
1230- (2 * HUFFMAN_TABLE_SIZE * sizeof (uint16_t) \
1231 + (286 + 30) * sizeof (uint16_t) \
1232 + (286 + 30) * sizeof (unsigned char))
12331234-#define ZDEBUG_TABLE_CODELEN_OFFSET \
1235- (2 * HUFFMAN_TABLE_SIZE * sizeof (uint16_t) \
1236 + (286 + 30) * sizeof (uint16_t))
12371238-#define ZDEBUG_TABLE_WORK_OFFSET \
1239- (2 * HUFFMAN_TABLE_SIZE * sizeof (uint16_t))
12401241#ifdef BACKTRACE_GENERATE_FIXED_HUFFMAN_TABLE
1242···1269 next value after VAL with the same bit length. */
12701271 next = (uint16_t *) (((unsigned char *) zdebug_table)
1272- + ZDEBUG_TABLE_WORK_OFFSET);
12731274 memset (&count[0], 0, 16 * sizeof (uint16_t));
1275 for (i = 0; i < codes_len; ++i)
···1297 /* For each length, fill in the table for the codes of that
1298 length. */
12991300- memset (table, 0, HUFFMAN_TABLE_SIZE * sizeof (uint16_t));
13011302 /* Handle the values that do not require a secondary table. */
1303···1331 /* In the compressed bit stream, the value VAL is encoded as
1332 J bits with the value C. */
13331334- if (unlikely ((val & ~HUFFMAN_VALUE_MASK) != 0))
1335 {
1336 elf_uncompress_failed ();
1337 return 0;
1338 }
13391340- tval = val | ((j - 1) << HUFFMAN_BITS_SHIFT);
13411342 /* The table lookup uses 8 bits. If J is less than 8, we
1343 don't know what the other bits will be. We need to fill
···1487 {
1488 /* Start a new secondary table. */
14891490- if (unlikely ((next_secondary & HUFFMAN_VALUE_MASK)
1491 != next_secondary))
1492 {
1493 elf_uncompress_failed ();
···1498 secondary_bits = j - 8;
1499 next_secondary += 1 << secondary_bits;
1500 table[primary] = (secondary
1501- + ((j - 8) << HUFFMAN_BITS_SHIFT)
1502- + (1U << HUFFMAN_SECONDARY_SHIFT));
1503 }
1504 else
1505 {
1506 /* There is an existing entry. It had better be a
1507 secondary table with enough bits. */
1508- if (unlikely ((tprimary & (1U << HUFFMAN_SECONDARY_SHIFT))
01509 == 0))
1510 {
1511 elf_uncompress_failed ();
1512 return 0;
1513 }
1514- secondary = tprimary & HUFFMAN_VALUE_MASK;
1515- secondary_bits = ((tprimary >> HUFFMAN_BITS_SHIFT)
1516- & HUFFMAN_BITS_MASK);
1517 if (unlikely (secondary_bits < j - 8))
1518 {
1519 elf_uncompress_failed ();
···15241525 /* Fill in secondary table entries. */
15261527- tval = val | ((j - 8) << HUFFMAN_BITS_SHIFT);
15281529 for (ind = code >> 8;
1530 ind < (1U << secondary_bits);
···15671568#include <stdio.h>
15691570-static uint16_t table[ZDEBUG_TABLE_SIZE];
1571static unsigned char codes[288];
15721573int
···1795 const uint16_t *tlit;
1796 const uint16_t *tdist;
17971798- if (!elf_zlib_fetch (&pin, pinend, &val, &bits))
1799 return 0;
18001801 last = val & 1;
···1883 /* Read a Huffman encoding table. The various magic
1884 numbers here are from RFC 1951. */
18851886- if (!elf_zlib_fetch (&pin, pinend, &val, &bits))
1887 return 0;
18881889 nlit = (val & 0x1f) + 257;
···1908 /* There are always at least 4 elements in the
1909 table. */
19101911- if (!elf_zlib_fetch (&pin, pinend, &val, &bits))
1912 return 0;
19131914 codebits[16] = val & 7;
···1928 if (nclen == 5)
1929 goto codebitsdone;
19301931- if (!elf_zlib_fetch (&pin, pinend, &val, &bits))
1932 return 0;
19331934 codebits[7] = val & 7;
···1966 if (nclen == 10)
1967 goto codebitsdone;
19681969- if (!elf_zlib_fetch (&pin, pinend, &val, &bits))
1970 return 0;
19711972 codebits[11] = val & 7;
···2004 if (nclen == 15)
2005 goto codebitsdone;
20062007- if (!elf_zlib_fetch (&pin, pinend, &val, &bits))
2008 return 0;
20092010 codebits[2] = val & 7;
···2043 at the end of zdebug_table to hold them. */
20442045 plenbase = (((unsigned char *) zdebug_table)
2046- + ZDEBUG_TABLE_CODELEN_OFFSET);
2047 plen = plenbase;
2048 plenend = plen + nlit + ndist;
2049 while (plen < plenend)
···2052 unsigned int b;
2053 uint16_t v;
20542055- if (!elf_zlib_fetch (&pin, pinend, &val, &bits))
2056 return 0;
20572058 t = zdebug_table[val & 0xff];
20592060 /* The compression here uses bit lengths up to 7, so
2061 a secondary table is never necessary. */
2062- if (unlikely ((t & (1U << HUFFMAN_SECONDARY_SHIFT)) != 0))
02063 {
2064 elf_uncompress_failed ();
2065 return 0;
2066 }
20672068- b = (t >> HUFFMAN_BITS_SHIFT) & HUFFMAN_BITS_MASK;
2069 val >>= b + 1;
2070 bits -= b + 1;
20712072- v = t & HUFFMAN_VALUE_MASK;
2073 if (v < 16)
2074 *plen++ = v;
2075 else if (v == 16)
···2086 }
20872088 /* We used up to 7 bits since the last
2089- elf_zlib_fetch, so we have at least 8 bits
2090 available here. */
20912092 c = 3 + (val & 0x3);
···2121 /* Store zero 3 to 10 times. */
21222123 /* We used up to 7 bits since the last
2124- elf_zlib_fetch, so we have at least 8 bits
2125 available here. */
21262127 c = 3 + (val & 0x7);
···2167 /* Store zero 11 to 138 times. */
21682169 /* We used up to 7 bits since the last
2170- elf_zlib_fetch, so we have at least 8 bits
2171 available here. */
21722173 c = 11 + (val & 0x7f);
···2204 zdebug_table))
2205 return 0;
2206 if (!elf_zlib_inflate_table (plen + nlit, ndist, zdebug_table,
2207- zdebug_table + HUFFMAN_TABLE_SIZE))
02208 return 0;
2209 tlit = zdebug_table;
2210- tdist = zdebug_table + HUFFMAN_TABLE_SIZE;
2211 }
22122213 /* Inflate values until the end of the block. This is the
···2220 uint16_t v;
2221 unsigned int lit;
22222223- if (!elf_zlib_fetch (&pin, pinend, &val, &bits))
2224 return 0;
22252226 t = tlit[val & 0xff];
2227- b = (t >> HUFFMAN_BITS_SHIFT) & HUFFMAN_BITS_MASK;
2228- v = t & HUFFMAN_VALUE_MASK;
22292230- if ((t & (1U << HUFFMAN_SECONDARY_SHIFT)) == 0)
2231 {
2232 lit = v;
2233 val >>= b + 1;
···2236 else
2237 {
2238 t = tlit[v + 0x100 + ((val >> 8) & ((1U << b) - 1))];
2239- b = (t >> HUFFMAN_BITS_SHIFT) & HUFFMAN_BITS_MASK;
2240- lit = t & HUFFMAN_VALUE_MASK;
2241 val >>= b + 8;
2242 bits -= b + 8;
2243 }
···2282 {
2283 unsigned int extra;
22842285- if (!elf_zlib_fetch (&pin, pinend, &val, &bits))
2286 return 0;
22872288 /* This is an expression for the table of length
···2297 bits -= extra;
2298 }
22992300- if (!elf_zlib_fetch (&pin, pinend, &val, &bits))
2301 return 0;
23022303 t = tdist[val & 0xff];
2304- b = (t >> HUFFMAN_BITS_SHIFT) & HUFFMAN_BITS_MASK;
2305- v = t & HUFFMAN_VALUE_MASK;
23062307- if ((t & (1U << HUFFMAN_SECONDARY_SHIFT)) == 0)
2308 {
2309 dist = v;
2310 val >>= b + 1;
···2313 else
2314 {
2315 t = tdist[v + 0x100 + ((val >> 8) & ((1U << b) - 1))];
2316- b = (t >> HUFFMAN_BITS_SHIFT) & HUFFMAN_BITS_MASK;
2317- dist = t & HUFFMAN_VALUE_MASK;
02318 val >>= b + 8;
2319 bits -= b + 8;
2320 }
···2354 {
2355 unsigned int extra;
23562357- if (!elf_zlib_fetch (&pin, pinend, &val, &bits))
2358 return 0;
23592360 /* This is an expression for the table of
···2559 return 1;
2560}
25610000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002562/* Uncompress the old compressed debug format, the one emitted by
2563 --compress-debug-sections=zlib-gnu. The compressed data is in
2564 COMPRESSED / COMPRESSED_SIZE, and the function writes to
···2628 unsigned char **uncompressed, size_t *uncompressed_size)
2629{
2630 const b_elf_chdr *chdr;
002631 unsigned char *po;
26322633 *uncompressed = NULL;
···26392640 chdr = (const b_elf_chdr *) compressed;
26412642- if (chdr->ch_type != ELFCOMPRESS_ZLIB)
2643- {
2644- /* Unsupported compression algorithm. */
2645- return 1;
2646- }
2647-2648 if (*uncompressed != NULL && *uncompressed_size >= chdr->ch_size)
2649 po = *uncompressed;
2650 else
2651 {
2652- po = (unsigned char *) backtrace_alloc (state, chdr->ch_size,
2653- error_callback, data);
2654- if (po == NULL)
2655 return 0;
02656 }
26572658- if (!elf_zlib_inflate_and_verify (compressed + sizeof (b_elf_chdr),
2659- compressed_size - sizeof (b_elf_chdr),
2660- zdebug_table, po, chdr->ch_size))
2661- return 1;
0000000000000000026622663 *uncompressed = po;
2664 *uncompressed_size = chdr->ch_size;
2665000002666 return 1;
2667}
2668···2687 ret = elf_uncompress_zdebug (state, compressed, compressed_size,
2688 zdebug_table, error_callback, data,
2689 uncompressed, uncompressed_size);
00000000000000000000000002690 backtrace_free (state, zdebug_table, ZDEBUG_TABLE_SIZE,
2691 error_callback, data);
2692 return ret;
···4688 if (zdebug_table == NULL)
4689 {
4690 zdebug_table = ((uint16_t *)
4691- backtrace_alloc (state, ZDEBUG_TABLE_SIZE,
4692 error_callback, data));
4693 if (zdebug_table == NULL)
4694 goto fail;
···4714 }
4715 }
471600000004717 /* Uncompress the official ELF format
4718- (--compress-debug-sections=zlib-gabi). */
4719 for (i = 0; i < (int) DEBUG_MAX; ++i)
4720 {
4721 unsigned char *uncompressed_data;
···193#undef STT_FUNC
194#undef NT_GNU_BUILD_ID
195#undef ELFCOMPRESS_ZLIB
196+#undef ELFCOMPRESS_ZSTD
197198/* Basic types. */
199···351#endif /* BACKTRACE_ELF_SIZE != 32 */
352353#define ELFCOMPRESS_ZLIB 1
354+#define ELFCOMPRESS_ZSTD 2
355356/* Names of sections, indexed by enum dwarf_section in internal.h. */
357···1132 on error. */
11331134static int
1135+elf_fetch_bits (const unsigned char **ppin, const unsigned char *pinend,
1136 uint64_t *pval, unsigned int *pbits)
1137{
1138 unsigned int bits;
···1179 return 1;
1180}
11811182+/* This is like elf_fetch_bits, but it fetchs the bits backward, and ensures at
1183+ least 16 bits. This is for zstd. */
1184+1185+static int
1186+elf_fetch_bits_backward (const unsigned char **ppin,
1187+ const unsigned char *pinend,
1188+ uint64_t *pval, unsigned int *pbits)
1189+{
1190+ unsigned int bits;
1191+ const unsigned char *pin;
1192+ uint64_t val;
1193+ uint32_t next;
1194+1195+ bits = *pbits;
1196+ if (bits >= 16)
1197+ return 1;
1198+ pin = *ppin;
1199+ val = *pval;
1200+1201+ if (unlikely (pin <= pinend))
1202+ {
1203+ if (bits == 0)
1204+ {
1205+ elf_uncompress_failed ();
1206+ return 0;
1207+ }
1208+ return 1;
1209+ }
1210+1211+ pin -= 4;
1212+1213+#if defined(__BYTE_ORDER__) && defined(__ORDER_LITTLE_ENDIAN__) \
1214+ && defined(__ORDER_BIG_ENDIAN__) \
1215+ && (__BYTE_ORDER__ == __ORDER_BIG_ENDIAN__ \
1216+ || __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__)
1217+ /* We've ensured that PIN is aligned. */
1218+ next = *(const uint32_t *)pin;
1219+1220+#if __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
1221+ next = __builtin_bswap32 (next);
1222+#endif
1223+#else
1224+ next = pin[0] | (pin[1] << 8) | (pin[2] << 16) | (pin[3] << 24);
1225+#endif
1226+1227+ val <<= 32;
1228+ val |= next;
1229+ bits += 32;
1230+1231+ if (unlikely (pin < pinend))
1232+ {
1233+ val >>= (pinend - pin) * 8;
1234+ bits -= (pinend - pin) * 8;
1235+ }
1236+1237+ *ppin = pin;
1238+ *pval = val;
1239+ *pbits = bits;
1240+ return 1;
1241+}
1242+1243+/* Initialize backward fetching when the bitstream starts with a 1 bit in the
1244+ last byte in memory (which is the first one that we read). This is used by
1245+ zstd decompression. Returns 1 on success, 0 on error. */
1246+1247+static int
1248+elf_fetch_backward_init (const unsigned char **ppin,
1249+ const unsigned char *pinend,
1250+ uint64_t *pval, unsigned int *pbits)
1251+{
1252+ const unsigned char *pin;
1253+ unsigned int stream_start;
1254+ uint64_t val;
1255+ unsigned int bits;
1256+1257+ pin = *ppin;
1258+ stream_start = (unsigned int)*pin;
1259+ if (unlikely (stream_start == 0))
1260+ {
1261+ elf_uncompress_failed ();
1262+ return 0;
1263+ }
1264+ val = 0;
1265+ bits = 0;
1266+1267+ /* Align to a 32-bit boundary. */
1268+ while ((((uintptr_t)pin) & 3) != 0)
1269+ {
1270+ val <<= 8;
1271+ val |= (uint64_t)*pin;
1272+ bits += 8;
1273+ --pin;
1274+ }
1275+1276+ val <<= 8;
1277+ val |= (uint64_t)*pin;
1278+ bits += 8;
1279+1280+ *ppin = pin;
1281+ *pval = val;
1282+ *pbits = bits;
1283+ if (!elf_fetch_bits_backward (ppin, pinend, pval, pbits))
1284+ return 0;
1285+1286+ *pbits -= __builtin_clz (stream_start) - (sizeof (unsigned int) - 1) * 8 + 1;
1287+1288+ if (!elf_fetch_bits_backward (ppin, pinend, pval, pbits))
1289+ return 0;
1290+1291+ return 1;
1292+}
1293+1294/* Huffman code tables, like the rest of the zlib format, are defined
1295 by RFC 1951. We store a Huffman code table as a series of tables
1296 stored sequentially in memory. Each entry in a table is 16 bits.
···1325/* Number of entries we allocate to for one code table. We get a page
1326 for the two code tables we need. */
13271328+#define ZLIB_HUFFMAN_TABLE_SIZE (1024)
13291330/* Bit masks and shifts for the values in the table. */
13311332+#define ZLIB_HUFFMAN_VALUE_MASK 0x01ff
1333+#define ZLIB_HUFFMAN_BITS_SHIFT 9
1334+#define ZLIB_HUFFMAN_BITS_MASK 0x7
1335+#define ZLIB_HUFFMAN_SECONDARY_SHIFT 12
13361337/* For working memory while inflating we need two code tables, we need
1338 an array of code lengths (max value 15, so we use unsigned char),
···1340 latter two arrays must be large enough to hold the maximum number
1341 of code lengths, which RFC 1951 defines as 286 + 30. */
13421343+#define ZLIB_TABLE_SIZE \
1344+ (2 * ZLIB_HUFFMAN_TABLE_SIZE * sizeof (uint16_t) \
1345 + (286 + 30) * sizeof (uint16_t) \
1346 + (286 + 30) * sizeof (unsigned char))
13471348+#define ZLIB_TABLE_CODELEN_OFFSET \
1349+ (2 * ZLIB_HUFFMAN_TABLE_SIZE * sizeof (uint16_t) \
1350 + (286 + 30) * sizeof (uint16_t))
13511352+#define ZLIB_TABLE_WORK_OFFSET \
1353+ (2 * ZLIB_HUFFMAN_TABLE_SIZE * sizeof (uint16_t))
13541355#ifdef BACKTRACE_GENERATE_FIXED_HUFFMAN_TABLE
1356···1383 next value after VAL with the same bit length. */
13841385 next = (uint16_t *) (((unsigned char *) zdebug_table)
1386+ + ZLIB_TABLE_WORK_OFFSET);
13871388 memset (&count[0], 0, 16 * sizeof (uint16_t));
1389 for (i = 0; i < codes_len; ++i)
···1411 /* For each length, fill in the table for the codes of that
1412 length. */
14131414+ memset (table, 0, ZLIB_HUFFMAN_TABLE_SIZE * sizeof (uint16_t));
14151416 /* Handle the values that do not require a secondary table. */
1417···1445 /* In the compressed bit stream, the value VAL is encoded as
1446 J bits with the value C. */
14471448+ if (unlikely ((val & ~ZLIB_HUFFMAN_VALUE_MASK) != 0))
1449 {
1450 elf_uncompress_failed ();
1451 return 0;
1452 }
14531454+ tval = val | ((j - 1) << ZLIB_HUFFMAN_BITS_SHIFT);
14551456 /* The table lookup uses 8 bits. If J is less than 8, we
1457 don't know what the other bits will be. We need to fill
···1601 {
1602 /* Start a new secondary table. */
16031604+ if (unlikely ((next_secondary & ZLIB_HUFFMAN_VALUE_MASK)
1605 != next_secondary))
1606 {
1607 elf_uncompress_failed ();
···1612 secondary_bits = j - 8;
1613 next_secondary += 1 << secondary_bits;
1614 table[primary] = (secondary
1615+ + ((j - 8) << ZLIB_HUFFMAN_BITS_SHIFT)
1616+ + (1U << ZLIB_HUFFMAN_SECONDARY_SHIFT));
1617 }
1618 else
1619 {
1620 /* There is an existing entry. It had better be a
1621 secondary table with enough bits. */
1622+ if (unlikely ((tprimary
1623+ & (1U << ZLIB_HUFFMAN_SECONDARY_SHIFT))
1624 == 0))
1625 {
1626 elf_uncompress_failed ();
1627 return 0;
1628 }
1629+ secondary = tprimary & ZLIB_HUFFMAN_VALUE_MASK;
1630+ secondary_bits = ((tprimary >> ZLIB_HUFFMAN_BITS_SHIFT)
1631+ & ZLIB_HUFFMAN_BITS_MASK);
1632 if (unlikely (secondary_bits < j - 8))
1633 {
1634 elf_uncompress_failed ();
···16391640 /* Fill in secondary table entries. */
16411642+ tval = val | ((j - 8) << ZLIB_HUFFMAN_BITS_SHIFT);
16431644 for (ind = code >> 8;
1645 ind < (1U << secondary_bits);
···16821683#include <stdio.h>
16841685+static uint16_t table[ZLIB_TABLE_SIZE];
1686static unsigned char codes[288];
16871688int
···1910 const uint16_t *tlit;
1911 const uint16_t *tdist;
19121913+ if (!elf_fetch_bits (&pin, pinend, &val, &bits))
1914 return 0;
19151916 last = val & 1;
···1998 /* Read a Huffman encoding table. The various magic
1999 numbers here are from RFC 1951. */
20002001+ if (!elf_fetch_bits (&pin, pinend, &val, &bits))
2002 return 0;
20032004 nlit = (val & 0x1f) + 257;
···2023 /* There are always at least 4 elements in the
2024 table. */
20252026+ if (!elf_fetch_bits (&pin, pinend, &val, &bits))
2027 return 0;
20282029 codebits[16] = val & 7;
···2043 if (nclen == 5)
2044 goto codebitsdone;
20452046+ if (!elf_fetch_bits (&pin, pinend, &val, &bits))
2047 return 0;
20482049 codebits[7] = val & 7;
···2081 if (nclen == 10)
2082 goto codebitsdone;
20832084+ if (!elf_fetch_bits (&pin, pinend, &val, &bits))
2085 return 0;
20862087 codebits[11] = val & 7;
···2119 if (nclen == 15)
2120 goto codebitsdone;
21212122+ if (!elf_fetch_bits (&pin, pinend, &val, &bits))
2123 return 0;
21242125 codebits[2] = val & 7;
···2158 at the end of zdebug_table to hold them. */
21592160 plenbase = (((unsigned char *) zdebug_table)
2161+ + ZLIB_TABLE_CODELEN_OFFSET);
2162 plen = plenbase;
2163 plenend = plen + nlit + ndist;
2164 while (plen < plenend)
···2167 unsigned int b;
2168 uint16_t v;
21692170+ if (!elf_fetch_bits (&pin, pinend, &val, &bits))
2171 return 0;
21722173 t = zdebug_table[val & 0xff];
21742175 /* The compression here uses bit lengths up to 7, so
2176 a secondary table is never necessary. */
2177+ if (unlikely ((t & (1U << ZLIB_HUFFMAN_SECONDARY_SHIFT))
2178+ != 0))
2179 {
2180 elf_uncompress_failed ();
2181 return 0;
2182 }
21832184+ b = (t >> ZLIB_HUFFMAN_BITS_SHIFT) & ZLIB_HUFFMAN_BITS_MASK;
2185 val >>= b + 1;
2186 bits -= b + 1;
21872188+ v = t & ZLIB_HUFFMAN_VALUE_MASK;
2189 if (v < 16)
2190 *plen++ = v;
2191 else if (v == 16)
···2202 }
22032204 /* We used up to 7 bits since the last
2205+ elf_fetch_bits, so we have at least 8 bits
2206 available here. */
22072208 c = 3 + (val & 0x3);
···2237 /* Store zero 3 to 10 times. */
22382239 /* We used up to 7 bits since the last
2240+ elf_fetch_bits, so we have at least 8 bits
2241 available here. */
22422243 c = 3 + (val & 0x7);
···2283 /* Store zero 11 to 138 times. */
22842285 /* We used up to 7 bits since the last
2286+ elf_fetch_bits, so we have at least 8 bits
2287 available here. */
22882289 c = 11 + (val & 0x7f);
···2320 zdebug_table))
2321 return 0;
2322 if (!elf_zlib_inflate_table (plen + nlit, ndist, zdebug_table,
2323+ (zdebug_table
2324+ + ZLIB_HUFFMAN_TABLE_SIZE)))
2325 return 0;
2326 tlit = zdebug_table;
2327+ tdist = zdebug_table + ZLIB_HUFFMAN_TABLE_SIZE;
2328 }
23292330 /* Inflate values until the end of the block. This is the
···2337 uint16_t v;
2338 unsigned int lit;
23392340+ if (!elf_fetch_bits (&pin, pinend, &val, &bits))
2341 return 0;
23422343 t = tlit[val & 0xff];
2344+ b = (t >> ZLIB_HUFFMAN_BITS_SHIFT) & ZLIB_HUFFMAN_BITS_MASK;
2345+ v = t & ZLIB_HUFFMAN_VALUE_MASK;
23462347+ if ((t & (1U << ZLIB_HUFFMAN_SECONDARY_SHIFT)) == 0)
2348 {
2349 lit = v;
2350 val >>= b + 1;
···2353 else
2354 {
2355 t = tlit[v + 0x100 + ((val >> 8) & ((1U << b) - 1))];
2356+ b = (t >> ZLIB_HUFFMAN_BITS_SHIFT) & ZLIB_HUFFMAN_BITS_MASK;
2357+ lit = t & ZLIB_HUFFMAN_VALUE_MASK;
2358 val >>= b + 8;
2359 bits -= b + 8;
2360 }
···2399 {
2400 unsigned int extra;
24012402+ if (!elf_fetch_bits (&pin, pinend, &val, &bits))
2403 return 0;
24042405 /* This is an expression for the table of length
···2414 bits -= extra;
2415 }
24162417+ if (!elf_fetch_bits (&pin, pinend, &val, &bits))
2418 return 0;
24192420 t = tdist[val & 0xff];
2421+ b = (t >> ZLIB_HUFFMAN_BITS_SHIFT) & ZLIB_HUFFMAN_BITS_MASK;
2422+ v = t & ZLIB_HUFFMAN_VALUE_MASK;
24232424+ if ((t & (1U << ZLIB_HUFFMAN_SECONDARY_SHIFT)) == 0)
2425 {
2426 dist = v;
2427 val >>= b + 1;
···2430 else
2431 {
2432 t = tdist[v + 0x100 + ((val >> 8) & ((1U << b) - 1))];
2433+ b = ((t >> ZLIB_HUFFMAN_BITS_SHIFT)
2434+ & ZLIB_HUFFMAN_BITS_MASK);
2435+ dist = t & ZLIB_HUFFMAN_VALUE_MASK;
2436 val >>= b + 8;
2437 bits -= b + 8;
2438 }
···2472 {
2473 unsigned int extra;
24742475+ if (!elf_fetch_bits (&pin, pinend, &val, &bits))
2476 return 0;
24772478 /* This is an expression for the table of
···2677 return 1;
2678}
26792680+/* For working memory during zstd compression, we need
2681+ - a literal length FSE table: 512 64-bit values == 4096 bytes
2682+ - a match length FSE table: 512 64-bit values == 4096 bytes
2683+ - a offset FSE table: 256 64-bit values == 2048 bytes
2684+ - a Huffman tree: 2048 uint16_t values == 4096 bytes
2685+ - scratch space, one of
2686+ - to build an FSE table: 512 uint16_t values == 1024 bytes
2687+ - to build a Huffman tree: 512 uint16_t + 256 uint32_t == 2048 bytes
2688+*/
2689+2690+#define ZSTD_TABLE_SIZE \
2691+ (2 * 512 * sizeof (struct elf_zstd_fse_baseline_entry) \
2692+ + 256 * sizeof (struct elf_zstd_fse_baseline_entry) \
2693+ + 2048 * sizeof (uint16_t) \
2694+ + 512 * sizeof (uint16_t) + 256 * sizeof (uint32_t))
2695+2696+#define ZSTD_TABLE_LITERAL_FSE_OFFSET (0)
2697+2698+#define ZSTD_TABLE_MATCH_FSE_OFFSET \
2699+ (512 * sizeof (struct elf_zstd_fse_baseline_entry))
2700+2701+#define ZSTD_TABLE_OFFSET_FSE_OFFSET \
2702+ (ZSTD_TABLE_MATCH_FSE_OFFSET \
2703+ + 512 * sizeof (struct elf_zstd_fse_baseline_entry))
2704+2705+#define ZSTD_TABLE_HUFFMAN_OFFSET \
2706+ (ZSTD_TABLE_OFFSET_FSE_OFFSET \
2707+ + 256 * sizeof (struct elf_zstd_fse_baseline_entry))
2708+2709+#define ZSTD_TABLE_WORK_OFFSET \
2710+ (ZSTD_TABLE_HUFFMAN_OFFSET + 2048 * sizeof (uint16_t))
2711+2712+/* An entry in a zstd FSE table. */
2713+2714+struct elf_zstd_fse_entry
2715+{
2716+ /* The value that this FSE entry represents. */
2717+ unsigned char symbol;
2718+ /* The number of bits to read to determine the next state. */
2719+ unsigned char bits;
2720+ /* Add the bits to this base to get the next state. */
2721+ uint16_t base;
2722+};
2723+2724+static int
2725+elf_zstd_build_fse (const int16_t *, int, uint16_t *, int,
2726+ struct elf_zstd_fse_entry *);
2727+2728+/* Read a zstd FSE table and build the decoding table in *TABLE, updating *PPIN
2729+ as it reads. ZDEBUG_TABLE is scratch space; it must be enough for 512
2730+ uint16_t values (1024 bytes). MAXIDX is the maximum number of symbols
2731+ permitted. *TABLE_BITS is the maximum number of bits for symbols in the
2732+ table: the size of *TABLE is at least 1 << *TABLE_BITS. This updates
2733+ *TABLE_BITS to the actual number of bits. Returns 1 on success, 0 on
2734+ error. */
2735+2736+static int
2737+elf_zstd_read_fse (const unsigned char **ppin, const unsigned char *pinend,
2738+ uint16_t *zdebug_table, int maxidx,
2739+ struct elf_zstd_fse_entry *table, int *table_bits)
2740+{
2741+ const unsigned char *pin;
2742+ int16_t *norm;
2743+ uint16_t *next;
2744+ uint64_t val;
2745+ unsigned int bits;
2746+ int accuracy_log;
2747+ uint32_t remaining;
2748+ uint32_t threshold;
2749+ int bits_needed;
2750+ int idx;
2751+ int prev0;
2752+2753+ pin = *ppin;
2754+2755+ norm = (int16_t *) zdebug_table;
2756+ next = zdebug_table + 256;
2757+2758+ if (unlikely (pin + 3 >= pinend))
2759+ {
2760+ elf_uncompress_failed ();
2761+ return 0;
2762+ }
2763+2764+ /* Align PIN to a 32-bit boundary. */
2765+2766+ val = 0;
2767+ bits = 0;
2768+ while ((((uintptr_t) pin) & 3) != 0)
2769+ {
2770+ val |= (uint64_t)*pin << bits;
2771+ bits += 8;
2772+ ++pin;
2773+ }
2774+2775+ if (!elf_fetch_bits (&pin, pinend, &val, &bits))
2776+ return 0;
2777+2778+ accuracy_log = (val & 0xf) + 5;
2779+ if (accuracy_log > *table_bits)
2780+ {
2781+ elf_uncompress_failed ();
2782+ return 0;
2783+ }
2784+ *table_bits = accuracy_log;
2785+ val >>= 4;
2786+ bits -= 4;
2787+2788+ /* This code is mostly copied from the reference implementation. */
2789+2790+ /* The number of remaining probabilities, plus 1. This sets the number of
2791+ bits that need to be read for the next value. */
2792+ remaining = (1 << accuracy_log) + 1;
2793+2794+ /* The current difference between small and large values, which depends on
2795+ the number of remaining values. Small values use one less bit. */
2796+ threshold = 1 << accuracy_log;
2797+2798+ /* The number of bits used to compute threshold. */
2799+ bits_needed = accuracy_log + 1;
2800+2801+ /* The next character value. */
2802+ idx = 0;
2803+2804+ /* Whether the last count was 0. */
2805+ prev0 = 0;
2806+2807+ while (remaining > 1 && idx <= maxidx)
2808+ {
2809+ uint32_t max;
2810+ int32_t count;
2811+2812+ if (!elf_fetch_bits (&pin, pinend, &val, &bits))
2813+ return 0;
2814+2815+ if (prev0)
2816+ {
2817+ int zidx;
2818+2819+ /* Previous count was 0, so there is a 2-bit repeat flag. If the
2820+ 2-bit flag is 0b11, it adds 3 and then there is another repeat
2821+ flag. */
2822+ zidx = idx;
2823+ while ((val & 0xfff) == 0xfff)
2824+ {
2825+ zidx += 3 * 6;
2826+ if (!elf_fetch_bits (&pin, pinend, &val, &bits))
2827+ return 0;
2828+ val >>= 12;
2829+ bits -= 12;
2830+ }
2831+ while ((val & 3) == 3)
2832+ {
2833+ zidx += 3;
2834+ if (!elf_fetch_bits (&pin, pinend, &val, &bits))
2835+ return 0;
2836+ val >>= 2;
2837+ bits -= 2;
2838+ }
2839+ /* We have at least 13 bits here, don't need to fetch. */
2840+ zidx += val & 3;
2841+ val >>= 2;
2842+ bits -= 2;
2843+2844+ if (unlikely (zidx > maxidx))
2845+ {
2846+ elf_uncompress_failed ();
2847+ return 0;
2848+ }
2849+2850+ for (; idx < zidx; idx++)
2851+ norm[idx] = 0;
2852+2853+ prev0 = 0;
2854+ continue;
2855+ }
2856+2857+ max = (2 * threshold - 1) - remaining;
2858+ if ((val & (threshold - 1)) < max)
2859+ {
2860+ /* A small value. */
2861+ count = (int32_t) ((uint32_t) val & (threshold - 1));
2862+ val >>= bits_needed - 1;
2863+ bits -= bits_needed - 1;
2864+ }
2865+ else
2866+ {
2867+ /* A large value. */
2868+ count = (int32_t) ((uint32_t) val & (2 * threshold - 1));
2869+ if (count >= (int32_t) threshold)
2870+ count -= (int32_t) max;
2871+ val >>= bits_needed;
2872+ bits -= bits_needed;
2873+ }
2874+2875+ count--;
2876+ if (count >= 0)
2877+ remaining -= count;
2878+ else
2879+ remaining--;
2880+ if (unlikely (idx >= 256))
2881+ {
2882+ elf_uncompress_failed ();
2883+ return 0;
2884+ }
2885+ norm[idx] = (int16_t) count;
2886+ ++idx;
2887+2888+ prev0 = count == 0;
2889+2890+ while (remaining < threshold)
2891+ {
2892+ bits_needed--;
2893+ threshold >>= 1;
2894+ }
2895+ }
2896+2897+ if (unlikely (remaining != 1))
2898+ {
2899+ elf_uncompress_failed ();
2900+ return 0;
2901+ }
2902+2903+ /* If we've read ahead more than a byte, back up. */
2904+ while (bits >= 8)
2905+ {
2906+ --pin;
2907+ bits -= 8;
2908+ }
2909+2910+ *ppin = pin;
2911+2912+ for (; idx <= maxidx; idx++)
2913+ norm[idx] = 0;
2914+2915+ return elf_zstd_build_fse (norm, idx, next, *table_bits, table);
2916+}
/* Build the FSE decoding table from a list of probabilities.  This reads from
   NORM of length IDX, uses NEXT as scratch space, and writes to *TABLE, whose
   size is TABLE_BITS.  Returns 1 on success, 0 on error.  NORM entries are
   normalized probabilities; a negative entry denotes a "less than one"
   probability (see RFC 8878).  */

static int
elf_zstd_build_fse (const int16_t *norm, int idx, uint16_t *next,
                    int table_bits, struct elf_zstd_fse_entry *table)
{
  int table_size;
  int high_threshold;
  int i;
  int pos;
  int step;
  int mask;

  table_size = 1 << table_bits;
  high_threshold = table_size - 1;

  /* First pass: symbols with a "less than one" probability (negative NORM
     entry) each get a single slot at the very end of the table; NEXT[i]
     records how many slots symbol I will occupy (its starting state count).  */
  for (i = 0; i < idx; i++)
    {
      int16_t n;

      n = norm[i];
      if (n >= 0)
        next[i] = (uint16_t) n;
      else
        {
          table[high_threshold].symbol = (unsigned char) i;
          high_threshold--;
          next[i] = 1;
        }
    }

  /* Second pass: spread each symbol's slots over the table by repeatedly
     stepping POS by a stride that is coprime with the table size, skipping
     the slots above HIGH_THRESHOLD already taken by low-probability
     symbols.  This is the standard FSE symbol-spread.  */
  pos = 0;
  step = (table_size >> 1) + (table_size >> 3) + 3;
  mask = table_size - 1;
  for (i = 0; i < idx; i++)
    {
      int n;
      int j;

      n = (int) norm[i];
      for (j = 0; j < n; j++)
        {
          table[pos].symbol = (unsigned char) i;
          pos = (pos + step) & mask;
          while (unlikely (pos > high_threshold))
            pos = (pos + step) & mask;
        }
    }
  /* A correct distribution visits every slot exactly once and returns POS
     to zero; anything else means the probabilities were corrupt.  */
  if (pos != 0)
    {
      elf_uncompress_failed ();
      return 0;
    }

  /* Third pass: for each table slot compute the number of bits to read and
     the base of the next state.  NEXT[sym] counts up through the states
     assigned to SYM; the high bit of that count determines how many input
     bits the state transition consumes.  */
  for (i = 0; i < table_size; i++)
    {
      unsigned char sym;
      uint16_t next_state;
      int high_bit;
      int bits;

      sym = table[i].symbol;
      next_state = next[sym];
      ++next[sym];

      if (next_state == 0)
        {
          elf_uncompress_failed ();
          return 0;
        }
      high_bit = 31 - __builtin_clz (next_state);

      bits = table_bits - high_bit;
      table[i].bits = (unsigned char) bits;
      table[i].base = (uint16_t) ((next_state << bits) - table_size);
    }

  return 1;
}
/* Encode the baseline and bits into a single 32-bit value: the baseline in
   the low 24 bits, the extra-bit count in the high 8 bits.  */

#define ZSTD_ENCODE_BASELINE_BITS(baseline, basebits)   \
  ((uint32_t)(baseline) | ((uint32_t)(basebits) << 24))

/* Extract the baseline (low 24 bits) from an encoded value.  */

#define ZSTD_DECODE_BASELINE(baseline_basebits) \
  ((uint32_t)(baseline_basebits) & 0xffffff)

/* Extract the extra-bit count (high 8 bits) from an encoded value.  */

#define ZSTD_DECODE_BASEBITS(baseline_basebits) \
  ((uint32_t)(baseline_basebits) >> 24)
/* Given a literal length code, we need to read a number of bits and add that
   to a baseline.  For states 0 to 15 the baseline is the state and the number
   of bits is zero.  */

#define ZSTD_LITERAL_LENGTH_BASELINE_OFFSET (16)

/* Baseline/extra-bit pairs for literal length codes 16 and up, indexed by
   code minus ZSTD_LITERAL_LENGTH_BASELINE_OFFSET.  Each entry is packed with
   ZSTD_ENCODE_BASELINE_BITS(baseline, bits); values follow RFC 8878's
   literal length code table.  */

static const uint32_t elf_zstd_literal_length_base[] =
{
  ZSTD_ENCODE_BASELINE_BITS(16, 1),
  ZSTD_ENCODE_BASELINE_BITS(18, 1),
  ZSTD_ENCODE_BASELINE_BITS(20, 1),
  ZSTD_ENCODE_BASELINE_BITS(22, 1),
  ZSTD_ENCODE_BASELINE_BITS(24, 2),
  ZSTD_ENCODE_BASELINE_BITS(28, 2),
  ZSTD_ENCODE_BASELINE_BITS(32, 3),
  ZSTD_ENCODE_BASELINE_BITS(40, 3),
  ZSTD_ENCODE_BASELINE_BITS(48, 4),
  ZSTD_ENCODE_BASELINE_BITS(64, 6),
  ZSTD_ENCODE_BASELINE_BITS(128, 7),
  ZSTD_ENCODE_BASELINE_BITS(256, 8),
  ZSTD_ENCODE_BASELINE_BITS(512, 9),
  ZSTD_ENCODE_BASELINE_BITS(1024, 10),
  ZSTD_ENCODE_BASELINE_BITS(2048, 11),
  ZSTD_ENCODE_BASELINE_BITS(4096, 12),
  ZSTD_ENCODE_BASELINE_BITS(8192, 13),
  ZSTD_ENCODE_BASELINE_BITS(16384, 14),
  ZSTD_ENCODE_BASELINE_BITS(32768, 15),
  ZSTD_ENCODE_BASELINE_BITS(65536, 16)
};
/* The same applies to match length codes.  For states 0 to 31 the baseline is
   the state + 3 and the number of bits is zero.  */

#define ZSTD_MATCH_LENGTH_BASELINE_OFFSET (32)

/* Baseline/extra-bit pairs for match length codes 32 and up, indexed by
   code minus ZSTD_MATCH_LENGTH_BASELINE_OFFSET.  Each entry is packed with
   ZSTD_ENCODE_BASELINE_BITS(baseline, bits); values follow RFC 8878's
   match length code table.  */

static const uint32_t elf_zstd_match_length_base[] =
{
  ZSTD_ENCODE_BASELINE_BITS(35, 1),
  ZSTD_ENCODE_BASELINE_BITS(37, 1),
  ZSTD_ENCODE_BASELINE_BITS(39, 1),
  ZSTD_ENCODE_BASELINE_BITS(41, 1),
  ZSTD_ENCODE_BASELINE_BITS(43, 2),
  ZSTD_ENCODE_BASELINE_BITS(47, 2),
  ZSTD_ENCODE_BASELINE_BITS(51, 3),
  ZSTD_ENCODE_BASELINE_BITS(59, 3),
  ZSTD_ENCODE_BASELINE_BITS(67, 4),
  ZSTD_ENCODE_BASELINE_BITS(83, 4),
  ZSTD_ENCODE_BASELINE_BITS(99, 5),
  ZSTD_ENCODE_BASELINE_BITS(131, 7),
  ZSTD_ENCODE_BASELINE_BITS(259, 8),
  ZSTD_ENCODE_BASELINE_BITS(515, 9),
  ZSTD_ENCODE_BASELINE_BITS(1027, 10),
  ZSTD_ENCODE_BASELINE_BITS(2051, 11),
  ZSTD_ENCODE_BASELINE_BITS(4099, 12),
  ZSTD_ENCODE_BASELINE_BITS(8195, 13),
  ZSTD_ENCODE_BASELINE_BITS(16387, 14),
  ZSTD_ENCODE_BASELINE_BITS(32771, 15),
  ZSTD_ENCODE_BASELINE_BITS(65539, 16)
};
/* An entry in an FSE table used for literal/match/length values.  For these we
   have to map the symbol to a baseline value, and we have to read zero or more
   bits and add that value to the baseline value.  Rather than look the values
   up in a separate table, we grow the FSE table so that we get better memory
   caching.  */

struct elf_zstd_fse_baseline_entry
{
  /* The baseline for the value that this FSE entry represents.  */
  uint32_t baseline;
  /* The number of bits to read to add to the baseline.  */
  unsigned char basebits;
  /* The number of bits to read to determine the next state.  */
  unsigned char bits;
  /* Add the bits to this base to get the next state.  */
  uint16_t base;
};
3087+3088+/* Convert the literal length FSE table FSE_TABLE to an FSE baseline table at
3089+ BASELINE_TABLE. Note that FSE_TABLE and BASELINE_TABLE will overlap. */
3090+3091+static int
3092+elf_zstd_make_literal_baseline_fse (
3093+ const struct elf_zstd_fse_entry *fse_table,
3094+ int table_bits,
3095+ struct elf_zstd_fse_baseline_entry *baseline_table)
3096+{
3097+ size_t count;
3098+ const struct elf_zstd_fse_entry *pfse;
3099+ struct elf_zstd_fse_baseline_entry *pbaseline;
3100+3101+ /* Convert backward to avoid overlap. */
3102+3103+ count = 1U << table_bits;
3104+ pfse = fse_table + count;
3105+ pbaseline = baseline_table + count;
3106+ while (pfse > fse_table)
3107+ {
3108+ unsigned char symbol;
3109+ unsigned char bits;
3110+ uint16_t base;
3111+3112+ --pfse;
3113+ --pbaseline;
3114+ symbol = pfse->symbol;
3115+ bits = pfse->bits;
3116+ base = pfse->base;
3117+ if (symbol < ZSTD_LITERAL_LENGTH_BASELINE_OFFSET)
3118+ {
3119+ pbaseline->baseline = (uint32_t)symbol;
3120+ pbaseline->basebits = 0;
3121+ }
3122+ else
3123+ {
3124+ unsigned int idx;
3125+ uint32_t basebits;
3126+3127+ if (unlikely (symbol > 35))
3128+ {
3129+ elf_uncompress_failed ();
3130+ return 0;
3131+ }
3132+ idx = symbol - ZSTD_LITERAL_LENGTH_BASELINE_OFFSET;
3133+ basebits = elf_zstd_literal_length_base[idx];
3134+ pbaseline->baseline = ZSTD_DECODE_BASELINE(basebits);
3135+ pbaseline->basebits = ZSTD_DECODE_BASEBITS(basebits);
3136+ }
3137+ pbaseline->bits = bits;
3138+ pbaseline->base = base;
3139+ }
3140+3141+ return 1;
3142+}
3143+3144+/* Convert the offset length FSE table FSE_TABLE to an FSE baseline table at
3145+ BASELINE_TABLE. Note that FSE_TABLE and BASELINE_TABLE will overlap. */
3146+3147+static int
3148+elf_zstd_make_offset_baseline_fse (
3149+ const struct elf_zstd_fse_entry *fse_table,
3150+ int table_bits,
3151+ struct elf_zstd_fse_baseline_entry *baseline_table)
3152+{
3153+ size_t count;
3154+ const struct elf_zstd_fse_entry *pfse;
3155+ struct elf_zstd_fse_baseline_entry *pbaseline;
3156+3157+ /* Convert backward to avoid overlap. */
3158+3159+ count = 1U << table_bits;
3160+ pfse = fse_table + count;
3161+ pbaseline = baseline_table + count;
3162+ while (pfse > fse_table)
3163+ {
3164+ unsigned char symbol;
3165+ unsigned char bits;
3166+ uint16_t base;
3167+3168+ --pfse;
3169+ --pbaseline;
3170+ symbol = pfse->symbol;
3171+ bits = pfse->bits;
3172+ base = pfse->base;
3173+ if (unlikely (symbol > 31))
3174+ {
3175+ elf_uncompress_failed ();
3176+ return 0;
3177+ }
3178+3179+ /* The simple way to write this is
3180+3181+ pbaseline->baseline = (uint32_t)1 << symbol;
3182+ pbaseline->basebits = symbol;
3183+3184+ That will give us an offset value that corresponds to the one
3185+ described in the RFC. However, for offset values > 3, we have to
3186+ subtract 3. And for offset values 1, 2, 3 we use a repeated offset.
3187+ The baseline is always a power of 2, and is never 0, so for these low
3188+ values we will see one entry that is baseline 1, basebits 0, and one
3189+ entry that is baseline 2, basebits 1. All other entries will have
3190+ baseline >= 4 and basebits >= 2.
3191+3192+ So we can check for RFC offset <= 3 by checking for basebits <= 1.
3193+ And that means that we can subtract 3 here and not worry about doing
3194+ it in the hot loop. */
3195+3196+ pbaseline->baseline = (uint32_t)1 << symbol;
3197+ if (symbol >= 2)
3198+ pbaseline->baseline -= 3;
3199+ pbaseline->basebits = symbol;
3200+ pbaseline->bits = bits;
3201+ pbaseline->base = base;
3202+ }
3203+3204+ return 1;
3205+}
3206+3207+/* Convert the match length FSE table FSE_TABLE to an FSE baseline table at
3208+ BASELINE_TABLE. Note that FSE_TABLE and BASELINE_TABLE will overlap. */
3209+3210+static int
3211+elf_zstd_make_match_baseline_fse (
3212+ const struct elf_zstd_fse_entry *fse_table,
3213+ int table_bits,
3214+ struct elf_zstd_fse_baseline_entry *baseline_table)
3215+{
3216+ size_t count;
3217+ const struct elf_zstd_fse_entry *pfse;
3218+ struct elf_zstd_fse_baseline_entry *pbaseline;
3219+3220+ /* Convert backward to avoid overlap. */
3221+3222+ count = 1U << table_bits;
3223+ pfse = fse_table + count;
3224+ pbaseline = baseline_table + count;
3225+ while (pfse > fse_table)
3226+ {
3227+ unsigned char symbol;
3228+ unsigned char bits;
3229+ uint16_t base;
3230+3231+ --pfse;
3232+ --pbaseline;
3233+ symbol = pfse->symbol;
3234+ bits = pfse->bits;
3235+ base = pfse->base;
3236+ if (symbol < ZSTD_MATCH_LENGTH_BASELINE_OFFSET)
3237+ {
3238+ pbaseline->baseline = (uint32_t)symbol + 3;
3239+ pbaseline->basebits = 0;
3240+ }
3241+ else
3242+ {
3243+ unsigned int idx;
3244+ uint32_t basebits;
3245+3246+ if (unlikely (symbol > 52))
3247+ {
3248+ elf_uncompress_failed ();
3249+ return 0;
3250+ }
3251+ idx = symbol - ZSTD_MATCH_LENGTH_BASELINE_OFFSET;
3252+ basebits = elf_zstd_match_length_base[idx];
3253+ pbaseline->baseline = ZSTD_DECODE_BASELINE(basebits);
3254+ pbaseline->basebits = ZSTD_DECODE_BASEBITS(basebits);
3255+ }
3256+ pbaseline->bits = bits;
3257+ pbaseline->base = base;
3258+ }
3259+3260+ return 1;
3261+}
#ifdef BACKTRACE_GENERATE_ZSTD_FSE_TABLES

/* Used to generate the predefined FSE decoding tables for zstd.  This code
   is normally compiled out; it is only built when regenerating the static
   tables below.  */

#include <stdio.h>

/* These values are straight from RFC 8878.  A -1 entry denotes a "less than
   one" probability.  */

/* Predefined literal length code probabilities.  */
static int16_t lit[36] =
{
  4, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1,
  2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 1, 1, 1, 1, 1,
  -1,-1,-1,-1
};

/* Predefined match length code probabilities.  */
static int16_t match[53] =
{
  1, 4, 3, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,-1,-1,
  -1,-1,-1,-1,-1
};

/* Predefined offset code probabilities.  */
static int16_t offset[29] =
{
  1, 1, 1, 1, 1, 1, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1,-1,-1,-1,-1,-1
};

/* Scratch space for elf_zstd_build_fse.  */
static uint16_t next[256];
3293+3294+static void
3295+print_table (const struct elf_zstd_fse_baseline_entry *table, size_t size)
3296+{
3297+ size_t i;
3298+3299+ printf ("{\n");
3300+ for (i = 0; i < size; i += 3)
3301+ {
3302+ int j;
3303+3304+ printf (" ");
3305+ for (j = 0; j < 3 && i + j < size; ++j)
3306+ printf (" { %u, %d, %d, %d },", table[i + j].baseline,
3307+ table[i + j].basebits, table[i + j].bits,
3308+ table[i + j].base);
3309+ printf ("\n");
3310+ }
3311+ printf ("};\n");
3312+}
/* Build each predefined FSE table, convert it to a baseline table, and
   print the result as a C initializer suitable for pasting below.  */

int
main ()
{
  struct elf_zstd_fse_entry lit_table[64];
  struct elf_zstd_fse_baseline_entry lit_baseline[64];
  struct elf_zstd_fse_entry match_table[64];
  struct elf_zstd_fse_baseline_entry match_baseline[64];
  struct elf_zstd_fse_entry offset_table[32];
  struct elf_zstd_fse_baseline_entry offset_baseline[32];

  /* Literal length table: 6 table bits per RFC 8878.  */
  if (!elf_zstd_build_fse (lit, sizeof lit / sizeof lit[0], next,
                           6, lit_table))
    {
      fprintf (stderr, "elf_zstd_build_fse failed\n");
      exit (EXIT_FAILURE);
    }

  if (!elf_zstd_make_literal_baseline_fse (lit_table, 6, lit_baseline))
    {
      fprintf (stderr, "elf_zstd_make_literal_baseline_fse failed\n");
      exit (EXIT_FAILURE);
    }

  printf ("static const struct elf_zstd_fse_baseline_entry "
          "elf_zstd_lit_table[64] =\n");
  print_table (lit_baseline,
               sizeof lit_baseline / sizeof lit_baseline[0]);
  printf ("\n");

  /* Match length table: 6 table bits per RFC 8878.  */
  if (!elf_zstd_build_fse (match, sizeof match / sizeof match[0], next,
                           6, match_table))
    {
      fprintf (stderr, "elf_zstd_build_fse failed\n");
      exit (EXIT_FAILURE);
    }

  if (!elf_zstd_make_match_baseline_fse (match_table, 6, match_baseline))
    {
      fprintf (stderr, "elf_zstd_make_match_baseline_fse failed\n");
      exit (EXIT_FAILURE);
    }

  printf ("static const struct elf_zstd_fse_baseline_entry "
          "elf_zstd_match_table[64] =\n");
  print_table (match_baseline,
               sizeof match_baseline / sizeof match_baseline[0]);
  printf ("\n");

  /* Offset table: 5 table bits per RFC 8878.  */
  if (!elf_zstd_build_fse (offset, sizeof offset / sizeof offset[0], next,
                           5, offset_table))
    {
      fprintf (stderr, "elf_zstd_build_fse failed\n");
      exit (EXIT_FAILURE);
    }

  if (!elf_zstd_make_offset_baseline_fse (offset_table, 5, offset_baseline))
    {
      fprintf (stderr, "elf_zstd_make_offset_baseline_fse failed\n");
      exit (EXIT_FAILURE);
    }

  printf ("static const struct elf_zstd_fse_baseline_entry "
          "elf_zstd_offset_table[32] =\n");
  print_table (offset_baseline,
               sizeof offset_baseline / sizeof offset_baseline[0]);
  printf ("\n");

  return 0;
}

#endif
/* The fixed tables generated by the #ifdef'ed out main function
   above.  Each entry is { baseline, basebits, bits, base } — see
   struct elf_zstd_fse_baseline_entry.  Do not edit by hand; regenerate
   with BACKTRACE_GENERATE_ZSTD_FSE_TABLES instead.  */

static const struct elf_zstd_fse_baseline_entry elf_zstd_lit_table[64] =
{
  { 0, 0, 4, 0 }, { 0, 0, 4, 16 }, { 1, 0, 5, 32 },
  { 3, 0, 5, 0 }, { 4, 0, 5, 0 }, { 6, 0, 5, 0 },
  { 7, 0, 5, 0 }, { 9, 0, 5, 0 }, { 10, 0, 5, 0 },
  { 12, 0, 5, 0 }, { 14, 0, 6, 0 }, { 16, 1, 5, 0 },
  { 20, 1, 5, 0 }, { 22, 1, 5, 0 }, { 28, 2, 5, 0 },
  { 32, 3, 5, 0 }, { 48, 4, 5, 0 }, { 64, 6, 5, 32 },
  { 128, 7, 5, 0 }, { 256, 8, 6, 0 }, { 1024, 10, 6, 0 },
  { 4096, 12, 6, 0 }, { 0, 0, 4, 32 }, { 1, 0, 4, 0 },
  { 2, 0, 5, 0 }, { 4, 0, 5, 32 }, { 5, 0, 5, 0 },
  { 7, 0, 5, 32 }, { 8, 0, 5, 0 }, { 10, 0, 5, 32 },
  { 11, 0, 5, 0 }, { 13, 0, 6, 0 }, { 16, 1, 5, 32 },
  { 18, 1, 5, 0 }, { 22, 1, 5, 32 }, { 24, 2, 5, 0 },
  { 32, 3, 5, 32 }, { 40, 3, 5, 0 }, { 64, 6, 4, 0 },
  { 64, 6, 4, 16 }, { 128, 7, 5, 32 }, { 512, 9, 6, 0 },
  { 2048, 11, 6, 0 }, { 0, 0, 4, 48 }, { 1, 0, 4, 16 },
  { 2, 0, 5, 32 }, { 3, 0, 5, 32 }, { 5, 0, 5, 32 },
  { 6, 0, 5, 32 }, { 8, 0, 5, 32 }, { 9, 0, 5, 32 },
  { 11, 0, 5, 32 }, { 12, 0, 5, 32 }, { 15, 0, 6, 0 },
  { 18, 1, 5, 32 }, { 20, 1, 5, 32 }, { 24, 2, 5, 32 },
  { 28, 2, 5, 32 }, { 40, 3, 5, 32 }, { 48, 4, 5, 32 },
  { 65536, 16, 6, 0 }, { 32768, 15, 6, 0 }, { 16384, 14, 6, 0 },
  { 8192, 13, 6, 0 },
};
/* Generated match length baseline table; entries are
   { baseline, basebits, bits, base }.  Do not edit by hand; regenerate
   with BACKTRACE_GENERATE_ZSTD_FSE_TABLES.  */
static const struct elf_zstd_fse_baseline_entry elf_zstd_match_table[64] =
{
  { 3, 0, 6, 0 }, { 4, 0, 4, 0 }, { 5, 0, 5, 32 },
  { 6, 0, 5, 0 }, { 8, 0, 5, 0 }, { 9, 0, 5, 0 },
  { 11, 0, 5, 0 }, { 13, 0, 6, 0 }, { 16, 0, 6, 0 },
  { 19, 0, 6, 0 }, { 22, 0, 6, 0 }, { 25, 0, 6, 0 },
  { 28, 0, 6, 0 }, { 31, 0, 6, 0 }, { 34, 0, 6, 0 },
  { 37, 1, 6, 0 }, { 41, 1, 6, 0 }, { 47, 2, 6, 0 },
  { 59, 3, 6, 0 }, { 83, 4, 6, 0 }, { 131, 7, 6, 0 },
  { 515, 9, 6, 0 }, { 4, 0, 4, 16 }, { 5, 0, 4, 0 },
  { 6, 0, 5, 32 }, { 7, 0, 5, 0 }, { 9, 0, 5, 32 },
  { 10, 0, 5, 0 }, { 12, 0, 6, 0 }, { 15, 0, 6, 0 },
  { 18, 0, 6, 0 }, { 21, 0, 6, 0 }, { 24, 0, 6, 0 },
  { 27, 0, 6, 0 }, { 30, 0, 6, 0 }, { 33, 0, 6, 0 },
  { 35, 1, 6, 0 }, { 39, 1, 6, 0 }, { 43, 2, 6, 0 },
  { 51, 3, 6, 0 }, { 67, 4, 6, 0 }, { 99, 5, 6, 0 },
  { 259, 8, 6, 0 }, { 4, 0, 4, 32 }, { 4, 0, 4, 48 },
  { 5, 0, 4, 16 }, { 7, 0, 5, 32 }, { 8, 0, 5, 32 },
  { 10, 0, 5, 32 }, { 11, 0, 5, 32 }, { 14, 0, 6, 0 },
  { 17, 0, 6, 0 }, { 20, 0, 6, 0 }, { 23, 0, 6, 0 },
  { 26, 0, 6, 0 }, { 29, 0, 6, 0 }, { 32, 0, 6, 0 },
  { 65539, 16, 6, 0 }, { 32771, 15, 6, 0 }, { 16387, 14, 6, 0 },
  { 8195, 13, 6, 0 }, { 4099, 12, 6, 0 }, { 2051, 11, 6, 0 },
  { 1027, 10, 6, 0 },
};
/* Generated offset baseline table; entries are
   { baseline, basebits, bits, base }.  Do not edit by hand; regenerate
   with BACKTRACE_GENERATE_ZSTD_FSE_TABLES.  */
static const struct elf_zstd_fse_baseline_entry elf_zstd_offset_table[32] =
{
  { 1, 0, 5, 0 }, { 64, 6, 4, 0 }, { 512, 9, 5, 0 },
  { 32768, 15, 5, 0 }, { 2097152, 21, 5, 0 }, { 8, 3, 5, 0 },
  { 128, 7, 4, 0 }, { 4096, 12, 5, 0 }, { 262144, 18, 5, 0 },
  { 8388608, 23, 5, 0 }, { 32, 5, 5, 0 }, { 256, 8, 4, 0 },
  { 16384, 14, 5, 0 }, { 1048576, 20, 5, 0 }, { 4, 2, 5, 0 },
  { 128, 7, 4, 16 }, { 2048, 11, 5, 0 }, { 131072, 17, 5, 0 },
  { 4194304, 22, 5, 0 }, { 16, 4, 5, 0 }, { 256, 8, 4, 16 },
  { 8192, 13, 5, 0 }, { 524288, 19, 5, 0 }, { 2, 1, 5, 0 },
  { 64, 6, 4, 16 }, { 1024, 10, 5, 0 }, { 65536, 16, 5, 0 },
  { 268435456, 28, 5, 0 }, { 134217728, 27, 5, 0 }, { 67108864, 26, 5, 0 },
  { 33554432, 25, 5, 0 }, { 16777216, 24, 5, 0 },
};
/* Read a zstd Huffman table and build the decoding table in *TABLE, reading
   and updating *PPIN.  This sets *PTABLE_BITS to the number of bits of the
   table, such that the table length is 1 << *TABLE_BITS.  ZDEBUG_TABLE is
   scratch space; it must be enough for 512 uint16_t values + 256 32-bit values
   (2048 bytes).  Returns 1 on success, 0 on error.

   Each resulting TABLE entry packs the decoded symbol in the high byte and
   the code length in bits in the low byte (see the table-filling loop at the
   end).  */

static int
elf_zstd_read_huff (const unsigned char **ppin, const unsigned char *pinend,
                    uint16_t *zdebug_table, uint16_t *table, int *ptable_bits)
{
  const unsigned char *pin;
  unsigned char hdr;
  unsigned char *weights;
  size_t count;
  uint32_t *weight_mark;
  size_t i;
  uint32_t weight_mask;
  size_t table_bits;

  pin = *ppin;
  if (unlikely (pin >= pinend))
    {
      elf_uncompress_failed ();
      return 0;
    }
  /* The header byte selects the weight encoding: < 128 means the weights
     are FSE-compressed, otherwise they are stored directly, 4 bits each.  */
  hdr = *pin;
  ++pin;

  /* The weight array reuses the caller's scratch space.  */
  weights = (unsigned char *) zdebug_table;

  if (hdr < 128)
    {
      /* Table is compressed using FSE.  */

      struct elf_zstd_fse_entry *fse_table;
      int fse_table_bits;
      uint16_t *scratch;
      const unsigned char *pfse;
      const unsigned char *pback;
      uint64_t val;
      unsigned int bits;
      unsigned int state1, state2;

      /* SCRATCH is used temporarily by elf_zstd_read_fse.  It overlaps
         WEIGHTS.  */
      scratch = zdebug_table;
      fse_table = (struct elf_zstd_fse_entry *) (scratch + 512);
      fse_table_bits = 6;

      pfse = pin;
      if (!elf_zstd_read_fse (&pfse, pinend, scratch, 255, fse_table,
                              &fse_table_bits))
        return 0;

      /* HDR is the total byte count of the FSE description plus the
         compressed weight stream.  */
      if (unlikely (pin + hdr > pinend))
        {
          elf_uncompress_failed ();
          return 0;
        }

      /* We no longer need SCRATCH.  Start recording weights.  We need up to
         256 bytes of weights and 64 bytes of rank counts, so it won't overlap
         FSE_TABLE.  */

      /* The compressed stream is read backward, starting at its last
         byte.  */
      pback = pin + hdr - 1;

      if (!elf_fetch_backward_init (&pback, pfse, &val, &bits))
        return 0;

      /* Prime both FSE states from the first bits of the stream.  */
      bits -= fse_table_bits;
      state1 = (val >> bits) & ((1U << fse_table_bits) - 1);
      bits -= fse_table_bits;
      state2 = (val >> bits) & ((1U << fse_table_bits) - 1);

      /* There are two independent FSE streams, tracked by STATE1 and STATE2.
         We decode them alternately.  */

      count = 0;
      while (1)
        {
          struct elf_zstd_fse_entry *pt;
          uint64_t v;

          pt = &fse_table[state1];

          /* When fewer bits remain than the next state transition needs,
             the stream is exhausted: flush the final symbol from each
             state and stop.  (Same condition as the STATE2 check below;
             only the placement of the unlikely() hint differs.)  */
          if (unlikely (pin < pinend) && bits < pt->bits)
            {
              if (unlikely (count >= 254))
                {
                  elf_uncompress_failed ();
                  return 0;
                }
              weights[count] = (unsigned char) pt->symbol;
              weights[count + 1] = (unsigned char) fse_table[state2].symbol;
              count += 2;
              break;
            }

          if (unlikely (pt->bits == 0))
            v = 0;
          else
            {
              if (!elf_fetch_bits_backward (&pback, pfse, &val, &bits))
                return 0;

              bits -= pt->bits;
              v = (val >> bits) & (((uint64_t)1 << pt->bits) - 1);
            }

          state1 = pt->base + v;

          if (unlikely (count >= 255))
            {
              elf_uncompress_failed ();
              return 0;
            }

          weights[count] = pt->symbol;
          ++count;

          pt = &fse_table[state2];

          if (unlikely (pin < pinend && bits < pt->bits))
            {
              if (unlikely (count >= 254))
                {
                  elf_uncompress_failed ();
                  return 0;
                }
              weights[count] = (unsigned char) pt->symbol;
              weights[count + 1] = (unsigned char) fse_table[state1].symbol;
              count += 2;
              break;
            }

          if (unlikely (pt->bits == 0))
            v = 0;
          else
            {
              if (!elf_fetch_bits_backward (&pback, pfse, &val, &bits))
                return 0;

              bits -= pt->bits;
              v = (val >> bits) & (((uint64_t)1 << pt->bits) - 1);
            }

          state2 = pt->base + v;

          if (unlikely (count >= 255))
            {
              elf_uncompress_failed ();
              return 0;
            }

          weights[count] = pt->symbol;
          ++count;
        }

      pin += hdr;
    }
  else
    {
      /* Table is not compressed.  Each weight is 4 bits.  */

      count = hdr - 127;
      if (unlikely (pin + ((count + 1) / 2) >= pinend))
        {
          elf_uncompress_failed ();
          return 0;
        }
      for (i = 0; i < count; i += 2)
        {
          unsigned char b;

          b = *pin;
          ++pin;
          weights[i] = b >> 4;
          weights[i + 1] = b & 0xf;
        }
    }

  /* Count how many symbols have each weight (WEIGHT_MARK[w]) and sum the
     implied code-space usage of all weights (WEIGHT_MASK).  */
  weight_mark = (uint32_t *) (weights + 256);
  memset (weight_mark, 0, 12 * sizeof (uint32_t));
  weight_mask = 0;
  for (i = 0; i < count; ++i)
    {
      unsigned char w;

      w = weights[i];
      if (unlikely (w > 12))
        {
          elf_uncompress_failed ();
          return 0;
        }
      ++weight_mark[w];
      if (w > 0)
        weight_mask += 1U << (w - 1);
    }
  if (unlikely (weight_mask == 0))
    {
      elf_uncompress_failed ();
      return 0;
    }

  table_bits = 32 - __builtin_clz (weight_mask);
  if (unlikely (table_bits > 11))
    {
      elf_uncompress_failed ();
      return 0;
    }

  /* Work out the last weight value, which is omitted because the weights must
     sum to a power of two.  */
  {
    uint32_t left;
    uint32_t high_bit;

    left = ((uint32_t)1 << table_bits) - weight_mask;
    if (left == 0)
      {
        elf_uncompress_failed ();
        return 0;
      }
    /* The remaining code space must itself be a power of two, otherwise
       the weights are invalid.  */
    high_bit = 31 - __builtin_clz (left);
    if (((uint32_t)1 << high_bit) != left)
      {
        elf_uncompress_failed ();
        return 0;
      }

    if (unlikely (count >= 256))
      {
        elf_uncompress_failed ();
        return 0;
      }

    weights[count] = high_bit + 1;
    ++count;
    ++weight_mark[high_bit + 1];
  }

  /* There must be an even number (at least 2) of weight-1 symbols.  */
  if (weight_mark[1] < 2 || (weight_mark[1] & 1) != 0)
    {
      elf_uncompress_failed ();
      return 0;
    }

  /* Change WEIGHT_MARK from a count of weights to the index of the first
     symbol for that weight.  We shift the indexes to also store how many we
     have seen so far, below.  */
  {
    uint32_t next;

    next = 0;
    for (i = 0; i < table_bits; ++i)
      {
        uint32_t cur;

        cur = next;
        next += weight_mark[i + 1] << i;
        weight_mark[i + 1] = cur;
      }
  }

  /* Fill the decoding table: a symbol of weight W occupies a run of
     1 << (W - 1) consecutive entries, each packing the symbol (high byte)
     and its code length in bits (low byte).  */
  for (i = 0; i < count; ++i)
    {
      unsigned char weight;
      uint32_t length;
      uint16_t tval;
      size_t start;
      uint32_t j;

      weight = weights[i];
      if (weight == 0)
        continue;

      length = 1U << (weight - 1);
      tval = (i << 8) | (table_bits + 1 - weight);
      start = weight_mark[weight];
      for (j = 0; j < length; ++j)
        table[start + j] = tval;
      weight_mark[weight] += length;
    }

  *ppin = pin;
  *ptable_bits = (int)table_bits;

  return 1;
}
3745+3746+/* Read and decompress the literals and store them ending at POUTEND. This
3747+ works because we are going to use all the literals in the output, so they
3748+ must fit into the output buffer. HUFFMAN_TABLE, and PHUFFMAN_TABLE_BITS
3749+ store the Huffman table across calls. SCRATCH is used to read a Huffman
3750+ table. Store the start of the decompressed literals in *PPLIT. Update
3751+ *PPIN. Return 1 on success, 0 on error. */
3752+3753+static int
3754+elf_zstd_read_literals (const unsigned char **ppin,
3755+ const unsigned char *pinend,
3756+ unsigned char *pout,
3757+ unsigned char *poutend,
3758+ uint16_t *scratch,
3759+ uint16_t *huffman_table,
3760+ int *phuffman_table_bits,
3761+ unsigned char **pplit)
3762+{
3763+ const unsigned char *pin;
3764+ unsigned char *plit;
3765+ unsigned char hdr;
3766+ uint32_t regenerated_size;
3767+ uint32_t compressed_size;
3768+ int streams;
3769+ uint32_t total_streams_size;
3770+ unsigned int huffman_table_bits;
3771+ uint64_t huffman_mask;
3772+3773+ pin = *ppin;
3774+ if (unlikely (pin >= pinend))
3775+ {
3776+ elf_uncompress_failed ();
3777+ return 0;
3778+ }
3779+ hdr = *pin;
3780+ ++pin;
3781+3782+ if ((hdr & 3) == 0 || (hdr & 3) == 1)
3783+ {
3784+ int raw;
3785+3786+ /* Raw_literals_Block or RLE_Literals_Block */
3787+3788+ raw = (hdr & 3) == 0;
3789+3790+ switch ((hdr >> 2) & 3)
3791+ {
3792+ case 0: case 2:
3793+ regenerated_size = hdr >> 3;
3794+ break;
3795+ case 1:
3796+ if (unlikely (pin >= pinend))
3797+ {
3798+ elf_uncompress_failed ();
3799+ return 0;
3800+ }
3801+ regenerated_size = (hdr >> 4) + ((uint32_t)(*pin) << 4);
3802+ ++pin;
3803+ break;
3804+ case 3:
3805+ if (unlikely (pin + 1 >= pinend))
3806+ {
3807+ elf_uncompress_failed ();
3808+ return 0;
3809+ }
3810+ regenerated_size = ((hdr >> 4)
3811+ + ((uint32_t)*pin << 4)
3812+ + ((uint32_t)pin[1] << 12));
3813+ pin += 2;
3814+ break;
3815+ default:
3816+ elf_uncompress_failed ();
3817+ return 0;
3818+ }
3819+3820+ if (unlikely ((size_t)(poutend - pout) < regenerated_size))
3821+ {
3822+ elf_uncompress_failed ();
3823+ return 0;
3824+ }
3825+3826+ plit = poutend - regenerated_size;
3827+3828+ if (raw)
3829+ {
3830+ if (unlikely (pin + regenerated_size >= pinend))
3831+ {
3832+ elf_uncompress_failed ();
3833+ return 0;
3834+ }
3835+ memcpy (plit, pin, regenerated_size);
3836+ pin += regenerated_size;
3837+ }
3838+ else
3839+ {
3840+ if (pin >= pinend)
3841+ {
3842+ elf_uncompress_failed ();
3843+ return 0;
3844+ }
3845+ memset (plit, *pin, regenerated_size);
3846+ ++pin;
3847+ }
3848+3849+ *ppin = pin;
3850+ *pplit = plit;
3851+3852+ return 1;
3853+ }
3854+3855+ /* Compressed_Literals_Block or Treeless_Literals_Block */
3856+3857+ switch ((hdr >> 2) & 3)
3858+ {
3859+ case 0: case 1:
3860+ if (unlikely (pin + 1 >= pinend))
3861+ {
3862+ elf_uncompress_failed ();
3863+ return 0;
3864+ }
3865+ regenerated_size = (hdr >> 4) | ((uint32_t)(*pin & 0x3f) << 4);
3866+ compressed_size = (uint32_t)*pin >> 6 | ((uint32_t)pin[1] << 2);
3867+ pin += 2;
3868+ streams = ((hdr >> 2) & 3) == 0 ? 1 : 4;
3869+ break;
3870+ case 2:
3871+ if (unlikely (pin + 2 >= pinend))
3872+ {
3873+ elf_uncompress_failed ();
3874+ return 0;
3875+ }
3876+ regenerated_size = (((uint32_t)hdr >> 4)
3877+ | ((uint32_t)*pin << 4)
3878+ | (((uint32_t)pin[1] & 3) << 12));
3879+ compressed_size = (((uint32_t)pin[1] >> 2)
3880+ | ((uint32_t)pin[2] << 6));
3881+ pin += 3;
3882+ streams = 4;
3883+ break;
3884+ case 3:
3885+ if (unlikely (pin + 3 >= pinend))
3886+ {
3887+ elf_uncompress_failed ();
3888+ return 0;
3889+ }
3890+ regenerated_size = (((uint32_t)hdr >> 4)
3891+ | ((uint32_t)*pin << 4)
3892+ | (((uint32_t)pin[1] & 0x3f) << 12));
3893+ compressed_size = (((uint32_t)pin[1] >> 6)
3894+ | ((uint32_t)pin[2] << 2)
3895+ | ((uint32_t)pin[3] << 10));
3896+ pin += 4;
3897+ streams = 4;
3898+ break;
3899+ default:
3900+ elf_uncompress_failed ();
3901+ return 0;
3902+ }
3903+3904+ if (unlikely (pin + compressed_size > pinend))
3905+ {
3906+ elf_uncompress_failed ();
3907+ return 0;
3908+ }
3909+3910+ pinend = pin + compressed_size;
3911+ *ppin = pinend;
3912+3913+ if (unlikely ((size_t)(poutend - pout) < regenerated_size))
3914+ {
3915+ elf_uncompress_failed ();
3916+ return 0;
3917+ }
3918+3919+ plit = poutend - regenerated_size;
3920+3921+ *pplit = plit;
3922+3923+ total_streams_size = compressed_size;
3924+ if ((hdr & 3) == 2)
3925+ {
3926+ const unsigned char *ptable;
3927+3928+ /* Compressed_Literals_Block. Read Huffman tree. */
3929+3930+ ptable = pin;
3931+ if (!elf_zstd_read_huff (&ptable, pinend, scratch, huffman_table,
3932+ phuffman_table_bits))
3933+ return 0;
3934+3935+ if (unlikely (total_streams_size < (size_t)(ptable - pin)))
3936+ {
3937+ elf_uncompress_failed ();
3938+ return 0;
3939+ }
3940+3941+ total_streams_size -= ptable - pin;
3942+ pin = ptable;
3943+ }
3944+ else
3945+ {
3946+ /* Treeless_Literals_Block. Reuse previous Huffman tree. */
3947+ if (unlikely (*phuffman_table_bits == 0))
3948+ {
3949+ elf_uncompress_failed ();
3950+ return 0;
3951+ }
3952+ }
3953+3954+ /* Decompress COMPRESSED_SIZE bytes of data at PIN using the huffman table,
3955+ storing REGENERATED_SIZE bytes of decompressed data at PLIT. */
3956+3957+ huffman_table_bits = (unsigned int)*phuffman_table_bits;
3958+ huffman_mask = ((uint64_t)1 << huffman_table_bits) - 1;
3959+3960+ if (streams == 1)
3961+ {
3962+ const unsigned char *pback;
3963+ const unsigned char *pbackend;
3964+ uint64_t val;
3965+ unsigned int bits;
3966+ uint32_t i;
3967+3968+ pback = pin + compressed_size - 1;
3969+ pbackend = pin;
3970+ if (!elf_fetch_backward_init (&pback, pbackend, &val, &bits))
3971+ return 0;
3972+3973+ /* This is one of the inner loops of the decompression algorithm, so we
3974+ put some effort into optimization. We can't get more than 64 bytes
3975+ from a single call to elf_fetch_bits_backward, and we can't subtract
3976+ more than 11 bits at a time. */
3977+3978+ if (regenerated_size >= 64)
3979+ {
3980+ unsigned char *plitstart;
3981+ unsigned char *plitstop;
3982+3983+ plitstart = plit;
3984+ plitstop = plit + regenerated_size - 64;
3985+ while (plit < plitstop)
3986+ {
3987+ uint16_t t;
3988+3989+ if (!elf_fetch_bits_backward (&pback, pbackend, &val, &bits))
3990+ return 0;
3991+3992+ if (bits < 16)
3993+ break;
3994+3995+ while (bits >= 33)
3996+ {
3997+ t = huffman_table[(val >> (bits - huffman_table_bits))
3998+ & huffman_mask];
3999+ *plit = t >> 8;
4000+ ++plit;
4001+ bits -= t & 0xff;
4002+4003+ t = huffman_table[(val >> (bits - huffman_table_bits))
4004+ & huffman_mask];
4005+ *plit = t >> 8;
4006+ ++plit;
4007+ bits -= t & 0xff;
4008+4009+ t = huffman_table[(val >> (bits - huffman_table_bits))
4010+ & huffman_mask];
4011+ *plit = t >> 8;
4012+ ++plit;
4013+ bits -= t & 0xff;
4014+ }
4015+4016+ while (bits > 11)
4017+ {
4018+ t = huffman_table[(val >> (bits - huffman_table_bits))
4019+ & huffman_mask];
4020+ *plit = t >> 8;
4021+ ++plit;
4022+ bits -= t & 0xff;
4023+ }
4024+ }
4025+4026+ regenerated_size -= plit - plitstart;
4027+ }
4028+4029+ for (i = 0; i < regenerated_size; ++i)
4030+ {
4031+ uint16_t t;
4032+4033+ if (!elf_fetch_bits_backward (&pback, pbackend, &val, &bits))
4034+ return 0;
4035+4036+ if (unlikely (bits < huffman_table_bits))
4037+ {
4038+ t = huffman_table[(val << (huffman_table_bits - bits))
4039+ & huffman_mask];
4040+ if (unlikely (bits < (t & 0xff)))
4041+ {
4042+ elf_uncompress_failed ();
4043+ return 0;
4044+ }
4045+ }
4046+ else
4047+ t = huffman_table[(val >> (bits - huffman_table_bits))
4048+ & huffman_mask];
4049+4050+ *plit = t >> 8;
4051+ ++plit;
4052+ bits -= t & 0xff;
4053+ }
4054+4055+ return 1;
4056+ }
4057+4058+ {
4059+ uint32_t stream_size1, stream_size2, stream_size3, stream_size4;
4060+ uint32_t tot;
4061+ const unsigned char *pback1, *pback2, *pback3, *pback4;
4062+ const unsigned char *pbackend1, *pbackend2, *pbackend3, *pbackend4;
4063+ uint64_t val1, val2, val3, val4;
4064+ unsigned int bits1, bits2, bits3, bits4;
4065+ unsigned char *plit1, *plit2, *plit3, *plit4;
4066+ uint32_t regenerated_stream_size;
4067+ uint32_t regenerated_stream_size4;
4068+ uint16_t t1, t2, t3, t4;
4069+ uint32_t i;
4070+ uint32_t limit;
4071+4072+ /* Read jump table. */
4073+ if (unlikely (pin + 5 >= pinend))
4074+ {
4075+ elf_uncompress_failed ();
4076+ return 0;
4077+ }
4078+ stream_size1 = (uint32_t)*pin | ((uint32_t)pin[1] << 8);
4079+ pin += 2;
4080+ stream_size2 = (uint32_t)*pin | ((uint32_t)pin[1] << 8);
4081+ pin += 2;
4082+ stream_size3 = (uint32_t)*pin | ((uint32_t)pin[1] << 8);
4083+ pin += 2;
4084+ tot = stream_size1 + stream_size2 + stream_size3;
4085+ if (unlikely (tot > total_streams_size - 6))
4086+ {
4087+ elf_uncompress_failed ();
4088+ return 0;
4089+ }
4090+ stream_size4 = total_streams_size - 6 - tot;
4091+4092+ pback1 = pin + stream_size1 - 1;
4093+ pbackend1 = pin;
4094+4095+ pback2 = pback1 + stream_size2;
4096+ pbackend2 = pback1 + 1;
4097+4098+ pback3 = pback2 + stream_size3;
4099+ pbackend3 = pback2 + 1;
4100+4101+ pback4 = pback3 + stream_size4;
4102+ pbackend4 = pback3 + 1;
4103+4104+ if (!elf_fetch_backward_init (&pback1, pbackend1, &val1, &bits1))
4105+ return 0;
4106+ if (!elf_fetch_backward_init (&pback2, pbackend2, &val2, &bits2))
4107+ return 0;
4108+ if (!elf_fetch_backward_init (&pback3, pbackend3, &val3, &bits3))
4109+ return 0;
4110+ if (!elf_fetch_backward_init (&pback4, pbackend4, &val4, &bits4))
4111+ return 0;
4112+4113+ regenerated_stream_size = (regenerated_size + 3) / 4;
4114+4115+ plit1 = plit;
4116+ plit2 = plit1 + regenerated_stream_size;
4117+ plit3 = plit2 + regenerated_stream_size;
4118+ plit4 = plit3 + regenerated_stream_size;
4119+4120+ regenerated_stream_size4 = regenerated_size - regenerated_stream_size * 3;
4121+4122+ /* We can't get more than 64 literal bytes from a single call to
4123+ elf_fetch_bits_backward. The fourth stream can be up to 3 bytes less,
4124+ so use as the limit. */
4125+4126+ limit = regenerated_stream_size4 <= 64 ? 0 : regenerated_stream_size4 - 64;
4127+ i = 0;
4128+ while (i < limit)
4129+ {
4130+ if (!elf_fetch_bits_backward (&pback1, pbackend1, &val1, &bits1))
4131+ return 0;
4132+ if (!elf_fetch_bits_backward (&pback2, pbackend2, &val2, &bits2))
4133+ return 0;
4134+ if (!elf_fetch_bits_backward (&pback3, pbackend3, &val3, &bits3))
4135+ return 0;
4136+ if (!elf_fetch_bits_backward (&pback4, pbackend4, &val4, &bits4))
4137+ return 0;
4138+4139+ /* We can't subtract more than 11 bits at a time. */
4140+4141+ do
4142+ {
4143+ t1 = huffman_table[(val1 >> (bits1 - huffman_table_bits))
4144+ & huffman_mask];
4145+ t2 = huffman_table[(val2 >> (bits2 - huffman_table_bits))
4146+ & huffman_mask];
4147+ t3 = huffman_table[(val3 >> (bits3 - huffman_table_bits))
4148+ & huffman_mask];
4149+ t4 = huffman_table[(val4 >> (bits4 - huffman_table_bits))
4150+ & huffman_mask];
4151+4152+ *plit1 = t1 >> 8;
4153+ ++plit1;
4154+ bits1 -= t1 & 0xff;
4155+4156+ *plit2 = t2 >> 8;
4157+ ++plit2;
4158+ bits2 -= t2 & 0xff;
4159+4160+ *plit3 = t3 >> 8;
4161+ ++plit3;
4162+ bits3 -= t3 & 0xff;
4163+4164+ *plit4 = t4 >> 8;
4165+ ++plit4;
4166+ bits4 -= t4 & 0xff;
4167+4168+ ++i;
4169+ }
4170+ while (bits1 > 11 && bits2 > 11 && bits3 > 11 && bits4 > 11);
4171+ }
4172+4173+ while (i < regenerated_stream_size)
4174+ {
4175+ int use4;
4176+4177+ use4 = i < regenerated_stream_size4;
4178+4179+ if (!elf_fetch_bits_backward (&pback1, pbackend1, &val1, &bits1))
4180+ return 0;
4181+ if (!elf_fetch_bits_backward (&pback2, pbackend2, &val2, &bits2))
4182+ return 0;
4183+ if (!elf_fetch_bits_backward (&pback3, pbackend3, &val3, &bits3))
4184+ return 0;
4185+ if (use4)
4186+ {
4187+ if (!elf_fetch_bits_backward (&pback4, pbackend4, &val4, &bits4))
4188+ return 0;
4189+ }
4190+4191+ if (unlikely (bits1 < huffman_table_bits))
4192+ {
4193+ t1 = huffman_table[(val1 << (huffman_table_bits - bits1))
4194+ & huffman_mask];
4195+ if (unlikely (bits1 < (t1 & 0xff)))
4196+ {
4197+ elf_uncompress_failed ();
4198+ return 0;
4199+ }
4200+ }
4201+ else
4202+ t1 = huffman_table[(val1 >> (bits1 - huffman_table_bits))
4203+ & huffman_mask];
4204+4205+ if (unlikely (bits2 < huffman_table_bits))
4206+ {
4207+ t2 = huffman_table[(val2 << (huffman_table_bits - bits2))
4208+ & huffman_mask];
4209+ if (unlikely (bits2 < (t2 & 0xff)))
4210+ {
4211+ elf_uncompress_failed ();
4212+ return 0;
4213+ }
4214+ }
4215+ else
4216+ t2 = huffman_table[(val2 >> (bits2 - huffman_table_bits))
4217+ & huffman_mask];
4218+4219+ if (unlikely (bits3 < huffman_table_bits))
4220+ {
4221+ t3 = huffman_table[(val3 << (huffman_table_bits - bits3))
4222+ & huffman_mask];
4223+ if (unlikely (bits3 < (t3 & 0xff)))
4224+ {
4225+ elf_uncompress_failed ();
4226+ return 0;
4227+ }
4228+ }
4229+ else
4230+ t3 = huffman_table[(val3 >> (bits3 - huffman_table_bits))
4231+ & huffman_mask];
4232+4233+ if (use4)
4234+ {
4235+ if (unlikely (bits4 < huffman_table_bits))
4236+ {
4237+ t4 = huffman_table[(val4 << (huffman_table_bits - bits4))
4238+ & huffman_mask];
4239+ if (unlikely (bits4 < (t4 & 0xff)))
4240+ {
4241+ elf_uncompress_failed ();
4242+ return 0;
4243+ }
4244+ }
4245+ else
4246+ t4 = huffman_table[(val4 >> (bits4 - huffman_table_bits))
4247+ & huffman_mask];
4248+4249+ *plit4 = t4 >> 8;
4250+ ++plit4;
4251+ bits4 -= t4 & 0xff;
4252+ }
4253+4254+ *plit1 = t1 >> 8;
4255+ ++plit1;
4256+ bits1 -= t1 & 0xff;
4257+4258+ *plit2 = t2 >> 8;
4259+ ++plit2;
4260+ bits2 -= t2 & 0xff;
4261+4262+ *plit3 = t3 >> 8;
4263+ ++plit3;
4264+ bits3 -= t3 & 0xff;
4265+4266+ ++i;
4267+ }
4268+ }
4269+4270+ return 1;
4271+}
/* The information used to decompress a sequence code, which can be a literal
   length, an offset, or a match length.  One of these exists for each of the
   three sequence codes, and carries across blocks so that Repeat_Mode can
   reuse the previous block's table.  */

struct elf_zstd_seq_decode
{
  /* The FSE baseline table currently selected for this code: a predefined
     table, or a table built in caller-provided storage (RLE or
     FSE-compressed mode).  */
  const struct elf_zstd_fse_baseline_entry *table;
  /* The number of state bits for TABLE, i.e. states run from 0 to
     (1 << table_bits) - 1.  -1 until a table has been set, which lets
     Repeat_Mode detect that there is no previous table to reuse.  */
  int table_bits;
};
4281+4282+/* Unpack a sequence code compression mode. */
4283+4284+static int
4285+elf_zstd_unpack_seq_decode (int mode,
4286+ const unsigned char **ppin,
4287+ const unsigned char *pinend,
4288+ const struct elf_zstd_fse_baseline_entry *predef,
4289+ int predef_bits,
4290+ uint16_t *scratch,
4291+ int maxidx,
4292+ struct elf_zstd_fse_baseline_entry *table,
4293+ int table_bits,
4294+ int (*conv)(const struct elf_zstd_fse_entry *,
4295+ int,
4296+ struct elf_zstd_fse_baseline_entry *),
4297+ struct elf_zstd_seq_decode *decode)
4298+{
4299+ switch (mode)
4300+ {
4301+ case 0:
4302+ decode->table = predef;
4303+ decode->table_bits = predef_bits;
4304+ break;
4305+4306+ case 1:
4307+ {
4308+ struct elf_zstd_fse_entry entry;
4309+4310+ if (unlikely (*ppin >= pinend))
4311+ {
4312+ elf_uncompress_failed ();
4313+ return 0;
4314+ }
4315+ entry.symbol = **ppin;
4316+ ++*ppin;
4317+ entry.bits = 0;
4318+ entry.base = 0;
4319+ decode->table_bits = 0;
4320+ if (!conv (&entry, 0, table))
4321+ return 0;
4322+ }
4323+ break;
4324+4325+ case 2:
4326+ {
4327+ struct elf_zstd_fse_entry *fse_table;
4328+4329+ /* We use the same space for the simple FSE table and the baseline
4330+ table. */
4331+ fse_table = (struct elf_zstd_fse_entry *)table;
4332+ decode->table_bits = table_bits;
4333+ if (!elf_zstd_read_fse (ppin, pinend, scratch, maxidx, fse_table,
4334+ &decode->table_bits))
4335+ return 0;
4336+ if (!conv (fse_table, decode->table_bits, table))
4337+ return 0;
4338+ decode->table = table;
4339+ }
4340+ break;
4341+4342+ case 3:
4343+ if (unlikely (decode->table_bits == -1))
4344+ {
4345+ elf_uncompress_failed ();
4346+ return 0;
4347+ }
4348+ break;
4349+4350+ default:
4351+ elf_uncompress_failed ();
4352+ return 0;
4353+ }
4354+4355+ return 1;
4356+}
/* Decompress a zstd stream from PIN/SIN to POUT/SOUT.  Code based on RFC
   8878.  ZDEBUG_TABLE is scratch memory of at least ZSTD_TABLE_SIZE bytes,
   carved up below into the three sequence-code baseline tables, the huffman
   table, and FSE workspace.  SOUT must equal the frame's declared content
   size.  Return 1 on success, 0 on error.  */

static int
elf_zstd_decompress (const unsigned char *pin, size_t sin,
                     unsigned char *zdebug_table, unsigned char *pout,
                     size_t sout)
{
  const unsigned char *pinend;
  unsigned char *poutstart;
  unsigned char *poutend;
  struct elf_zstd_seq_decode literal_decode;
  struct elf_zstd_fse_baseline_entry *literal_fse_table;
  struct elf_zstd_seq_decode match_decode;
  struct elf_zstd_fse_baseline_entry *match_fse_table;
  struct elf_zstd_seq_decode offset_decode;
  struct elf_zstd_fse_baseline_entry *offset_fse_table;
  uint16_t *huffman_table;
  int huffman_table_bits;
  uint32_t repeated_offset1;
  uint32_t repeated_offset2;
  uint32_t repeated_offset3;
  uint16_t *scratch;
  unsigned char hdr;
  int has_checksum;
  uint64_t content_size;
  int last_block;

  pinend = pin + sin;
  poutstart = pout;
  poutend = pout + sout;

  /* The three sequence-code decoders start with no table; table_bits of -1
     means Repeat_Mode in the first block is an error.  Each decoder's
     storage is a fixed slice of ZDEBUG_TABLE.  */

  literal_decode.table = NULL;
  literal_decode.table_bits = -1;
  literal_fse_table = ((struct elf_zstd_fse_baseline_entry *)
                       (zdebug_table + ZSTD_TABLE_LITERAL_FSE_OFFSET));

  match_decode.table = NULL;
  match_decode.table_bits = -1;
  match_fse_table = ((struct elf_zstd_fse_baseline_entry *)
                     (zdebug_table + ZSTD_TABLE_MATCH_FSE_OFFSET));

  offset_decode.table = NULL;
  offset_decode.table_bits = -1;
  offset_fse_table = ((struct elf_zstd_fse_baseline_entry *)
                      (zdebug_table + ZSTD_TABLE_OFFSET_FSE_OFFSET));
  huffman_table = ((uint16_t *)
                   (zdebug_table + ZSTD_TABLE_HUFFMAN_OFFSET));
  huffman_table_bits = 0;
  scratch = ((uint16_t *)
             (zdebug_table + ZSTD_TABLE_WORK_OFFSET));

  /* Initial repeat-offset history per RFC 8878.  */
  repeated_offset1 = 1;
  repeated_offset2 = 4;
  repeated_offset3 = 8;

  if (unlikely (sin < 4))
    {
      elf_uncompress_failed ();
      return 0;
    }

  /* These values are the zstd magic number.  */
  if (unlikely (pin[0] != 0x28
                || pin[1] != 0xb5
                || pin[2] != 0x2f
                || pin[3] != 0xfd))
    {
      elf_uncompress_failed ();
      return 0;
    }

  pin += 4;

  if (unlikely (pin >= pinend))
    {
      elf_uncompress_failed ();
      return 0;
    }

  /* Frame_Header_Descriptor byte.  */
  hdr = *pin++;

  /* We expect a single frame (Single_Segment_Flag set).  */
  if (unlikely ((hdr & (1 << 5)) == 0))
    {
      elf_uncompress_failed ();
      return 0;
    }
  /* Reserved bit must be zero.  */
  if (unlikely ((hdr & (1 << 3)) != 0))
    {
      elf_uncompress_failed ();
      return 0;
    }
  /* We do not expect a dictionary.  */
  if (unlikely ((hdr & 3) != 0))
    {
      elf_uncompress_failed ();
      return 0;
    }
  has_checksum = (hdr & (1 << 2)) != 0;

  /* Frame_Content_Size: 1, 2, 4 or 8 bytes depending on the top two bits of
     the descriptor.  The 2-byte form is biased by 256.  */
  switch (hdr >> 6)
    {
    case 0:
      if (unlikely (pin >= pinend))
        {
          elf_uncompress_failed ();
          return 0;
        }
      content_size = (uint64_t) *pin++;
      break;
    case 1:
      if (unlikely (pin + 1 >= pinend))
        {
          elf_uncompress_failed ();
          return 0;
        }
      content_size = (((uint64_t) pin[0]) | (((uint64_t) pin[1]) << 8)) + 256;
      pin += 2;
      break;
    case 2:
      if (unlikely (pin + 3 >= pinend))
        {
          elf_uncompress_failed ();
          return 0;
        }
      content_size = ((uint64_t) pin[0]
                      | (((uint64_t) pin[1]) << 8)
                      | (((uint64_t) pin[2]) << 16)
                      | (((uint64_t) pin[3]) << 24));
      pin += 4;
      break;
    case 3:
      if (unlikely (pin + 7 >= pinend))
        {
          elf_uncompress_failed ();
          return 0;
        }
      content_size = ((uint64_t) pin[0]
                      | (((uint64_t) pin[1]) << 8)
                      | (((uint64_t) pin[2]) << 16)
                      | (((uint64_t) pin[3]) << 24)
                      | (((uint64_t) pin[4]) << 32)
                      | (((uint64_t) pin[5]) << 40)
                      | (((uint64_t) pin[6]) << 48)
                      | (((uint64_t) pin[7]) << 56));
      pin += 8;
      break;
    default:
      elf_uncompress_failed ();
      return 0;
    }

  /* The declared content size must match the caller's output buffer
     exactly (first test guards size_t truncation on 32-bit hosts).  */
  if (unlikely (content_size != (size_t) content_size
                || (size_t) content_size != sout))
    {
      elf_uncompress_failed ();
      return 0;
    }

  /* Decode blocks until we see the one marked last.  */
  last_block = 0;
  while (!last_block)
    {
      uint32_t block_hdr;
      int block_type;
      uint32_t block_size;

      if (unlikely (pin + 2 >= pinend))
        {
          elf_uncompress_failed ();
          return 0;
        }
      /* 3-byte little-endian Block_Header: bit 0 last-block flag, bits 1-2
         block type, bits 3-23 block size.  */
      block_hdr = ((uint32_t) pin[0]
                   | (((uint32_t) pin[1]) << 8)
                   | (((uint32_t) pin[2]) << 16));
      pin += 3;

      last_block = block_hdr & 1;
      block_type = (block_hdr >> 1) & 3;
      block_size = block_hdr >> 3;

      switch (block_type)
        {
        case 0:
          /* Raw_Block: uncompressed bytes copied verbatim.  */
          if (unlikely ((size_t) block_size > (size_t) (pinend - pin)))
            {
              elf_uncompress_failed ();
              return 0;
            }
          if (unlikely ((size_t) block_size > (size_t) (poutend - pout)))
            {
              elf_uncompress_failed ();
              return 0;
            }
          memcpy (pout, pin, block_size);
          pout += block_size;
          pin += block_size;
          break;

        case 1:
          /* RLE_Block: a single byte repeated block_size times.  */
          if (unlikely (pin >= pinend))
            {
              elf_uncompress_failed ();
              return 0;
            }
          if (unlikely ((size_t) block_size > (size_t) (poutend - pout)))
            {
              elf_uncompress_failed ();
              return 0;
            }
          memset (pout, *pin, block_size);
          pout += block_size;
          pin++;
          break;

        case 2:
          {
            const unsigned char *pblockend;
            unsigned char *plitstack;
            unsigned char *plit;
            uint32_t literal_count;
            unsigned char seq_hdr;
            size_t seq_count;
            size_t seq;
            const unsigned char *pback;
            uint64_t val;
            unsigned int bits;
            unsigned int literal_state;
            unsigned int offset_state;
            unsigned int match_state;

            /* Compressed_Block */
            if (unlikely ((size_t) block_size > (size_t) (pinend - pin)))
              {
                elf_uncompress_failed ();
                return 0;
              }

            pblockend = pin + block_size;

            /* Read the literals into the end of the output space, and leave
               PLIT pointing at them.  */

            if (!elf_zstd_read_literals (&pin, pblockend, pout, poutend,
                                         scratch, huffman_table,
                                         &huffman_table_bits,
                                         &plitstack))
              return 0;
            plit = plitstack;
            literal_count = poutend - plit;

            /* Number_of_Sequences: 1, 2 or 3 bytes depending on the first
               byte.  NOTE(review): seq_hdr is read here without first
               checking pin < pinend; elf_zstd_read_literals may leave pin at
               the end of the block -- confirm an overread is impossible.  */
            seq_hdr = *pin;
            pin++;
            if (seq_hdr < 128)
              seq_count = seq_hdr;
            else if (seq_hdr < 255)
              {
                if (unlikely (pin >= pinend))
                  {
                    elf_uncompress_failed ();
                    return 0;
                  }
                seq_count = ((seq_hdr - 128) << 8) + *pin;
                pin++;
              }
            else
              {
                if (unlikely (pin + 1 >= pinend))
                  {
                    elf_uncompress_failed ();
                    return 0;
                  }
                seq_count = *pin + (pin[1] << 8) + 0x7f00;
                pin += 2;
              }

            if (seq_count > 0)
              {
                int (*pfn)(const struct elf_zstd_fse_entry *,
                           int, struct elf_zstd_fse_baseline_entry *);

                /* Symbol_Compression_Modes byte: two bits each for the
                   literal length, offset, and match length codes.  */
                if (unlikely (pin >= pinend))
                  {
                    elf_uncompress_failed ();
                    return 0;
                  }
                seq_hdr = *pin;
                ++pin;

                pfn = elf_zstd_make_literal_baseline_fse;
                if (!elf_zstd_unpack_seq_decode ((seq_hdr >> 6) & 3,
                                                 &pin, pinend,
                                                 &elf_zstd_lit_table[0], 6,
                                                 scratch, 35,
                                                 literal_fse_table, 9, pfn,
                                                 &literal_decode))
                  return 0;

                pfn = elf_zstd_make_offset_baseline_fse;
                if (!elf_zstd_unpack_seq_decode ((seq_hdr >> 4) & 3,
                                                 &pin, pinend,
                                                 &elf_zstd_offset_table[0], 5,
                                                 scratch, 31,
                                                 offset_fse_table, 8, pfn,
                                                 &offset_decode))
                  return 0;

                pfn = elf_zstd_make_match_baseline_fse;
                if (!elf_zstd_unpack_seq_decode ((seq_hdr >> 2) & 3,
                                                 &pin, pinend,
                                                 &elf_zstd_match_table[0], 6,
                                                 scratch, 52,
                                                 match_fse_table, 9, pfn,
                                                 &match_decode))
                  return 0;
              }

            /* The sequence bitstream is read backward from the end of the
               block.  Initialize the three FSE states, in the order literal,
               offset, match.  */

            pback = pblockend - 1;
            if (!elf_fetch_backward_init (&pback, pin, &val, &bits))
              return 0;

            bits -= literal_decode.table_bits;
            literal_state = ((val >> bits)
                             & ((1U << literal_decode.table_bits) - 1));

            if (!elf_fetch_bits_backward (&pback, pin, &val, &bits))
              return 0;
            bits -= offset_decode.table_bits;
            offset_state = ((val >> bits)
                            & ((1U << offset_decode.table_bits) - 1));

            if (!elf_fetch_bits_backward (&pback, pin, &val, &bits))
              return 0;
            bits -= match_decode.table_bits;
            match_state = ((val >> bits)
                           & ((1U << match_decode.table_bits) - 1));

            /* Decode and execute each sequence.  */
            seq = 0;
            while (1)
              {
                const struct elf_zstd_fse_baseline_entry *pt;
                uint32_t offset_basebits;
                uint32_t offset_baseline;
                uint32_t offset_bits;
                uint32_t offset_base;
                uint32_t offset;
                uint32_t match_baseline;
                uint32_t match_bits;
                uint32_t match_base;
                uint32_t match;
                uint32_t literal_baseline;
                uint32_t literal_bits;
                uint32_t literal_base;
                uint32_t literal;
                uint32_t need;
                uint32_t add;

                /* Decode the offset value.  */
                pt = &offset_decode.table[offset_state];
                offset_basebits = pt->basebits;
                offset_baseline = pt->baseline;
                offset_bits = pt->bits;
                offset_base = pt->base;

                /* This case can be more than 16 bits, which is all that
                   elf_fetch_bits_backward promises, so fetch in two
                   steps.  */
                need = offset_basebits;
                add = 0;
                if (unlikely (need > 16))
                  {
                    if (!elf_fetch_bits_backward (&pback, pin, &val, &bits))
                      return 0;
                    bits -= 16;
                    add = (val >> bits) & ((1U << 16) - 1);
                    need -= 16;
                    add <<= need;
                  }
                if (need > 0)
                  {
                    if (!elf_fetch_bits_backward (&pback, pin, &val, &bits))
                      return 0;
                    bits -= need;
                    add += (val >> bits) & ((1U << need) - 1);
                  }

                offset = offset_baseline + add;

                /* Decode the match length.  */
                pt = &match_decode.table[match_state];
                need = pt->basebits;
                match_baseline = pt->baseline;
                match_bits = pt->bits;
                match_base = pt->base;

                add = 0;
                if (need > 0)
                  {
                    if (!elf_fetch_bits_backward (&pback, pin, &val, &bits))
                      return 0;
                    bits -= need;
                    add = (val >> bits) & ((1U << need) - 1);
                  }

                match = match_baseline + add;

                /* Decode the literal length.  */
                pt = &literal_decode.table[literal_state];
                need = pt->basebits;
                literal_baseline = pt->baseline;
                literal_bits = pt->bits;
                literal_base = pt->base;

                add = 0;
                if (need > 0)
                  {
                    if (!elf_fetch_bits_backward (&pback, pin, &val, &bits))
                      return 0;
                    bits -= need;
                    add = (val >> bits) & ((1U << need) - 1);
                  }

                literal = literal_baseline + add;

                /* Resolve repeat-offset codes and update the three-deep
                   repeat-offset history.  See the comment in
                   elf_zstd_make_offset_baseline_fse.  */
                if (offset_basebits > 1)
                  {
                    repeated_offset3 = repeated_offset2;
                    repeated_offset2 = repeated_offset1;
                    repeated_offset1 = offset;
                  }
                else
                  {
                    /* A literal length of zero shifts the repeat-code
                       meaning by one.  */
                    if (unlikely (literal == 0))
                      ++offset;
                    switch (offset)
                      {
                      case 1:
                        offset = repeated_offset1;
                        break;
                      case 2:
                        offset = repeated_offset2;
                        repeated_offset2 = repeated_offset1;
                        repeated_offset1 = offset;
                        break;
                      case 3:
                        offset = repeated_offset3;
                        repeated_offset3 = repeated_offset2;
                        repeated_offset2 = repeated_offset1;
                        repeated_offset1 = offset;
                        break;
                      case 4:
                        offset = repeated_offset1 - 1;
                        repeated_offset3 = repeated_offset2;
                        repeated_offset2 = repeated_offset1;
                        repeated_offset1 = offset;
                        break;
                      }
                  }

                ++seq;
                if (seq < seq_count)
                  {
                    uint32_t v;

                    /* Update the three states for the next sequence.  The
                       last sequence skips this; its state bits are not
                       present in the stream.  */

                    if (!elf_fetch_bits_backward (&pback, pin, &val, &bits))
                      return 0;

                    need = literal_bits;
                    bits -= need;
                    v = (val >> bits) & (((uint32_t)1 << need) - 1);

                    literal_state = literal_base + v;

                    if (!elf_fetch_bits_backward (&pback, pin, &val, &bits))
                      return 0;

                    need = match_bits;
                    bits -= need;
                    v = (val >> bits) & (((uint32_t)1 << need) - 1);

                    match_state = match_base + v;

                    if (!elf_fetch_bits_backward (&pback, pin, &val, &bits))
                      return 0;

                    need = offset_bits;
                    bits -= need;
                    v = (val >> bits) & (((uint32_t)1 << need) - 1);

                    offset_state = offset_base + v;
                  }

                /* The next sequence is now in LITERAL, OFFSET, MATCH.  */

                /* Copy LITERAL bytes from the literals.  */

                if (unlikely ((size_t)(poutend - pout) < literal))
                  {
                    elf_uncompress_failed ();
                    return 0;
                  }

                if (unlikely (literal_count < literal))
                  {
                    elf_uncompress_failed ();
                    return 0;
                  }

                literal_count -= literal;

                /* Often LITERAL is small, so handle small cases quickly.  */
                switch (literal)
                  {
                  case 8:
                    *pout++ = *plit++;
                    /* FALLTHROUGH */
                  case 7:
                    *pout++ = *plit++;
                    /* FALLTHROUGH */
                  case 6:
                    *pout++ = *plit++;
                    /* FALLTHROUGH */
                  case 5:
                    *pout++ = *plit++;
                    /* FALLTHROUGH */
                  case 4:
                    *pout++ = *plit++;
                    /* FALLTHROUGH */
                  case 3:
                    *pout++ = *plit++;
                    /* FALLTHROUGH */
                  case 2:
                    *pout++ = *plit++;
                    /* FALLTHROUGH */
                  case 1:
                    *pout++ = *plit++;
                    break;

                  case 0:
                    break;

                  default:
                    /* The literals sit at the end of the output buffer, so
                       POUT can catch up with PLIT; when the gap is smaller
                       than LITERAL, copy in gap-sized chunks so memcpy
                       ranges never overlap.  */
                    if (unlikely ((size_t)(plit - pout) < literal))
                      {
                        uint32_t move;

                        move = plit - pout;
                        while (literal > move)
                          {
                            memcpy (pout, plit, move);
                            pout += move;
                            plit += move;
                            literal -= move;
                          }
                      }

                    memcpy (pout, plit, literal);
                    pout += literal;
                    plit += literal;
                  }

                if (match > 0)
                  {
                    /* Copy MATCH bytes from the decoded output at OFFSET.  */

                    if (unlikely ((size_t)(poutend - pout) < match))
                      {
                        elf_uncompress_failed ();
                        return 0;
                      }

                    if (unlikely ((size_t)(pout - poutstart) < offset))
                      {
                        elf_uncompress_failed ();
                        return 0;
                      }

                    if (offset >= match)
                      {
                        memcpy (pout, pout - offset, match);
                        pout += match;
                      }
                    else
                      {
                        /* Source and destination overlap: copy in
                           OFFSET-sized chunks, which also implements the
                           intended byte-repetition semantics.  */
                        while (match > 0)
                          {
                            uint32_t copy;

                            copy = match < offset ? match : offset;
                            memcpy (pout, pout - offset, copy);
                            match -= copy;
                            pout += copy;
                          }
                      }
                  }

                if (unlikely (seq >= seq_count))
                  {
                    /* All sequences done: copy remaining literals.  If PLIT
                       == POUT the remaining literals are already in place
                       and only the pointer needs advancing.  */
                    if (literal_count > 0 && plit != pout)
                      {
                        if (unlikely ((size_t)(poutend - pout)
                                      < literal_count))
                          {
                            elf_uncompress_failed ();
                            return 0;
                          }

                        if ((size_t)(plit - pout) < literal_count)
                          {
                            uint32_t move;

                            move = plit - pout;
                            while (literal_count > move)
                              {
                                memcpy (pout, plit, move);
                                pout += move;
                                plit += move;
                                literal_count -= move;
                              }
                          }

                        memcpy (pout, plit, literal_count);
                      }

                    pout += literal_count;

                    break;
                  }
              }

            pin = pblockend;
          }
          break;

        case 3:
        default:
          /* Reserved block type.  */
          elf_uncompress_failed ();
          return 0;
        }
    }

  if (has_checksum)
    {
      if (unlikely (pin + 4 > pinend))
        {
          elf_uncompress_failed ();
          return 0;
        }

      /* We don't verify the checksum.  Running GNU ld with
         --compress-debug-sections=zstd does not seem to generate a
         checksum.  */

      pin += 4;
    }

  /* The whole input must have been consumed.  */
  if (pin != pinend)
    {
      elf_uncompress_failed ();
      return 0;
    }

  return 1;
}
5024+5025+#define ZDEBUG_TABLE_SIZE \
5026+ (ZLIB_TABLE_SIZE > ZSTD_TABLE_SIZE ? ZLIB_TABLE_SIZE : ZSTD_TABLE_SIZE)
5027+5028/* Uncompress the old compressed debug format, the one emitted by
5029 --compress-debug-sections=zlib-gnu. The compressed data is in
5030 COMPRESSED / COMPRESSED_SIZE, and the function writes to
···5094 unsigned char **uncompressed, size_t *uncompressed_size)
5095{
5096 const b_elf_chdr *chdr;
5097+ char *alc;
5098+ size_t alc_len;
5099 unsigned char *po;
51005101 *uncompressed = NULL;
···51075108 chdr = (const b_elf_chdr *) compressed;
51095110+ alc = NULL;
5111+ alc_len = 0;
00005112 if (*uncompressed != NULL && *uncompressed_size >= chdr->ch_size)
5113 po = *uncompressed;
5114 else
5115 {
5116+ alc_len = chdr->ch_size;
5117+ alc = (char*)backtrace_alloc (state, alc_len, error_callback, data);
5118+ if (alc == NULL)
5119 return 0;
5120+ po = (unsigned char *) alc;
5121 }
51225123+ switch (chdr->ch_type)
5124+ {
5125+ case ELFCOMPRESS_ZLIB:
5126+ if (!elf_zlib_inflate_and_verify (compressed + sizeof (b_elf_chdr),
5127+ compressed_size - sizeof (b_elf_chdr),
5128+ zdebug_table, po, chdr->ch_size))
5129+ goto skip;
5130+ break;
5131+5132+ case ELFCOMPRESS_ZSTD:
5133+ if (!elf_zstd_decompress (compressed + sizeof (b_elf_chdr),
5134+ compressed_size - sizeof (b_elf_chdr),
5135+ (unsigned char *)zdebug_table, po,
5136+ chdr->ch_size))
5137+ goto skip;
5138+ break;
5139+5140+ default:
5141+ /* Unsupported compression algorithm. */
5142+ goto skip;
5143+ }
51445145 *uncompressed = po;
5146 *uncompressed_size = chdr->ch_size;
51475148+ return 1;
5149+5150+ skip:
5151+ if (alc != NULL && alc_len > 0)
5152+ backtrace_free (state, alc, alc_len, error_callback, data);
5153 return 1;
5154}
5155···5174 ret = elf_uncompress_zdebug (state, compressed, compressed_size,
5175 zdebug_table, error_callback, data,
5176 uncompressed, uncompressed_size);
5177+ backtrace_free (state, zdebug_table, ZDEBUG_TABLE_SIZE,
5178+ error_callback, data);
5179+ return ret;
5180+}
5181+5182+/* This function is a hook for testing the zstd support. It is only used by
5183+ tests. */
5184+5185+int
5186+backtrace_uncompress_zstd (struct backtrace_state *state,
5187+ const unsigned char *compressed,
5188+ size_t compressed_size,
5189+ backtrace_error_callback error_callback,
5190+ void *data, unsigned char *uncompressed,
5191+ size_t uncompressed_size)
5192+{
5193+ unsigned char *zdebug_table;
5194+ int ret;
5195+5196+ zdebug_table = ((unsigned char *) backtrace_alloc (state, ZDEBUG_TABLE_SIZE,
5197+ error_callback, data));
5198+ if (zdebug_table == NULL)
5199+ return 0;
5200+ ret = elf_zstd_decompress (compressed, compressed_size,
5201+ zdebug_table, uncompressed, uncompressed_size);
5202 backtrace_free (state, zdebug_table, ZDEBUG_TABLE_SIZE,
5203 error_callback, data);
5204 return ret;
···7200 if (zdebug_table == NULL)
7201 {
7202 zdebug_table = ((uint16_t *)
7203+ backtrace_alloc (state, ZLIB_TABLE_SIZE,
7204 error_callback, data));
7205 if (zdebug_table == NULL)
7206 goto fail;
···7226 }
7227 }
72287229+ if (zdebug_table != NULL)
7230+ {
7231+ backtrace_free (state, zdebug_table, ZLIB_TABLE_SIZE,
7232+ error_callback, data);
7233+ zdebug_table = NULL;
7234+ }
7235+7236 /* Uncompress the official ELF format
7237+ (--compress-debug-sections=zlib-gabi, --compress-debug-sections=zstd). */
7238 for (i = 0; i < (int) DEBUG_MAX; ++i)
7239 {
7240 unsigned char *uncompressed_data;
+9
src/external/tracy/libbacktrace/internal.hpp
···371 unsigned char **uncompressed,
372 size_t *uncompressed_size);
373000000000374/* A test-only hook for elf_uncompress_lzma. */
375376extern int backtrace_uncompress_lzma (struct backtrace_state *,
···371 unsigned char **uncompressed,
372 size_t *uncompressed_size);
373374+/* A test-only hook for elf_zstd_decompress. */
375+376+extern int backtrace_uncompress_zstd (struct backtrace_state *,
377+ const unsigned char *compressed,
378+ size_t compressed_size,
379+ backtrace_error_callback, void *data,
380+ unsigned char *uncompressed,
381+ size_t uncompressed_size);
382+383/* A test-only hook for elf_uncompress_lzma. */
384385extern int backtrace_uncompress_lzma (struct backtrace_state *,