miniz_oxide/inflate/core.rs

1//! Streaming decompression functionality.
2
3use super::*;
4use crate::shared::{update_adler32, HUFFMAN_LENGTH_ORDER};
5use ::core::cell::Cell;
6
7use ::core::cmp;
8use ::core::convert::TryInto;
9
10use self::output_buffer::{InputWrapper, OutputBuffer};
11
/// Size of the LZ dictionary (history window) used during decompression: 32 KiB.
pub const TINFL_LZ_DICT_SIZE: usize = 32_768;
13
/// A struct containing huffman code lengths and the huffman code tree used by the decompressor.
#[derive(Clone)]
struct HuffmanTable {
    /// Fast lookup table for shorter huffman codes.
    ///
    /// Indexed by the next `FAST_LOOKUP_BITS` bits of input.
    /// See `HuffmanTable::fast_lookup`.
    pub look_up: [i16; FAST_LOOKUP_SIZE as usize],
    /// Full huffman tree, used for codes too long for the fast lookup table.
    ///
    /// Positive values are edge nodes/symbols, negative values are
    /// parent nodes/references to other nodes.
    pub tree: [i16; MAX_HUFF_TREE_SIZE],
}
27
impl HuffmanTable {
    /// Create a table with all entries zeroed; filled in later by `init_tree`.
    const fn new() -> HuffmanTable {
        HuffmanTable {
            look_up: [0; FAST_LOOKUP_SIZE as usize],
            tree: [0; MAX_HUFF_TREE_SIZE],
        }
    }

    /// Look for a symbol in the fast lookup table.
    /// The symbol is stored in the lower 9 bits, the length in the next 6.
    /// If the returned value is negative, the code wasn't found in the
    /// fast lookup table and the full tree has to be traversed to find the code.
    #[inline]
    fn fast_lookup(&self, bit_buf: BitBuffer) -> i16 {
        self.look_up[(bit_buf & BitBuffer::from(FAST_LOOKUP_SIZE - 1)) as usize]
    }

    /// Get the symbol and the code length from the huffman tree.
    #[inline]
    fn tree_lookup(&self, fast_symbol: i32, bit_buf: BitBuffer, mut code_len: u8) -> (i32, u32) {
        let mut symbol = fast_symbol;
        // We step through the tree until we encounter a positive value, which indicates a
        // symbol.
        loop {
            // symbol here indicates the position of the left (0) node, if the next bit is 1
            // we add 1 to the lookup position to get the right node.
            let tree_index = (!symbol + ((bit_buf >> code_len) & 1) as i32) as usize;

            // Use get here to avoid generating panic code.
            // The init_tree code should prevent this from actually going out of bounds
            // but if there were somehow a bug with that
            // we would at worst end up with corrupted output in release mode.
            debug_assert!(tree_index < self.tree.len());
            symbol = i32::from(self.tree.get(tree_index).copied().unwrap_or(i16::MAX));
            code_len += 1;
            if symbol >= 0 {
                break;
            }
        }
        // Note: Using a u8 for code_len inside this function seems to improve performance, but changing it
        // in localvars seems to worsen things so we convert it to a u32 here.
        (symbol, u32::from(code_len))
    }

    #[inline]
    /// Look up a symbol and code length from the bits in the provided bit buffer.
    ///
    /// Returns a `(symbol, code_length)` tuple. If the code was not present in the
    /// fast lookup table the full tree is traversed instead.
    /// (In the fast path the length bits are still present in the upper bits of the
    /// returned symbol; callers mask them off as needed.)
    ///
    /// TODO: Check if a smaller type for code_len helps performance.
    fn lookup(&self, bit_buf: BitBuffer) -> (i32, u32) {
        let symbol = self.fast_lookup(bit_buf).into();
        if symbol >= 0 {
            // Found in the fast table; the code length is stored above bit 9.
            let length = (symbol >> 9) as u32;
            (symbol, length)
        } else {
            // We didn't get a symbol from the fast lookup table, so check the tree instead.
            self.tree_lookup(symbol, bit_buf, FAST_LOOKUP_BITS)
        }
    }
}
91
/// The number of huffman tables used.
const MAX_HUFF_TABLES: usize = 3;
/// The length of the first (literal/length) huffman table.
const MAX_HUFF_SYMBOLS_0: usize = 288;
/// The length of the second (distance) huffman table.
const MAX_HUFF_SYMBOLS_1: usize = 32;
/// The length of the last (huffman code length) huffman table.
const MAX_HUFF_SYMBOLS_2: usize = 19;
/// The maximum length of a code that can be looked up in the fast lookup table.
const FAST_LOOKUP_BITS: u8 = 10;
/// The size of the fast lookup table.
const FAST_LOOKUP_SIZE: u32 = 1 << FAST_LOOKUP_BITS;
/// Maximum number of tree nodes: two per symbol of the largest table.
const MAX_HUFF_TREE_SIZE: usize = MAX_HUFF_SYMBOLS_0 * 2;
/// Index of the literal/length table in `DecompressorOxide::tables`.
const LITLEN_TABLE: usize = 0;
/// Index of the distance table in `DecompressorOxide::tables`.
const DIST_TABLE: usize = 1;
/// Index of the huffman code length table in `DecompressorOxide::tables`.
const HUFFLEN_TABLE: usize = 2;
108
/// Flags to [`decompress()`] to control how inflation works.
///
/// These define bits for a bitmask argument.
///
/// (Note: bit values 16 and 32 are not currently defined by this module.)
pub mod inflate_flags {
    /// Should we try to parse a zlib header?
    ///
    /// If unset, the function will expect an RFC1951 deflate stream.  If set, it will expect a
    /// RFC1950 zlib wrapper around the deflate stream.
    pub const TINFL_FLAG_PARSE_ZLIB_HEADER: u32 = 1;

    /// There will be more input that hasn't been given to the decompressor yet.
    ///
    /// This is useful when you want to decompress what you have so far,
    /// even if you know there is probably more input that hasn't gotten here yet (_e.g._, over a
    /// network connection).  When [`decompress()`][super::decompress] reaches the end of the input
    /// without finding the end of the compressed stream, it will return
    /// [`TINFLStatus::NeedsMoreInput`][super::TINFLStatus::NeedsMoreInput] if this is set,
    /// indicating that you should get more data before calling again.  If not set, it will return
    /// [`TINFLStatus::FailedCannotMakeProgress`][super::TINFLStatus::FailedCannotMakeProgress]
    /// suggesting the stream is corrupt, since you claimed it was all there.
    pub const TINFL_FLAG_HAS_MORE_INPUT: u32 = 2;

    /// The output buffer should not wrap around.
    pub const TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF: u32 = 4;

    /// Calculate the adler32 checksum of the output data even if we're not inflating a zlib stream.
    ///
    /// If [`TINFL_FLAG_IGNORE_ADLER32`] is specified, it will override this.
    ///
    /// NOTE: Enabling/disabling this between calls to decompress will result in an incorrect
    /// checksum.
    pub const TINFL_FLAG_COMPUTE_ADLER32: u32 = 8;

    /// Ignore adler32 checksum even if we are inflating a zlib stream.
    ///
    /// Overrides [`TINFL_FLAG_COMPUTE_ADLER32`] if both are enabled.
    ///
    /// NOTE: This flag does not exist in miniz as it does not support this and is a
    /// custom addition for miniz_oxide.
    ///
    /// NOTE: Should not be changed from enabled to disabled after decompression has started,
    /// this will result in checksum failure (outside the unlikely event where the checksum happens
    /// to match anyway).
    pub const TINFL_FLAG_IGNORE_ADLER32: u32 = 64;
}
154
155use self::inflate_flags::*;
156
/// Minimum number of entries for each of the three huffman tables
/// (literal/length, distance, and huffman code length, respectively).
const MIN_TABLE_SIZES: [u16; 3] = [257, 1, 4];
158
/// Type used for buffering input bits before they are consumed.
/// A 64-bit buffer is used on 64-bit targets so more bytes can be loaded per refill.
#[cfg(target_pointer_width = "64")]
type BitBuffer = u64;

/// On other targets a 32-bit bit buffer is used instead.
#[cfg(not(target_pointer_width = "64"))]
type BitBuffer = u32;
164
165/*
166enum HuffmanTableType {
167    LiteralLength = 0,
168    Dist = 1,
169    Huffman = 2,
170}*/
171
/// Main decompression struct.
///
/// Holds the state machine position, bit buffer, huffman tables and the
/// header/checksum data carried across calls to the decompressor.
#[derive(Clone)]
pub struct DecompressorOxide {
    /// Current state of the decompressor.
    state: core::State,
    /// Number of bits in the bit buffer.
    num_bits: u32,
    /// Zlib CMF
    z_header0: u32,
    /// Zlib FLG
    z_header1: u32,
    /// Adler32 checksum from the zlib header.
    z_adler32: u32,
    /// 1 if the current block is the last block, 0 otherwise.
    finish: u8,
    /// The type of the current block,
    /// or if in a dynamic block, which huffman table we are currently
    /// initializing.
    block_type: u8,
    /// Adler32 checksum calculated from the decompressed data so far.
    /// (Returned by [`DecompressorOxide::adler32`].)
    check_adler32: u32,
    /// Last match distance.
    dist: u32,
    /// Variable used for match length, symbols, and a number of other things.
    counter: u32,
    /// Number of extra bits for the last length or distance code.
    num_extra: u8,
    /// Number of entries in each huffman table.
    table_sizes: [u16; MAX_HUFF_TABLES],
    /// Buffer of input bits that have been read but not yet consumed.
    bit_buf: BitBuffer,
    /// Huffman tables (literal/length, distance and code length, in that order).
    tables: [HuffmanTable; MAX_HUFF_TABLES],
    /// Code lengths for the literal/length huffman table.
    code_size_literal: [u8; MAX_HUFF_SYMBOLS_0],
    /// Code lengths for the distance huffman table.
    code_size_dist: [u8; MAX_HUFF_SYMBOLS_1],
    /// Code lengths for the huffman code length table.
    code_size_huffman: [u8; MAX_HUFF_SYMBOLS_2],
    /// Raw block header.
    raw_header: [u8; 4],
    /// Huffman length codes.
    len_codes: [u8; MAX_HUFF_SYMBOLS_0 + MAX_HUFF_SYMBOLS_1 + 137],
}
214
215impl DecompressorOxide {
216    /// Create a new tinfl_decompressor with all fields set to 0.
217    pub fn new() -> DecompressorOxide {
218        DecompressorOxide::default()
219    }
220
221    /// Set the current state to `Start`.
222    #[inline]
223    pub fn init(&mut self) {
224        // The rest of the data is reset or overwritten when used.
225        self.state = core::State::Start;
226    }
227
228    /// Returns the adler32 checksum of the currently decompressed data.
229    /// Note: Will return Some(1) if decompressing zlib but ignoring adler32.
230    #[inline]
231    pub fn adler32(&self) -> Option<u32> {
232        if self.state != State::Start && !self.state.is_failure() && self.z_header0 != 0 {
233            Some(self.check_adler32)
234        } else {
235            None
236        }
237    }
238
239    /// Returns the adler32 that was read from the zlib header if it exists.
240    #[inline]
241    pub fn adler32_header(&self) -> Option<u32> {
242        if self.state != State::Start && self.state != State::BadZlibHeader && self.z_header0 != 0 {
243            Some(self.z_adler32)
244        } else {
245            None
246        }
247    }
248
249    // Get zlib header for tests
250    // Only for tests for now, may provide a proper function for this for later.
251    #[cfg(all(test, feature = "with-alloc"))]
252    pub(crate) const fn zlib_header(&self) -> (u32, u32) {
253        (self.z_header0, self.z_header1)
254    }
255
256    /*fn code_size_table(&mut self, table_num: u8) -> &mut [u8] {
257        match table_num {
258            0 => &mut self.code_size_literal,
259            1 => &mut self.code_size_dist,
260            _ => &mut self.code_size_huffman,
261        }
262    }*/
263}
264
impl Default for DecompressorOxide {
    /// Create a new tinfl_decompressor with all fields set to 0.
    /// (`State::Start` has discriminant 0, so this is an all-zero value.)
    #[inline(always)]
    fn default() -> Self {
        DecompressorOxide {
            state: core::State::Start,
            num_bits: 0,
            z_header0: 0,
            z_header1: 0,
            z_adler32: 0,
            finish: 0,
            block_type: 0,
            check_adler32: 0,
            dist: 0,
            counter: 0,
            num_extra: 0,
            table_sizes: [0; MAX_HUFF_TABLES],
            bit_buf: 0,
            // TODO:(oyvindln) Check that copies here are optimized out in release mode.
            // (HuffmanTable is not Copy, so an array repeat expression can't be used here.)
            tables: [
                HuffmanTable::new(),
                HuffmanTable::new(),
                HuffmanTable::new(),
            ],
            code_size_literal: [0; MAX_HUFF_SYMBOLS_0],
            code_size_dist: [0; MAX_HUFF_SYMBOLS_1],
            code_size_huffman: [0; MAX_HUFF_SYMBOLS_2],
            raw_header: [0; 4],
            len_codes: [0; MAX_HUFF_SYMBOLS_0 + MAX_HUFF_SYMBOLS_1 + 137],
        }
    }
}
297
/// States of the decompression state machine.
///
/// NOTE: the failure variants listed after `DoneForever` must stay in sync
/// with the list in `State::is_failure`.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
#[non_exhaustive]
enum State {
    /// Initial state (also the state set by `DecompressorOxide::init`).
    Start = 0,
    ReadZlibCmf,
    ReadZlibFlg,
    ReadBlockHeader,
    BlockTypeNoCompression,
    RawHeader,
    RawMemcpy1,
    RawMemcpy2,
    ReadTableSizes,
    ReadHufflenTableCodeSize,
    ReadLitlenDistTablesCodeSize,
    ReadExtraBitsCodeSize,
    DecodeLitlen,
    WriteSymbol,
    ReadExtraBitsLitlen,
    DecodeDistance,
    ReadExtraBitsDistance,
    RawReadFirstByte,
    RawStoreFirstByte,
    WriteLenBytesToEnd,
    BlockDone,
    HuffDecodeOuterLoop1,
    HuffDecodeOuterLoop2,
    ReadAdler32,

    DoneForever,

    // Failure states.
    BlockTypeUnexpected,
    BadCodeSizeSum,
    BadDistOrLiteralTableLength,
    BadTotalSymbols,
    BadZlibHeader,
    DistanceOutOfBounds,
    BadRawLength,
    BadCodeSizeDistPrevLookup,
    InvalidLitlen,
    InvalidDist,
}
340
341impl State {
342    const fn is_failure(self) -> bool {
343        matches!(
344            self,
345            BlockTypeUnexpected
346                | BadCodeSizeSum
347                | BadDistOrLiteralTableLength
348                | BadTotalSymbols
349                | BadZlibHeader
350                | DistanceOutOfBounds
351                | BadRawLength
352                | BadCodeSizeDistPrevLookup
353                | InvalidLitlen
354                | InvalidDist
355        )
356    }
357
358    #[inline]
359    fn begin(&mut self, new_state: State) {
360        *self = new_state;
361    }
362}
363
364use self::State::*;
365
// # Optimization
// We add an extra value at the end and make the tables 32 elements long
// so we can use a mask to avoid bounds checks.
// The invalid values are set to something high enough to avoid underflowing
// the match length.
/// Base length for each length code.
///
/// The base is used together with the value of the extra bits to decode the actual
/// length/distance values in a match.
#[rustfmt::skip]
const LENGTH_BASE: [u16; 32] = [
    3,  4,  5,  6,  7,  8,  9,  10,  11,  13,  15,  17,  19,  23,  27,  31,
    35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 512, 512, 512
];

/// Number of extra bits for each length code.
#[rustfmt::skip]
const LENGTH_EXTRA: [u8; 32] = [
    0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2,
    3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0, 0, 0, 0
];

/// Base length for each distance code.
///
/// NOTE(review): unlike the length tables above, this table has only 30 entries
/// and is not padded for mask-indexing with `BASE_EXTRA_MASK` — presumably
/// distance codes are validated before indexing; confirm at call sites.
#[rustfmt::skip]
const DIST_BASE: [u16; 30] = [
    1,    2,    3,    4,    5,    7,      9,      13,     17,     25,    33,
    49,   65,   97,   129,  193,  257,    385,    513,    769,    1025,  1537,
    2049, 3073, 4097, 6145, 8193, 12_289, 16_385, 24_577
];
395
/// Get the number of extra bits used for a distance code.
/// (Code numbers above `NUM_DISTANCE_CODES` will give some garbage
/// value.)
#[inline(always)]
const fn num_extra_bits_for_distance_code(code: u8) -> u8 {
    // TODO: Need to verify that this is faster on all platforms.
    // Codes 0-3 use no extra bits; from there on, each pair of codes uses
    // one more extra bit than the previous pair.
    if code < 4 {
        0
    } else {
        (code >> 1) - 1
    }
}
406
/// The mask used when indexing the base/extra arrays.
/// (Sized for the 32-entry length tables above; NOTE(review): `DIST_BASE`
/// has only 30 entries, so masked indexing into it still requires the code
/// to have been validated — confirm at call sites.)
const BASE_EXTRA_MASK: usize = 32 - 1;
409
410/// Read an le u16 value from the slice iterator.
411///
412/// # Panics
413/// Panics if there are less than two bytes left.
414#[inline]
415fn read_u16_le(iter: &mut InputWrapper) -> u16 {
416    let ret = {
417        let two_bytes = iter.as_slice()[..2].try_into().unwrap_or_default();
418        u16::from_le_bytes(two_bytes)
419    };
420    iter.advance(2);
421    ret
422}
423
/// Ensure that there is data in the bit buffer.
///
/// On 64-bit platform, we use a 64-bit value so this will
/// result in there being at least 32 bits in the bit buffer.
/// This function assumes that there is at least 4 bytes left in the input buffer.
#[inline(always)]
#[cfg(target_pointer_width = "64")]
fn fill_bit_buffer(l: &mut LocalVars, in_iter: &mut InputWrapper) {
    // Read four bytes into the buffer at once.
    // (With fewer than 30 bits buffered, 32 more bits always fit in the
    // 64-bit buffer: 30 + 32 <= 64.)
    if l.num_bits < 30 {
        l.bit_buf |= BitBuffer::from(in_iter.read_u32_le()) << l.num_bits;
        l.num_bits += 32;
    }
}
438
/// Same as previous, but for non-64-bit platforms.
/// Ensures at least 16 bits are present, requires at least 2 bytes in the in buffer.
#[inline(always)]
#[cfg(not(target_pointer_width = "64"))]
fn fill_bit_buffer(l: &mut LocalVars, in_iter: &mut InputWrapper) {
    // If the buffer is 32-bit wide, read 2 bytes instead.
    // (With fewer than 15 bits buffered, 16 more bits always fit: 15 + 16 <= 32.)
    if l.num_bits < 15 {
        l.bit_buf |= BitBuffer::from(read_u16_le(in_iter)) << l.num_bits;
        l.num_bits += 16;
    }
}
450
451/// Check that the zlib header is correct and that there is enough space in the buffer
452/// for the window size specified in the header.
453///
454/// See https://tools.ietf.org/html/rfc1950
455#[inline]
456const fn validate_zlib_header(cmf: u32, flg: u32, flags: u32, mask: usize) -> Action {
457    let mut failed =
458    // cmf + flg should be divisible by 31.
459        (((cmf * 256) + flg) % 31 != 0) ||
460    // If this flag is set, a dictionary was used for this zlib compressed data.
461    // This is currently not supported by miniz or miniz-oxide
462        ((flg & 0b0010_0000) != 0) ||
463    // Compression method. Only 8(DEFLATE) is defined by the standard.
464        ((cmf & 15) != 8);
465
466    let window_size = 1 << ((cmf >> 4) + 8);
467    if (flags & TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF) == 0 {
468        // Bail if the buffer is wrapping and the window size is larger than the buffer.
469        failed |= (mask + 1) < window_size;
470    }
471
472    // Zlib doesn't allow window sizes above 32 * 1024.
473    failed |= window_size > 32_768;
474
475    if failed {
476        Action::Jump(BadZlibHeader)
477    } else {
478        Action::Jump(ReadBlockHeader)
479    }
480}
481
/// Result of one step of the decompression state machine.
enum Action {
    /// Keep running in the current state.
    None,
    /// Transition to the given state.
    Jump(State),
    /// Stop decompression and return the given status.
    End(TINFLStatus),
}
487
/// Try to decode the next huffman code, and puts it in the counter field of the decompressor
/// if successful.
///
/// # Returns
/// The specified action returned from `f` on success,
/// `Action::End` if there are not enough data left to decode a symbol.
fn decode_huffman_code<F>(
    r: &mut DecompressorOxide,
    l: &mut LocalVars,
    table: usize,
    flags: u32,
    in_iter: &mut InputWrapper,
    f: F,
) -> Action
where
    F: FnOnce(&mut DecompressorOxide, &mut LocalVars, i32) -> Action,
{
    // As the huffman codes can be up to 15 bits long we need at least 15 bits
    // ready in the bit buffer to start decoding the next huffman code.
    if l.num_bits < 15 {
        // First, make sure there is enough data in the bit buffer to decode a huffman code.
        if in_iter.bytes_left() < 2 {
            // If there is less than 2 bytes left in the input buffer, we try to look up
            // the huffman code with what's available, and return if that doesn't succeed.
            // Original explanation in miniz:
            // /* TINFL_HUFF_BITBUF_FILL() is only used rarely, when the number of bytes
            //  * remaining in the input buffer falls below 2. */
            // /* It reads just enough bytes from the input stream that are needed to decode
            //  * the next Huffman code (and absolutely no more). It works by trying to fully
            //  * decode a */
            // /* Huffman code by using whatever bits are currently present in the bit buffer.
            //  * If this fails, it reads another byte, and tries again until it succeeds or
            //  * until the */
            // /* bit buffer contains >=15 bits (deflate's max. Huffman code size). */
            loop {
                // Try the fast table first with whatever bits are already buffered.
                let mut temp = i32::from(r.tables[table].fast_lookup(l.bit_buf));
                if temp >= 0 {
                    let code_len = (temp >> 9) as u32;
                    // TODO: Is there any point to check for code_len != 0 here still?
                    if (code_len != 0) && (l.num_bits >= code_len) {
                        break;
                    }
                } else if l.num_bits > FAST_LOOKUP_BITS.into() {
                    // The code is longer than the fast table covers, so walk the tree
                    // as far as the buffered bits allow.
                    let mut code_len = u32::from(FAST_LOOKUP_BITS);
                    loop {
                        temp = i32::from(
                            r.tables[table].tree
                                [(!temp + ((l.bit_buf >> code_len) & 1) as i32) as usize],
                        );
                        code_len += 1;
                        if temp >= 0 || l.num_bits < code_len + 1 {
                            break;
                        }
                    }
                    if temp >= 0 {
                        break;
                    }
                }

                // TODO: miniz jumps straight to here after getting here again after failing to read
                // a byte.
                // Doing that lets miniz avoid re-doing the lookup that was done in the
                // previous call.
                let mut byte = 0;
                if let a @ Action::End(_) = read_byte(in_iter, flags, |b| {
                    byte = b;
                    Action::None
                }) {
                    return a;
                };

                // Do this outside closure for now to avoid borrowing r.
                l.bit_buf |= BitBuffer::from(byte) << l.num_bits;
                l.num_bits += 8;

                if l.num_bits >= 15 {
                    break;
                }
            }
        } else {
            // There is enough data in the input buffer, so read the next two bytes
            // and add them to the bit buffer.
            // Unwrapping here is fine since we just checked that there are at least two
            // bytes left.
            l.bit_buf |= BitBuffer::from(read_u16_le(in_iter)) << l.num_bits;
            l.num_bits += 16;
        }
    }

    // We now have at least 15 bits in the input buffer.
    let mut symbol = i32::from(r.tables[table].fast_lookup(l.bit_buf));
    let code_len;
    // If the symbol was found in the fast lookup table.
    if symbol >= 0 {
        // Get the length value from the top bits.
        // As we shift down the sign bit, converting to an unsigned value
        // shouldn't overflow.
        code_len = (symbol >> 9) as u32;
        // Mask out the length value.
        symbol &= 511;
    } else {
        let res = r.tables[table].tree_lookup(symbol, l.bit_buf, FAST_LOOKUP_BITS);
        symbol = res.0;
        code_len = res.1;
    };

    // Consume the bits of the decoded code and hand the symbol to the callback.
    l.bit_buf >>= code_len;
    l.num_bits -= code_len;
    f(r, l, symbol)
}
598
599/// Try to read one byte from `in_iter` and call `f` with the read byte as an argument,
600/// returning the result.
601/// If reading fails, `Action::End is returned`
602#[inline]
603fn read_byte<F>(in_iter: &mut InputWrapper, flags: u32, f: F) -> Action
604where
605    F: FnOnce(u8) -> Action,
606{
607    match in_iter.read_byte() {
608        None => end_of_input(flags),
609        Some(byte) => f(byte),
610    }
611}
612
613// TODO: `l: &mut LocalVars` may be slow similar to decompress_fast (even with inline(always))
614/// Try to read `amount` number of bits from `in_iter` and call the function `f` with the bits as an
615/// an argument after reading, returning the result of that function, or `Action::End` if there are
616/// not enough bytes left.
617#[inline]
618#[allow(clippy::while_immutable_condition)]
619fn read_bits<F>(
620    l: &mut LocalVars,
621    amount: u32,
622    in_iter: &mut InputWrapper,
623    flags: u32,
624    f: F,
625) -> Action
626where
627    F: FnOnce(&mut LocalVars, BitBuffer) -> Action,
628{
629    // Clippy gives a false positive warning here due to the closure.
630    // Read enough bytes from the input iterator to cover the number of bits we want.
631    while l.num_bits < amount {
632        let action = read_byte(in_iter, flags, |byte| {
633            l.bit_buf |= BitBuffer::from(byte) << l.num_bits;
634            l.num_bits += 8;
635            Action::None
636        });
637
638        // If there are not enough bytes in the input iterator, return and signal that we need more.
639        if !matches!(action, Action::None) {
640            return action;
641        }
642    }
643
644    let bits = l.bit_buf & ((1 << amount) - 1);
645    l.bit_buf >>= amount;
646    l.num_bits -= amount;
647    f(l, bits)
648}
649
650#[inline]
651fn pad_to_bytes<F>(l: &mut LocalVars, in_iter: &mut InputWrapper, flags: u32, f: F) -> Action
652where
653    F: FnOnce(&mut LocalVars) -> Action,
654{
655    let num_bits = l.num_bits & 7;
656    read_bits(l, num_bits, in_iter, flags, |l, _| f(l))
657}
658
659#[inline]
660const fn end_of_input(flags: u32) -> Action {
661    Action::End(if flags & TINFL_FLAG_HAS_MORE_INPUT != 0 {
662        TINFLStatus::NeedsMoreInput
663    } else {
664        TINFLStatus::FailedCannotMakeProgress
665    })
666}
667
668#[inline]
669fn undo_bytes(l: &mut LocalVars, max: u32) -> u32 {
670    let res = cmp::min(l.num_bits >> 3, max);
671    l.num_bits -= res << 3;
672    res
673}
674
675fn start_static_table(r: &mut DecompressorOxide) {
676    r.table_sizes[LITLEN_TABLE] = 288;
677    r.table_sizes[DIST_TABLE] = 32;
678    r.code_size_literal[0..144].fill(8);
679    r.code_size_literal[144..256].fill(9);
680    r.code_size_literal[256..280].fill(7);
681    r.code_size_literal[280..288].fill(8);
682    r.code_size_dist[0..32].fill(5);
683}
684
/// Reverse the bits of `n` using the cpu's bit-reversal support.
#[cfg(any(
    feature = "rustc-dep-of-std",
    target_arch = "aarch64",
    target_arch = "arm64ec",
    target_arch = "loongarch64"
))]
fn reverse_bits(n: u32) -> u32 {
    // Lookup is not used when building as part of std to avoid wasting space
    // for lookup table in every rust binary
    // as it's only used for backtraces in the cold path
    // - see #152

    // armv7 and newer, and loongarch have a cpu instruction for bit reversal so
    // it's preferable to just use that on those architectures.
    n.reverse_bits()
}
701
/// Reverse the bits of `n` using a table of pre-reversed 9-bit values.
///
/// # Panics
/// Panics if `n >= 512`; callers use `u32::reverse_bits` directly for
/// larger values (see `init_tree`).
#[cfg(not(any(
    feature = "rustc-dep-of-std",
    target_arch = "aarch64",
    target_arch = "arm64ec",
    target_arch = "loongarch64"
)))]
fn reverse_bits(n: u32) -> u32 {
    // Table is computed at compile time; one entry per 9-bit value.
    static REVERSED_BITS_LOOKUP: [u32; 512] = {
        let mut table = [0; 512];

        let mut i = 0;
        while i < 512 {
            table[i] = (i as u32).reverse_bits();
            i += 1;
        }

        table
    };
    REVERSED_BITS_LOOKUP[n as usize]
}
722
/// Build the huffman table for the table index currently in `r.block_type`,
/// then continue with the next table (dist, then litlen) until all tables
/// needed for the block are built.
///
/// Returns `None` on invalid table data, `Some(Action::Jump(..))` with either
/// the next decoding state or a failure state otherwise.
fn init_tree(r: &mut DecompressorOxide, l: &mut LocalVars) -> Option<Action> {
    loop {
        let bt = r.block_type as usize;

        let code_sizes = match bt {
            LITLEN_TABLE => &mut r.code_size_literal[..],
            DIST_TABLE => &mut r.code_size_dist,
            HUFFLEN_TABLE => &mut r.code_size_huffman,
            _ => return None,
        };
        let table = &mut r.tables[bt];

        // Number of codes of each length (1-15 bits).
        let mut total_symbols = [0u16; 16];
        // First canonical code value for each code length.
        let mut next_code = [0u32; 17];
        const INVALID_CODE: i16 = 1 << 9 | 286;
        // Set the values in the fast table to return a
        // non-zero length and an invalid symbol instead of zero
        // so that we do not have to have a check for a zero
        // code length in the hot code path later
        // and can instead error out on the invalid symbol check
        // on bogus input.
        table.look_up.fill(INVALID_CODE);
        // If we are initializing the huffman code length we can skip
        // this since these codes can't be longer than 3 bits
        // and thus only use the fast table and this table won't be accessed so
        // there is no point clearing it.
        // TODO: Avoid creating this table at all.
        if bt != HUFFLEN_TABLE {
            table.tree.fill(0);
        }

        let table_size = r.table_sizes[bt] as usize;
        if table_size > code_sizes.len() {
            return None;
        }
        // Count the number of codes of each length.
        for &code_size in &code_sizes[..table_size] {
            let cs = code_size as usize;
            if cs >= total_symbols.len() {
                return None;
            }
            total_symbols[cs] += 1;
        }

        let mut used_symbols = 0;
        let mut total = 0u32;
        // Count up the total number of used lengths and check that the table is not under or over-subscribed.
        for (&ts, next) in total_symbols.iter().zip(next_code[1..].iter_mut()).skip(1) {
            used_symbols += ts;
            total += u32::from(ts);
            total <<= 1;
            *next = total;
        }

        //
        // While it's not explicitly stated in the spec, a hufflen table
        // with a single length (or none) would be invalid as there needs to be
        // at minimum a length for both a non-zero length huffman code for the end of block symbol
        // and one of the codes to represent 0 to make sense - so just reject that here as well.
        //
        // The distance table is allowed to have a single distance code though according to the spec it is
        // supposed to be accompanied by a second dummy code. It can also be empty indicating no used codes.
        //
        // The literal/length table can not be empty as there has to be an end of block symbol,
        // The standard doesn't specify that there should be a dummy code in case of a single
        // symbol (i.e an empty block). Normally that's not an issue though the code will have
        // to take that into account later on in case of malformed input.
        if total != 65_536 && (used_symbols > 1 || bt == HUFFLEN_TABLE) {
            return Some(Action::Jump(BadTotalSymbols));
        }

        let mut tree_next = -1;
        for symbol_index in 0..table_size {
            let code_size = code_sizes[symbol_index];
            if code_size == 0 || usize::from(code_size) >= next_code.len() {
                continue;
            }

            // Assign this symbol the next canonical code of its length.
            let cur_code = next_code[code_size as usize];
            next_code[code_size as usize] += 1;

            let n = cur_code & (u32::MAX >> (32 - code_size));

            let mut rev_code = if n < 512 {
                // Using a lookup table
                // for a small speedup here,
                // Seems to only really make a difference on very short
                // inputs however.
                // 512 seems to be around a sweet spot.
                reverse_bits(n)
            } else {
                n.reverse_bits()
            } >> (32 - code_size);

            if code_size <= FAST_LOOKUP_BITS {
                // Short code: replicate the entry for every bit pattern
                // that starts with this (bit-reversed) code.
                let k = (i16::from(code_size) << 9) | symbol_index as i16;
                while rev_code < FAST_LOOKUP_SIZE {
                    table.look_up[rev_code as usize] = k;
                    rev_code += 1 << code_size;
                }
                continue;
            }

            // Long code: the fast table entry points into the tree instead.
            let mut tree_cur = table.look_up[(rev_code & (FAST_LOOKUP_SIZE - 1)) as usize];
            if tree_cur == INVALID_CODE {
                table.look_up[(rev_code & (FAST_LOOKUP_SIZE - 1)) as usize] = tree_next;
                tree_cur = tree_next;
                tree_next -= 2;
            }

            rev_code >>= FAST_LOOKUP_BITS - 1;
            for _ in FAST_LOOKUP_BITS + 1..code_size {
                rev_code >>= 1;
                tree_cur -= (rev_code & 1) as i16;
                let tree_index = (-tree_cur - 1) as usize;
                if tree_index >= table.tree.len() {
                    return None;
                }
                if table.tree[tree_index] == 0 {
                    table.tree[tree_index] = tree_next;
                    tree_cur = tree_next;
                    tree_next -= 2;
                } else {
                    tree_cur = table.tree[tree_index];
                }
            }

            // Store the symbol in the leaf node.
            rev_code >>= 1;
            tree_cur -= (rev_code & 1) as i16;
            let tree_index = (-tree_cur - 1) as usize;
            if tree_index >= table.tree.len() {
                return None;
            }
            table.tree[tree_index] = symbol_index as i16;
        }

        if r.block_type == HUFFLEN_TABLE as u8 {
            l.counter = 0;
            return Some(Action::Jump(ReadLitlenDistTablesCodeSize));
        }

        if r.block_type == LITLEN_TABLE as u8 {
            break;
        }
        r.block_type -= 1;
    }

    l.counter = 0;

    Some(Action::Jump(DecodeLitlen))
}
873
// A helper macro for generating the state machine.
//
// As Rust doesn't have fallthrough on matches, we have to return to the match statement
// and jump for each state change. (Which would ideally be optimized away, but often isn't.)
//
// `$state` is the state variable of the machine, `$state_machine` is the label of the
// outer state-machine loop, and `$f` is an expression evaluating to an `Action`.
macro_rules! generate_state {
    ($state: ident, $state_machine: tt, $f: expr) => {
        loop {
            match $f {
                // Stay in the current state: loop around and evaluate `$f` again.
                Action::None => continue,
                // Switch to `new_state` and re-enter the outer state-machine loop
                // so its match dispatches on the new state.
                Action::Jump(new_state) => {
                    $state = new_state;
                    continue $state_machine;
                },
                // Stop the state machine entirely, making `result` the value of
                // the labeled state-machine loop.
                Action::End(result) => break $state_machine result,
            }
        }
    };
}
892
/// Local copies of the most frequently used decompressor fields, kept together
/// so the hot loops can work on stack/register values instead of reading and
/// writing through the decompressor struct on every step (see the copy made at
/// the start of `decompress` and `decompress_fast`).
#[derive(Copy, Clone)]
struct LocalVars {
    // Input bits that have been read from the input but not yet consumed.
    pub bit_buf: BitBuffer,
    // Number of bits in `bit_buf` that are currently valid.
    pub num_bits: u32,
    // Distance of the current match; some states also reuse this as scratch
    // storage (e.g. a decoded code-size symbol or a raw byte).
    pub dist: u32,
    // Multi-purpose counter: match length, decoded symbol, or number of
    // bytes/codes remaining, depending on the current state.
    pub counter: u32,
    // Number of extra bits to read for the current length/distance code.
    pub num_extra: u8,
}
901
/// Copy `match_len` bytes within `out_slice`, reading from `source_pos` and
/// writing to `out_pos`, masking source indices with `out_buf_size_mask`
/// (all ones for a non-wrapping buffer, `len - 1` for a wrapping one).
///
/// The copy is performed so that reads observe bytes written earlier in the
/// same call, which is what overlapping LZ77 matches require.
#[inline]
fn transfer(
    out_slice: &mut [u8],
    mut source_pos: usize,
    mut out_pos: usize,
    match_len: usize,
    out_buf_size_mask: usize,
) {
    // Absolute distance between the read and write cursors.
    let source_diff = if source_pos > out_pos {
        source_pos - out_pos
    } else {
        out_pos - source_pos
    };

    // Number of whole 4-byte groups; the 0-3 leftover bytes are done at the end.
    let num_words = match_len >> 2;
    let non_wrapping = out_buf_size_mask == usize::MAX;

    if non_wrapping && source_diff == 1 && out_pos > source_pos {
        // Special case that comes up surprisingly often: the source is exactly
        // one byte behind the destination, so the whole word-aligned portion of
        // the match is a run of a single value and reduces to a simple fill
        // (memset). Only really helps on very short inputs, but it's cheap.
        let fill_byte = out_slice[out_pos - 1];
        let fill_end = out_pos + num_words * 4;

        out_slice[out_pos..fill_end].fill(fill_byte);
        out_pos = fill_end;
        source_pos = fill_end - 1;
    } else if non_wrapping && source_diff >= 4 && out_pos > source_pos {
        // The cursors are at least one word apart, so each 4-byte group can be
        // moved with a single ranged copy instead of per-byte stores.
        for _ in 0..num_words {
            out_slice.copy_within(source_pos..=source_pos + 3, out_pos);
            source_pos += 4;
            out_pos += 4;
        }
    } else {
        // General (possibly wrapping or closely overlapping) case: copy byte by
        // byte in order, masking the source index so reads wrap around the
        // circular buffer.
        for _ in 0..num_words {
            for k in 0..4 {
                out_slice[out_pos + k] = out_slice[(source_pos + k) & out_buf_size_mask];
            }
            source_pos += 4;
            out_pos += 4;
        }
    }

    // Copy the remaining 0-3 bytes one at a time, preserving read-after-write
    // ordering for overlapping ranges.
    for k in 0..match_len & 3 {
        out_slice[out_pos + k] = out_slice[(source_pos + k) & out_buf_size_mask];
    }
}
959
/// Copy an LZ77 match of `match_len` bytes located `dist` bytes behind the
/// current output position `out_pos`, choosing the fastest copy strategy that
/// is valid for the given overlap/wrapping situation. `out_buf_size_mask` is
/// all ones for a non-wrapping buffer, or `len - 1` (power-of-two length) for
/// a wrapping one.
///
/// Presumes that there is at least match_len bytes in output left.
#[inline]
fn apply_match(
    out_slice: &mut [u8],
    out_pos: usize,
    dist: usize,
    match_len: usize,
    out_buf_size_mask: usize,
) {
    debug_assert!(out_pos.checked_add(match_len).unwrap() <= out_slice.len());

    // Start of the match; wraps around the (power-of-two sized) buffer when
    // the mask is not all ones.
    let source_pos = out_pos.wrapping_sub(dist) & out_buf_size_mask;

    if match_len == 3 {
        // Dedicated path for length-3 matches (the minimum deflate match length).
        let out_slice = Cell::from_mut(out_slice).as_slice_of_cells();
        if let Some(dst) = out_slice.get(out_pos..out_pos + 3) {
            // Moving bounds checks before any memory mutation allows the optimizer
            // combine them together.
            let src = out_slice
                .get(source_pos)
                .zip(out_slice.get((source_pos + 1) & out_buf_size_mask))
                .zip(out_slice.get((source_pos + 2) & out_buf_size_mask));
            if let Some(((a, b), c)) = src {
                // For correctness, the memory reads and writes have to be interleaved.
                // Cells make it possible for read and write references to overlap.
                dst[0].set(a.get());
                dst[1].set(b.get());
                dst[2].set(c.get());
            }
        }
        return;
    }

    if cfg!(not(any(target_arch = "x86", target_arch = "x86_64"))) {
        // We are not on x86 so copy manually.
        transfer(out_slice, source_pos, out_pos, match_len, out_buf_size_mask);
        return;
    }

    // Source starts at or after the destination and the ranges overlap
    // (a wrapped match), so use the order-preserving byte copy.
    if source_pos >= out_pos && (source_pos - out_pos) < match_len {
        transfer(out_slice, source_pos, out_pos, match_len, out_buf_size_mask);
    } else if match_len <= dist && source_pos + match_len < out_slice.len() {
        // Destination and source segments does not intersect and source does not wrap.
        // TODO: An invalid before start of data wrapping match reached here before
        // it was fixed (it wrapped around and ended overlapping again)- need
        // to check that we are not wrapping here.
        if source_pos < out_pos {
            // split_at_mut gives disjoint borrows of the source and destination
            // ranges so copy_from_slice (a plain memcpy) can be used.
            let (from_slice, to_slice) = out_slice.split_at_mut(out_pos);
            to_slice[..match_len].copy_from_slice(&from_slice[source_pos..source_pos + match_len]);
        } else {
            let (to_slice, from_slice) = out_slice.split_at_mut(source_pos);
            to_slice[out_pos..out_pos + match_len].copy_from_slice(&from_slice[..match_len]);
        }
    } else {
        // Overlapping or wrapping: fall back to the generic copy routine.
        transfer(out_slice, source_pos, out_pos, match_len, out_buf_size_mask);
    }
}
1017
/// Fast inner decompression loop which is run while there is at least
/// 259 bytes left in the output buffer, and at least 14 bytes left in the input buffer
/// (the maximum one literal plus one full match can consume — see the comment
/// inside the loop for the exact bit accounting).
///
/// This was inspired by a similar optimization in zlib, which uses this info to do
/// faster unchecked copies of multiple bytes at a time.
/// Currently we don't do this here, but this function does avoid having to jump through the
/// big match loop on each state change(as rust does not have fallthrough or gotos at the moment),
/// and already improves decompression speed a fair bit.
fn decompress_fast(
    r: &mut DecompressorOxide,
    in_iter: &mut InputWrapper,
    out_buf: &mut OutputBuffer,
    flags: u32,
    local_vars: &mut LocalVars,
    out_buf_size_mask: usize,
) -> (TINFLStatus, State) {
    // Make a local copy of the most used variables, to avoid having to update and read from values
    // in a random memory location and to encourage more register use.
    let mut l = *local_vars;
    let mut state;

    let status: TINFLStatus = 'o: loop {
        state = State::DecodeLitlen;
        loop {
            // This function assumes that there is at least 259 bytes left in the output buffer,
            // and that there is at least 14 bytes left in the input buffer. 14 input bytes:
            // 15 (prev lit) + 15 (length) + 5 (length extra) + 15 (dist)
            // + 29 + 32 (left in bit buf, including last 13 dist extra) = 111 bits < 14 bytes
            // We need the one extra byte as we may write one length and one full match
            // before checking again.
            if out_buf.bytes_left() < 259 || in_iter.bytes_left() < 14 {
                // The guarantees no longer hold; hand control back to the
                // slower, fully checked state machine.
                state = State::DecodeLitlen;
                break 'o TINFLStatus::Done;
            }

            fill_bit_buffer(&mut l, in_iter);

            // Decode the next litlen symbol: either a literal byte or the
            // length part of a length/distance pair.
            let (symbol, code_len) = r.tables[LITLEN_TABLE].lookup(l.bit_buf);
            l.counter = symbol as u32;
            l.bit_buf >>= code_len;
            l.num_bits -= code_len;

            if (l.counter & 256) != 0 {
                // The symbol is not a literal.
                break;
            } else {
                // If we have a 32-bit buffer we need to read another two bytes now
                // to have enough bits to keep going.
                if cfg!(not(target_pointer_width = "64")) {
                    fill_bit_buffer(&mut l, in_iter);
                }

                let (symbol, code_len) = r.tables[LITLEN_TABLE].lookup(l.bit_buf);
                l.bit_buf >>= code_len;
                l.num_bits -= code_len;
                // The previous symbol was a literal, so write it directly and check
                // the next one.
                out_buf.write_byte(l.counter as u8);
                if (symbol & 256) != 0 {
                    l.counter = symbol as u32;
                    // The symbol is a length value.
                    break;
                } else {
                    // The symbol is a literal, so write it directly and continue.
                    out_buf.write_byte(symbol as u8);
                }
            }
        }

        // Mask the top bits since they may contain length info.
        l.counter &= 511;
        if l.counter == 256 {
            // We hit the end of block symbol.
            state.begin(BlockDone);
            break 'o TINFLStatus::Done;
        } else if l.counter > 285 {
            // Invalid code.
            // We already verified earlier that the code is > 256.
            state.begin(InvalidLitlen);
            break 'o TINFLStatus::Failed;
        } else {
            // The symbol was a length code.
            // # Optimization
            // Mask the value to avoid bounds checks
            // While the maximum is checked, the compiler isn't able to know that the
            // value won't wrap around here.
            l.num_extra = LENGTH_EXTRA[(l.counter - 257) as usize & BASE_EXTRA_MASK];
            l.counter = u32::from(LENGTH_BASE[(l.counter - 257) as usize & BASE_EXTRA_MASK]);
            // Length and distance codes have a number of extra bits depending on
            // the base, which together with the base gives us the exact value.

            // We need to make sure we have at least 33 (so min 5 bytes) bits in the buffer at this spot.
            fill_bit_buffer(&mut l, in_iter);
            if l.num_extra != 0 {
                // Add the extra length bits to the base length.
                let extra_bits = l.bit_buf & ((1 << l.num_extra) - 1);
                l.bit_buf >>= l.num_extra;
                l.num_bits -= u32::from(l.num_extra);
                l.counter += extra_bits as u32;
            }

            // We found a length code, so a distance code should follow.

            if cfg!(not(target_pointer_width = "64")) {
                fill_bit_buffer(&mut l, in_iter);
            }

            let (mut symbol, code_len) = r.tables[DIST_TABLE].lookup(l.bit_buf);
            symbol &= 511;
            l.bit_buf >>= code_len;
            l.num_bits -= code_len;
            if symbol > 29 {
                // Distance codes above 29 are invalid per the deflate spec.
                state.begin(InvalidDist);
                break 'o TINFLStatus::Failed;
            }

            l.num_extra = num_extra_bits_for_distance_code(symbol as u8);
            l.dist = u32::from(DIST_BASE[symbol as usize]);

            if l.num_extra != 0 {
                // Add the extra distance bits to the base distance.
                fill_bit_buffer(&mut l, in_iter);
                let extra_bits = l.bit_buf & ((1 << l.num_extra) - 1);
                l.bit_buf >>= l.num_extra;
                l.num_bits -= u32::from(l.num_extra);
                l.dist += extra_bits as u32;
            }

            let position = out_buf.position();
            if (l.dist as usize > out_buf.position()
                && (flags & TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF != 0))
                || (l.dist as usize > out_buf.get_ref().len())
            {
                // We encountered a distance that refers a position before
                // the start of the decoded data, so we can't continue.
                state.begin(DistanceOutOfBounds);
                break TINFLStatus::Failed;
            }

            // Copy the match into the output; the 259-byte space check at the
            // top of the loop guarantees it fits.
            apply_match(
                out_buf.get_mut(),
                position,
                l.dist as usize,
                l.counter as usize,
                out_buf_size_mask,
            );

            out_buf.set_position(position + l.counter as usize);
        }
    };

    // Write the cached local state back out for the caller.
    *local_vars = l;
    (status, state)
}
1171
1172/// Main decompression function. Keeps decompressing data from `in_buf` until the `in_buf` is
1173/// empty, `out` is full, the end of the deflate stream is hit, or there is an error in the
1174/// deflate stream.
1175///
1176/// # Arguments
1177///
1178/// `r` is a [`DecompressorOxide`] struct with the state of this stream.
1179///
1180/// `in_buf` is a reference to the compressed data that is to be decompressed. The decompressor will
1181/// start at the first byte of this buffer.
1182///
1183/// `out` is a reference to the buffer that will store the decompressed data, and that
1184/// stores previously decompressed data if any.
1185///
1186/// * The offset given by `out_pos` indicates where in the output buffer slice writing should start.
1187/// * If [`TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF`] is not set, the output buffer is used in a
1188///   wrapping manner, and it's size is required to be a power of 2.
1189/// * The decompression function normally needs access to 32KiB of the previously decompressed data
1190///   (or to the beginning of the decompressed data if less than 32KiB has been decompressed.)
1191///     - If this data is not available, decompression may fail.
1192///     - Some deflate compressors allow specifying a window size which limits match distances to
1193///       less than this, or alternatively an RLE mode where matches will only refer to the previous byte
1194///       and thus allows a smaller output buffer. The window size can be specified in the zlib
1195///       header structure, however, the header data should not be relied on to be correct.
1196///
1197/// `flags` indicates settings and status to the decompression function.
1198/// * The [`TINFL_FLAG_HAS_MORE_INPUT`] has to be specified if more compressed data is to be provided
1199///   in a subsequent call to this function.
1200/// * See the the [`inflate_flags`] module for details on other flags.
1201///
1202/// # Returns
1203///
1204/// Returns a tuple containing the status of the compressor, the number of input bytes read, and the
1205/// number of bytes output to `out`.
1206///
1207/// This function shouldn't panic pending any bugs.
1208pub fn decompress(
1209    r: &mut DecompressorOxide,
1210    in_buf: &[u8],
1211    out: &mut [u8],
1212    out_pos: usize,
1213    flags: u32,
1214) -> (TINFLStatus, usize, usize) {
1215    let out_buf_size_mask = if flags & TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF != 0 {
1216        usize::MAX
1217    } else {
1218        // In the case of zero len, any attempt to write would produce HasMoreOutput,
1219        // so to gracefully process the case of there really being no output,
1220        // set the mask to all zeros.
1221        out.len().saturating_sub(1)
1222    };
1223
1224    // Ensure the output buffer's size is a power of 2, unless the output buffer
1225    // is large enough to hold the entire output file (in which case it doesn't
1226    // matter).
1227    // Also make sure that the output buffer position is not past the end of the output buffer.
1228    if (out_buf_size_mask.wrapping_add(1) & out_buf_size_mask) != 0 || out_pos > out.len() {
1229        return (TINFLStatus::BadParam, 0, 0);
1230    }
1231
1232    let mut in_iter = InputWrapper::from_slice(in_buf);
1233
1234    let mut state = r.state;
1235
1236    let mut out_buf = OutputBuffer::from_slice_and_pos(out, out_pos);
1237
1238    // Make a local copy of the important variables here so we can work with them on the stack.
1239    let mut l = LocalVars {
1240        bit_buf: r.bit_buf,
1241        num_bits: r.num_bits,
1242        dist: r.dist,
1243        counter: r.counter,
1244        num_extra: r.num_extra,
1245    };
1246
1247    let mut status = 'state_machine: loop {
1248        match state {
1249            Start => generate_state!(state, 'state_machine, {
1250                l.bit_buf = 0;
1251                l.num_bits = 0;
1252                l.dist = 0;
1253                l.counter = 0;
1254                l.num_extra = 0;
1255                r.z_header0 = 0;
1256                r.z_header1 = 0;
1257                r.z_adler32 = 1;
1258                r.check_adler32 = 1;
1259                if flags & TINFL_FLAG_PARSE_ZLIB_HEADER != 0 {
1260                    Action::Jump(State::ReadZlibCmf)
1261                } else {
1262                    Action::Jump(State::ReadBlockHeader)
1263                }
1264            }),
1265
1266            ReadZlibCmf => generate_state!(state, 'state_machine, {
1267                read_byte(&mut in_iter, flags, |cmf| {
1268                    r.z_header0 = u32::from(cmf);
1269                    Action::Jump(State::ReadZlibFlg)
1270                })
1271            }),
1272
1273            ReadZlibFlg => generate_state!(state, 'state_machine, {
1274                read_byte(&mut in_iter, flags, |flg| {
1275                    r.z_header1 = u32::from(flg);
1276                    validate_zlib_header(r.z_header0, r.z_header1, flags, out_buf_size_mask)
1277                })
1278            }),
1279
1280            // Read the block header and jump to the relevant section depending on the block type.
1281            ReadBlockHeader => generate_state!(state, 'state_machine, {
1282                read_bits(&mut l, 3, &mut in_iter, flags, |l, bits| {
1283                    r.finish = (bits & 1) as u8;
1284                    r.block_type = ((bits >> 1) & 3) as u8;
1285                    match r.block_type {
1286                        0 => Action::Jump(BlockTypeNoCompression),
1287                        1 => {
1288                            start_static_table(r);
1289                            init_tree(r, l).unwrap_or(Action::End(TINFLStatus::Failed))
1290                        },
1291                        2 => {
1292                            l.counter = 0;
1293                            Action::Jump(ReadTableSizes)
1294                        },
1295                        3 => Action::Jump(BlockTypeUnexpected),
1296                        _ => unreachable!()
1297                    }
1298                })
1299            }),
1300
1301            // Raw/Stored/uncompressed block.
1302            BlockTypeNoCompression => generate_state!(state, 'state_machine, {
1303                pad_to_bytes(&mut l, &mut in_iter, flags, |l| {
1304                    l.counter = 0;
1305                    Action::Jump(RawHeader)
1306                })
1307            }),
1308
1309            // Check that the raw block header is correct.
1310            RawHeader => generate_state!(state, 'state_machine, {
1311                if l.counter < 4 {
1312                    // Read block length and block length check.
1313                    if l.num_bits != 0 {
1314                        read_bits(&mut l, 8, &mut in_iter, flags, |l, bits| {
1315                            r.raw_header[l.counter as usize] = bits as u8;
1316                            l.counter += 1;
1317                            Action::None
1318                        })
1319                    } else {
1320                        read_byte(&mut in_iter, flags, |byte| {
1321                            r.raw_header[l.counter as usize] = byte;
1322                            l.counter += 1;
1323                            Action::None
1324                        })
1325                    }
1326                } else {
1327                    // Check if the length value of a raw block is correct.
1328                    // The 2 first (2-byte) words in a raw header are the length and the
1329                    // ones complement of the length.
1330                    let length = u16::from(r.raw_header[0]) | (u16::from(r.raw_header[1]) << 8);
1331                    let check = u16::from(r.raw_header[2]) | (u16::from(r.raw_header[3]) << 8);
1332                    let valid = length == !check;
1333                    l.counter = length.into();
1334
1335                    if !valid {
1336                        Action::Jump(BadRawLength)
1337                    } else if l.counter == 0 {
1338                        // Empty raw block. Sometimes used for synchronization.
1339                        Action::Jump(BlockDone)
1340                    } else if l.num_bits != 0 {
1341                        // There is some data in the bit buffer, so we need to write that first.
1342                        Action::Jump(RawReadFirstByte)
1343                    } else {
1344                        // The bit buffer is empty, so memcpy the rest of the uncompressed data from
1345                        // the block.
1346                        Action::Jump(RawMemcpy1)
1347                    }
1348                }
1349            }),
1350
1351            // Read the byte from the bit buffer.
1352            RawReadFirstByte => generate_state!(state, 'state_machine, {
1353                read_bits(&mut l, 8, &mut in_iter, flags, |l, bits| {
1354                    l.dist = bits as u32;
1355                    Action::Jump(RawStoreFirstByte)
1356                })
1357            }),
1358
1359            // Write the byte we just read to the output buffer.
1360            RawStoreFirstByte => generate_state!(state, 'state_machine, {
1361                if out_buf.bytes_left() == 0 {
1362                    Action::End(TINFLStatus::HasMoreOutput)
1363                } else {
1364                    out_buf.write_byte(l.dist as u8);
1365                    l.counter -= 1;
1366                    if l.counter == 0 || l.num_bits == 0 {
1367                        Action::Jump(RawMemcpy1)
1368                    } else {
1369                        // There is still some data left in the bit buffer that needs to be output.
1370                        // TODO: Changed this to jump to `RawReadfirstbyte` rather than
1371                        // `RawStoreFirstByte` as that seemed to be the correct path, but this
1372                        // needs testing.
1373                        Action::Jump(RawReadFirstByte)
1374                    }
1375                }
1376            }),
1377
1378            RawMemcpy1 => generate_state!(state, 'state_machine, {
1379                if l.counter == 0 {
1380                    Action::Jump(BlockDone)
1381                } else if out_buf.bytes_left() == 0 {
1382                    Action::End(TINFLStatus::HasMoreOutput)
1383                } else {
1384                    Action::Jump(RawMemcpy2)
1385                }
1386            }),
1387
1388            RawMemcpy2 => generate_state!(state, 'state_machine, {
1389                if in_iter.bytes_left() > 0 {
1390                    // Copy as many raw bytes as possible from the input to the output using memcpy.
1391                    // Raw block lengths are limited to 64 * 1024, so casting through usize and u32
1392                    // is not an issue.
1393                    let space_left = out_buf.bytes_left();
1394                    let bytes_to_copy = cmp::min(cmp::min(
1395                        space_left,
1396                        in_iter.bytes_left()),
1397                        l.counter as usize
1398                    );
1399
1400                    out_buf.write_slice(&in_iter.as_slice()[..bytes_to_copy]);
1401
1402                    in_iter.advance(bytes_to_copy);
1403                    l.counter -= bytes_to_copy as u32;
1404                    Action::Jump(RawMemcpy1)
1405                } else {
1406                    end_of_input(flags)
1407                }
1408            }),
1409
1410            // Read how many huffman codes/symbols are used for each table.
1411            ReadTableSizes => generate_state!(state, 'state_machine, {
1412                if l.counter < 3 {
1413                    let num_bits = [5, 5, 4][l.counter as usize];
1414                    read_bits(&mut l, num_bits, &mut in_iter, flags, |l, bits| {
1415                        r.table_sizes[l.counter as usize] =
1416                            bits as u16 + MIN_TABLE_SIZES[l.counter as usize];
1417                        l.counter += 1;
1418                        Action::None
1419                    })
1420                } else {
1421                    r.code_size_huffman.fill(0);
1422                    l.counter = 0;
1423                    // Check that the litlen and distance are within spec.
1424                    // litlen table should be <=286 acc to the RFC and
1425                    // additionally zlib rejects dist table sizes larger than 30.
1426                    // NOTE this the final sizes after adding back predefined values, not
1427                    // raw value in the data.
1428                    // See miniz_oxide issue #130 and https://github.com/madler/zlib/issues/82.
1429                    if r.table_sizes[LITLEN_TABLE] <= 286 && r.table_sizes[DIST_TABLE] <= 30 {
1430                        Action::Jump(ReadHufflenTableCodeSize)
1431                    }
1432                    else {
1433                        Action::Jump(BadDistOrLiteralTableLength)
1434                    }
1435                }
1436            }),
1437
1438            // Read the 3-bit lengths of the huffman codes describing the huffman code lengths used
1439            // to decode the lengths of the main tables.
1440            ReadHufflenTableCodeSize => generate_state!(state, 'state_machine, {
1441                if l.counter < r.table_sizes[HUFFLEN_TABLE].into() {
1442                    read_bits(&mut l, 3, &mut in_iter, flags, |l, bits| {
1443                        // These lengths are not stored in a normal ascending order, but rather one
1444                        // specified by the deflate specification intended to put the most used
1445                        // values at the front as trailing zero lengths do not have to be stored.
1446                        r.code_size_huffman[HUFFMAN_LENGTH_ORDER[l.counter as usize] as usize] =
1447                                bits as u8;
1448                        l.counter += 1;
1449                        Action::None
1450                    })
1451                } else {
1452                    r.table_sizes[HUFFLEN_TABLE] = MAX_HUFF_SYMBOLS_2 as u16;
1453                    init_tree(r, &mut l).unwrap_or(Action::End(TINFLStatus::Failed))
1454                }
1455            }),
1456
1457            ReadLitlenDistTablesCodeSize => generate_state!(state, 'state_machine, {
1458                if l.counter < u32::from(r.table_sizes[LITLEN_TABLE]) + u32::from(r.table_sizes[DIST_TABLE]) {
1459                    decode_huffman_code(
1460                        r, &mut l, HUFFLEN_TABLE,
1461                        flags, &mut in_iter, |r, l, symbol| {
1462                            l.dist = symbol as u32;
1463                            if l.dist < 16 {
1464                                r.len_codes[l.counter as usize] = l.dist as u8;
1465                                l.counter += 1;
1466                                Action::None
1467                            } else if l.dist == 16 && l.counter == 0 {
1468                                Action::Jump(BadCodeSizeDistPrevLookup)
1469                            } else {
1470                                l.num_extra = [2, 3, 7][l.dist as usize - 16];
1471                                Action::Jump(ReadExtraBitsCodeSize)
1472                            }
1473                        }
1474                    )
1475                } else if l.counter != u32::from(r.table_sizes[LITLEN_TABLE]) + u32::from(r.table_sizes[DIST_TABLE]) {
1476                    Action::Jump(BadCodeSizeSum)
1477                } else {
1478                    r.code_size_literal[..r.table_sizes[LITLEN_TABLE] as usize]
1479                        .copy_from_slice(&r.len_codes[..r.table_sizes[LITLEN_TABLE] as usize]);
1480
1481                    let dist_table_start = r.table_sizes[LITLEN_TABLE] as usize;
1482                    let dist_table_end = (r.table_sizes[LITLEN_TABLE] +
1483                                          r.table_sizes[DIST_TABLE]) as usize;
1484                    r.code_size_dist[..r.table_sizes[DIST_TABLE] as usize]
1485                        .copy_from_slice(&r.len_codes[dist_table_start..dist_table_end]);
1486
1487                    r.block_type -= 1;
1488                    init_tree(r, &mut l).unwrap_or(Action::End(TINFLStatus::Failed))
1489                }
1490            }),
1491
1492            ReadExtraBitsCodeSize => generate_state!(state, 'state_machine, {
1493                let num_extra = l.num_extra.into();
1494                read_bits(&mut l, num_extra, &mut in_iter, flags, |l, mut extra_bits| {
1495                    // Mask to avoid a bounds check.
1496                    extra_bits += [3, 3, 11][(l.dist as usize - 16) & 3];
1497                    let val = if l.dist == 16 {
1498                        r.len_codes[l.counter as usize - 1]
1499                    } else {
1500                        0
1501                    };
1502
1503                    r.len_codes[
1504                            l.counter as usize..l.counter as usize + extra_bits as usize
1505                        ].fill(val);
1506                    l.counter += extra_bits as u32;
1507                    Action::Jump(ReadLitlenDistTablesCodeSize)
1508                })
1509            }),
1510
1511            DecodeLitlen => generate_state!(state, 'state_machine, {
1512                if in_iter.bytes_left() < 4 || out_buf.bytes_left() < 2 {
1513                    // See if we can decode a literal with the data we have left.
1514                    // Jumps to next state (WriteSymbol) if successful.
1515                    decode_huffman_code(
1516                        r,
1517                        &mut l,
1518                        LITLEN_TABLE,
1519                        flags,
1520                        &mut in_iter,
1521                        |_r, l, symbol| {
1522                            l.counter = symbol as u32;
1523                            Action::Jump(WriteSymbol)
1524                        },
1525                    )
1526                } else if
1527                // If there is enough space, use the fast inner decompression
1528                // function.
1529                    out_buf.bytes_left() >= 259 &&
1530                    in_iter.bytes_left() >= 14
1531                {
1532                    let (status, new_state) = decompress_fast(
1533                        r,
1534                        &mut in_iter,
1535                        &mut out_buf,
1536                        flags,
1537                        &mut l,
1538                        out_buf_size_mask,
1539                    );
1540
1541                    state = new_state;
1542                    if status == TINFLStatus::Done {
1543                        Action::Jump(new_state)
1544                    } else {
1545                        Action::End(status)
1546                    }
1547                } else {
1548                    fill_bit_buffer(&mut l, &mut in_iter);
1549
1550                    let (symbol, code_len) = r.tables[LITLEN_TABLE].lookup(l.bit_buf);
1551
1552                    l.counter = symbol as u32;
1553                    l.bit_buf >>= code_len;
1554                    l.num_bits -= code_len;
1555
1556                    if (l.counter & 256) != 0 {
1557                        // The symbol is not a literal.
1558                        Action::Jump(HuffDecodeOuterLoop1)
1559                    } else {
1560                        // If we have a 32-bit buffer we need to read another two bytes now
1561                        // to have enough bits to keep going.
1562                        if cfg!(not(target_pointer_width = "64")) {
1563                            fill_bit_buffer(&mut l, &mut in_iter);
1564                        }
1565
1566                        let (symbol, code_len) = r.tables[LITLEN_TABLE].lookup(l.bit_buf);
1567
1568                            l.bit_buf >>= code_len;
1569                            l.num_bits -= code_len;
1570                            // The previous symbol was a literal, so write it directly and check
1571                            // the next one.
1572                            out_buf.write_byte(l.counter as u8);
1573                            if (symbol & 256) != 0 {
1574                                l.counter = symbol as u32;
1575                                // The symbol is a length value.
1576                                Action::Jump(HuffDecodeOuterLoop1)
1577                            } else {
1578                                // The symbol is a literal, so write it directly and continue.
1579                                out_buf.write_byte(symbol as u8);
1580                                Action::None
1581                            }
1582
1583                    }
1584
1585                }
1586            }),
1587
1588            WriteSymbol => generate_state!(state, 'state_machine, {
1589                if l.counter >= 256 {
1590                    Action::Jump(HuffDecodeOuterLoop1)
1591                } else if out_buf.bytes_left() > 0 {
1592                    out_buf.write_byte(l.counter as u8);
1593                    Action::Jump(DecodeLitlen)
1594                } else {
1595                    Action::End(TINFLStatus::HasMoreOutput)
1596                }
1597            }),
1598
1599            HuffDecodeOuterLoop1 => generate_state!(state, 'state_machine, {
1600                // Mask the top bits since they may contain length info.
1601                l.counter &= 511;
1602
1603                if l.counter
1604                    == 256 {
1605                    // We hit the end of block symbol.
1606                    Action::Jump(BlockDone)
1607                } else if l.counter > 285 {
1608                    // Invalid code.
1609                    // We already verified earlier that the code is > 256.
1610                    Action::Jump(InvalidLitlen)
1611                } else {
1612                    // # Optimization
1613                    // Mask the value to avoid bounds checks
1614                    // We could use get_unchecked later if can statically verify that
1615                    // this will never go out of bounds.
1616                    l.num_extra =
1617                        LENGTH_EXTRA[(l.counter - 257) as usize & BASE_EXTRA_MASK];
1618                    l.counter = u32::from(LENGTH_BASE[(l.counter - 257) as usize & BASE_EXTRA_MASK]);
1619                    // Length and distance codes have a number of extra bits depending on
1620                    // the base, which together with the base gives us the exact value.
1621                    if l.num_extra != 0 {
1622                        Action::Jump(ReadExtraBitsLitlen)
1623                    } else {
1624                        Action::Jump(DecodeDistance)
1625                    }
1626                }
1627            }),
1628
1629            ReadExtraBitsLitlen => generate_state!(state, 'state_machine, {
1630                let num_extra = l.num_extra.into();
1631                read_bits(&mut l, num_extra, &mut in_iter, flags, |l, extra_bits| {
1632                    l.counter += extra_bits as u32;
1633                    Action::Jump(DecodeDistance)
1634                })
1635            }),
1636
1637            DecodeDistance => generate_state!(state, 'state_machine, {
1638                // Try to read a huffman code from the input buffer and look up what
1639                // length code the decoded symbol refers to.
1640                decode_huffman_code(r, &mut l, DIST_TABLE, flags, &mut in_iter, |_r, l, symbol| {
                    // # Optimization - transform the value into usize here before the check so
1642                    // the compiler can optimize the bounds check later - ideally it should
1643                    // know that the value can't be negative from earlier in the
1644                    // decode_huffman_code function but it seems it may not be able
1645                    // to make the assumption that it can't be negative and thus
1646                    // overflow if it's converted after the check.
1647                    let symbol = symbol as usize;
1648                    if symbol > 29 {
1649                        // Invalid distance code.
1650                        return Action::Jump(InvalidDist)
1651                    }
1652                    l.num_extra = num_extra_bits_for_distance_code(symbol as u8);
1653                    l.dist = u32::from(DIST_BASE[symbol]);
1654                    if l.num_extra != 0 {
                        // ReadExtraBitsDistance
1656                        Action::Jump(ReadExtraBitsDistance)
1657                    } else {
1658                        Action::Jump(HuffDecodeOuterLoop2)
1659                    }
1660                })
1661            }),
1662
1663            ReadExtraBitsDistance => generate_state!(state, 'state_machine, {
1664                let num_extra = l.num_extra.into();
1665                read_bits(&mut l, num_extra, &mut in_iter, flags, |l, extra_bits| {
1666                    l.dist += extra_bits as u32;
1667                    Action::Jump(HuffDecodeOuterLoop2)
1668                })
1669            }),
1670
1671            HuffDecodeOuterLoop2 => generate_state!(state, 'state_machine, {
1672                if (l.dist as usize > out_buf.position() &&
1673                    (flags & TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF != 0)) || (l.dist as usize > out_buf.get_ref().len())
1674                {
                    // We encountered a distance that refers to a position before
                    // the start of the decoded data, so we can't continue.
1677                    Action::Jump(DistanceOutOfBounds)
1678                } else {
1679                    let out_pos = out_buf.position();
1680                    let source_pos = out_buf.position()
1681                        .wrapping_sub(l.dist as usize) & out_buf_size_mask;
1682
1683                    let out_len = out_buf.get_ref().len();
1684                    let match_end_pos = out_buf.position() + l.counter as usize;
1685
1686                    if match_end_pos > out_len ||
1687                        // miniz doesn't do this check here. Not sure how it makes sure
1688                        // that this case doesn't happen.
1689                        (source_pos >= out_pos && (source_pos - out_pos) < l.counter as usize)
1690                    {
1691                        // Not enough space for all of the data in the output buffer,
1692                        // so copy what we have space for.
1693                        if l.counter == 0 {
1694                            Action::Jump(DecodeLitlen)
1695                        } else {
1696                            Action::Jump(WriteLenBytesToEnd)
1697                        }
1698                    } else {
1699                        apply_match(
1700                            out_buf.get_mut(),
1701                            out_pos,
1702                            l.dist as usize,
1703                            l.counter as usize,
1704                            out_buf_size_mask
1705                        );
1706                        out_buf.set_position(out_pos + l.counter as usize);
1707                        Action::Jump(DecodeLitlen)
1708                    }
1709                }
1710            }),
1711
1712            WriteLenBytesToEnd => generate_state!(state, 'state_machine, {
1713                if out_buf.bytes_left() > 0 {
1714                    let out_pos = out_buf.position();
1715                    let source_pos = out_buf.position()
1716                        .wrapping_sub(l.dist as usize) & out_buf_size_mask;
1717
1718
1719                    let len = cmp::min(out_buf.bytes_left(), l.counter as usize);
1720
1721                    transfer(out_buf.get_mut(), source_pos, out_pos, len, out_buf_size_mask);
1722
1723                    out_buf.set_position(out_pos + len);
1724                    l.counter -= len as u32;
1725                    if l.counter == 0 {
1726                        Action::Jump(DecodeLitlen)
1727                    } else {
1728                        Action::None
1729                    }
1730                } else {
1731                    Action::End(TINFLStatus::HasMoreOutput)
1732                }
1733            }),
1734
1735            BlockDone => generate_state!(state, 'state_machine, {
1736                // End once we've read the last block.
1737                if r.finish != 0 {
1738                    pad_to_bytes(&mut l, &mut in_iter, flags, |_| Action::None);
1739
1740                    let in_consumed = in_buf.len() - in_iter.bytes_left();
1741                    let undo = undo_bytes(&mut l, in_consumed as u32) as usize;
1742                    in_iter = InputWrapper::from_slice(in_buf[in_consumed - undo..].iter().as_slice());
1743
1744                    l.bit_buf &= ((1 as BitBuffer) << l.num_bits) - 1;
1745                    debug_assert_eq!(l.num_bits, 0);
1746
1747                    if flags & TINFL_FLAG_PARSE_ZLIB_HEADER != 0 {
1748                        l.counter = 0;
1749                        Action::Jump(ReadAdler32)
1750                    } else {
1751                        Action::Jump(DoneForever)
1752                    }
1753                } else {
1754                    Action::Jump(ReadBlockHeader)
1755                }
1756            }),
1757
1758            ReadAdler32 => generate_state!(state, 'state_machine, {
1759                if l.counter < 4 {
1760                    if l.num_bits != 0 {
1761                        read_bits(&mut l, 8, &mut in_iter, flags, |l, bits| {
1762                            r.z_adler32 <<= 8;
1763                            r.z_adler32 |= bits as u32;
1764                            l.counter += 1;
1765                            Action::None
1766                        })
1767                    } else {
1768                        read_byte(&mut in_iter, flags, |byte| {
1769                            r.z_adler32 <<= 8;
1770                            r.z_adler32 |= u32::from(byte);
1771                            l.counter += 1;
1772                            Action::None
1773                        })
1774                    }
1775                } else {
1776                    Action::Jump(DoneForever)
1777                }
1778            }),
1779
1780            // We are done.
1781            DoneForever => break TINFLStatus::Done,
1782
1783            // Anything else indicates failure.
1784            // BadZlibHeader | BadRawLength | BadDistOrLiteralTableLength | BlockTypeUnexpected |
1785            // DistanceOutOfBounds |
1786            // BadTotalSymbols | BadCodeSizeDistPrevLookup | BadCodeSizeSum | InvalidLitlen |
1787            // InvalidDist | InvalidCodeLen
1788            _ => break TINFLStatus::Failed,
1789        };
1790    };
1791
1792    let in_undo = if status != TINFLStatus::NeedsMoreInput
1793        && status != TINFLStatus::FailedCannotMakeProgress
1794    {
1795        undo_bytes(&mut l, (in_buf.len() - in_iter.bytes_left()) as u32) as usize
1796    } else {
1797        0
1798    };
1799
1800    // Make sure HasMoreOutput overrides NeedsMoreInput if the output buffer is full.
1801    // (Unless the missing input is the adler32 value in which case we don't need to write anything.)
1802    // TODO: May want to see if we can do this in a better way.
1803    if status == TINFLStatus::NeedsMoreInput
1804        && out_buf.bytes_left() == 0
1805        && state != State::ReadAdler32
1806    {
1807        status = TINFLStatus::HasMoreOutput
1808    }
1809
1810    r.state = state;
1811    r.bit_buf = l.bit_buf;
1812    r.num_bits = l.num_bits;
1813    r.dist = l.dist;
1814    r.counter = l.counter;
1815    r.num_extra = l.num_extra;
1816
1817    r.bit_buf &= ((1 as BitBuffer) << r.num_bits) - 1;
1818
    // If this is a zlib stream, update the adler32 checksum with the decompressed
    // bytes if requested.
1821    let need_adler = if (flags & TINFL_FLAG_IGNORE_ADLER32) == 0 {
1822        flags & (TINFL_FLAG_PARSE_ZLIB_HEADER | TINFL_FLAG_COMPUTE_ADLER32) != 0
1823    } else {
1824        // If TINFL_FLAG_IGNORE_ADLER32 is enabled, ignore the checksum.
1825        false
1826    };
1827    if need_adler && status as i32 >= 0 {
1828        let out_buf_pos = out_buf.position();
1829        r.check_adler32 = update_adler32(r.check_adler32, &out_buf.get_ref()[out_pos..out_buf_pos]);
1830
1831        // disabled so that random input from fuzzer would not be rejected early,
1832        // before it has a chance to reach interesting parts of code
1833        if !cfg!(fuzzing) {
1834            // Once we are done, check if the checksum matches with the one provided in the zlib header.
1835            if status == TINFLStatus::Done
1836                && flags & TINFL_FLAG_PARSE_ZLIB_HEADER != 0
1837                && r.check_adler32 != r.z_adler32
1838            {
1839                status = TINFLStatus::Adler32Mismatch;
1840            }
1841        }
1842    }
1843
1844    (
1845        status,
1846        in_buf.len() - in_iter.bytes_left() - in_undo,
1847        out_buf.position() - out_pos,
1848    )
1849}
1850
#[cfg(test)]
mod test {
    use super::*;

    //TODO: Fix these.

    /// Test helper: run `decompress` from position 0 of `output_buffer` and
    /// return the status, the remaining (unconsumed) part of the input slice,
    /// and the number of bytes written to `output_buffer`.
    fn tinfl_decompress_oxide<'i>(
        r: &mut DecompressorOxide,
        input_buffer: &'i [u8],
        output_buffer: &mut [u8],
        flags: u32,
    ) -> (TINFLStatus, &'i [u8], usize) {
        let (status, in_pos, out_pos) = decompress(r, input_buffer, output_buffer, 0, flags);
        (status, &input_buffer[in_pos..], out_pos)
    }

    #[test]
    fn decompress_zlib() {
        // A zlib-wrapped deflate stream that decodes to "Hello, zlib!".
        let encoded = [
            120, 156, 243, 72, 205, 201, 201, 215, 81, 168, 202, 201, 76, 82, 4, 0, 27, 101, 4, 19,
        ];
        let flags = TINFL_FLAG_COMPUTE_ADLER32 | TINFL_FLAG_PARSE_ZLIB_HEADER;

        let mut b = DecompressorOxide::new();
        const LEN: usize = 32;
        let mut b_buf = [0; LEN];

        // This should fail with the out buffer being too small.
        let b_status = tinfl_decompress_oxide(&mut b, &encoded[..], &mut b_buf, flags);

        assert_eq!(b_status.0, TINFLStatus::Failed);

        let flags = flags | TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF;

        b = DecompressorOxide::new();

        // With TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF set this should no longer fail.
        let b_status = tinfl_decompress_oxide(&mut b, &encoded[..], &mut b_buf, flags);

        assert_eq!(b_buf[..b_status.2], b"Hello, zlib!"[..]);
        assert_eq!(b_status.0, TINFLStatus::Done);
    }

    #[cfg(feature = "with-alloc")]
    #[test]
    fn raw_block() {
        const LEN: usize = 64;

        let text = b"Hello, zlib!";
        // Hand-build a single stored (uncompressed) deflate block:
        // a header byte of 1, then the payload length and its complement as
        // little-endian 16-bit values, followed by the raw payload bytes.
        let encoded = {
            let len = text.len();
            let notlen = !len;
            let mut encoded = vec![
                1,
                len as u8,
                (len >> 8) as u8,
                notlen as u8,
                (notlen >> 8) as u8,
            ];
            encoded.extend_from_slice(&text[..]);
            encoded
        };

        //let flags = TINFL_FLAG_COMPUTE_ADLER32 | TINFL_FLAG_PARSE_ZLIB_HEADER |
        let flags = TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF;

        let mut b = DecompressorOxide::new();

        let mut b_buf = [0; LEN];

        let b_status = tinfl_decompress_oxide(&mut b, &encoded[..], &mut b_buf, flags);
        assert_eq!(b_buf[..b_status.2], text[..]);
        assert_eq!(b_status.0, TINFLStatus::Done);
    }

    /// Look up `bit_buf` in `table` and mask the returned value to the lower
    /// 9 bits, dropping the length information stored in the upper bits of the
    /// symbol (see `HuffmanTable::fast_lookup`).
    fn masked_lookup(table: &HuffmanTable, bit_buf: BitBuffer) -> (i32, u32) {
        let ret = table.lookup(bit_buf);
        (ret.0 & 511, ret.1)
    }

    #[test]
    fn fixed_table_lookup() {
        // Build the fixed (static) huffman tables and spot-check a selection
        // of literal/length and distance codes against their expected
        // (symbol, code length) pairs.
        let mut d = DecompressorOxide::new();
        d.block_type = 1;
        start_static_table(&mut d);
        let mut l = LocalVars {
            bit_buf: d.bit_buf,
            num_bits: d.num_bits,
            dist: d.dist,
            counter: d.counter,
            num_extra: d.num_extra,
        };
        init_tree(&mut d, &mut l).unwrap();
        let llt = &d.tables[LITLEN_TABLE];
        let dt = &d.tables[DIST_TABLE];
        assert_eq!(masked_lookup(llt, 0b00001100), (0, 8));
        assert_eq!(masked_lookup(llt, 0b00011110), (72, 8));
        assert_eq!(masked_lookup(llt, 0b01011110), (74, 8));
        assert_eq!(masked_lookup(llt, 0b11111101), (143, 8));
        assert_eq!(masked_lookup(llt, 0b000010011), (144, 9));
        assert_eq!(masked_lookup(llt, 0b111111111), (255, 9));
        assert_eq!(masked_lookup(llt, 0b00000000), (256, 7));
        assert_eq!(masked_lookup(llt, 0b1110100), (279, 7));
        assert_eq!(masked_lookup(llt, 0b00000011), (280, 8));
        assert_eq!(masked_lookup(llt, 0b11100011), (287, 8));

        assert_eq!(masked_lookup(dt, 0), (0, 5));
        assert_eq!(masked_lookup(dt, 20), (5, 5));
    }

    // Only run this test with alloc enabled as it uses a larger buffer.
    #[cfg(feature = "with-alloc")]
    /// Decompress `input` into a 32 KiB output buffer and assert both the
    /// returned status and the state the decompressor ended in. `zlib`
    /// controls whether a zlib header is expected.
    fn check_result(input: &[u8], expected_status: TINFLStatus, expected_state: State, zlib: bool) {
        let mut r = DecompressorOxide::default();
        let mut output_buf = vec![0; 1024 * 32];
        let flags = if zlib {
            inflate_flags::TINFL_FLAG_PARSE_ZLIB_HEADER
        } else {
            0
        } | TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF
            | TINFL_FLAG_HAS_MORE_INPUT;
        let (d_status, _in_bytes, _out_bytes) =
            decompress(&mut r, input, &mut output_buf, 0, flags);
        assert_eq!(expected_status, d_status);
        assert_eq!(expected_state, r.state);
    }

    #[cfg(feature = "with-alloc")]
    #[test]
    fn bogus_input() {
        use self::check_result as cr;
        const F: TINFLStatus = TINFLStatus::Failed;
        const OK: TINFLStatus = TINFLStatus::Done;
        // Bad CM.
        cr(&[0x77, 0x85], F, State::BadZlibHeader, true);
        // Bad window size (but check is correct).
        cr(&[0x88, 0x98], F, State::BadZlibHeader, true);
        // Bad check bits.
        cr(&[0x78, 0x98], F, State::BadZlibHeader, true);

        // Too many code lengths. (From inflate library issues)
        cr(
            b"M\xff\xffM*\xad\xad\xad\xad\xad\xad\xad\xcd\xcd\xcdM",
            F,
            State::BadDistOrLiteralTableLength,
            false,
        );

        // Bad CLEN (also from inflate library issues)
        cr(
            b"\xdd\xff\xff*M\x94ffffffffff",
            F,
            State::BadDistOrLiteralTableLength,
            false,
        );

        // Port of inflate coverage tests from zlib-ng
        // https://github.com/Dead2/zlib-ng/blob/develop/test/infcover.c
        let c = |a, b, c| cr(a, b, c, false);

        // Invalid uncompressed/raw block length.
        c(&[0, 0, 0, 0, 0], F, State::BadRawLength);
        // Ok empty uncompressed block.
        c(&[3, 0], OK, State::DoneForever);
        // Invalid block type.
        c(&[6], F, State::BlockTypeUnexpected);
        // Ok uncompressed block.
        c(&[1, 1, 0, 0xfe, 0xff, 0], OK, State::DoneForever);
        // Too many litlens, we handle this later than zlib, so this test won't
        // give the same result.
        //        c(&[0xfc, 0, 0], F, State::BadTotalSymbols);
        // Invalid set of code lengths - TODO Check if this is the correct error for this.
        c(&[4, 0, 0xfe, 0xff], F, State::BadTotalSymbols);
        // Invalid repeat in list of code lengths.
        // (Try to repeat a non-existent code.)
        c(&[4, 0, 0x24, 0x49, 0], F, State::BadCodeSizeDistPrevLookup);
        // Missing end of block code (should we have a separate error for this?) - fails on further input
        //    c(&[4, 0, 0x24, 0xe9, 0xff, 0x6d], F, State::BadTotalSymbols);
        // Invalid set of literals/lengths
        c(
            &[
                4, 0x80, 0x49, 0x92, 0x24, 0x49, 0x92, 0x24, 0x71, 0xff, 0xff, 0x93, 0x11, 0,
            ],
            F,
            State::BadTotalSymbols,
        );
        // Invalid set of distances _ needsmoreinput
        // c(&[4, 0x80, 0x49, 0x92, 0x24, 0x49, 0x92, 0x24, 0x0f, 0xb4, 0xff, 0xff, 0xc3, 0x84], F, State::BadTotalSymbols);
        // Invalid distance code
        c(&[2, 0x7e, 0xff, 0xff], F, State::InvalidDist);

        // Distance refers to position before the start
        c(
            &[0x0c, 0xc0, 0x81, 0, 0, 0, 0, 0, 0x90, 0xff, 0x6b, 0x4, 0],
            F,
            State::DistanceOutOfBounds,
        );

        // Trailer
        // Bad gzip trailer checksum GZip header not handled by miniz_oxide
        //cr(&[0x1f, 0x8b, 0x08 ,0 ,0 ,0 ,0 ,0 ,0 ,0 ,0x03, 0, 0, 0, 0, 0x01], F, State::BadCRC, false)
        // Bad gzip trailer length
        //cr(&[0x1f, 0x8b, 0x08 ,0 ,0 ,0 ,0 ,0 ,0 ,0 ,0x03, 0, 0, 0, 0, 0, 0, 0, 0, 0x01], F, State::BadCRC, false)
    }

    #[test]
    fn empty_output_buffer_non_wrapping() {
        let encoded = [
            120, 156, 243, 72, 205, 201, 201, 215, 81, 168, 202, 201, 76, 82, 4, 0, 27, 101, 4, 19,
        ];
        let flags = TINFL_FLAG_COMPUTE_ADLER32
            | TINFL_FLAG_PARSE_ZLIB_HEADER
            | TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF;
        let mut r = DecompressorOxide::new();
        let mut output_buf: [u8; 0] = [];
        // Check that we handle an empty buffer properly and not panicking.
        // https://github.com/Frommi/miniz_oxide/issues/23
        let res = decompress(&mut r, &encoded, &mut output_buf, 0, flags);
        assert_eq!(res, (TINFLStatus::HasMoreOutput, 4, 0));
    }

    #[test]
    fn empty_output_buffer_wrapping() {
        let encoded = [
            0x73, 0x49, 0x4d, 0xcb, 0x49, 0x2c, 0x49, 0x55, 0x00, 0x11, 0x00,
        ];
        let flags = TINFL_FLAG_COMPUTE_ADLER32;
        let mut r = DecompressorOxide::new();
        let mut output_buf: [u8; 0] = [];
        // Check that we handle an empty buffer properly and not panicking.
        // https://github.com/Frommi/miniz_oxide/issues/23
        let res = decompress(&mut r, &encoded, &mut output_buf, 0, flags);
        assert_eq!(res, (TINFLStatus::HasMoreOutput, 2, 0));
    }

    #[test]
    fn dist_extra_bits() {
        use self::num_extra_bits_for_distance_code;
        // Number of extra bits for each distance code.
        const DIST_EXTRA: [u8; 29] = [
            0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12,
            12, 13,
        ];

        // The computed value for every distance code must match the table.
        for (i, &dist) in DIST_EXTRA.iter().enumerate() {
            assert_eq!(dist, num_extra_bits_for_distance_code(i as u8));
        }
    }

    #[test]
    fn check_tree() {
        let mut r = DecompressorOxide::new();
        let mut l = LocalVars {
            bit_buf: 0,
            num_bits: 0,
            dist: 0,
            counter: 0,
            num_extra: 0,
        };

        // Minimal code-length table: two codes of length 1, table size 4.
        r.code_size_huffman[0] = 1;
        r.code_size_huffman[1] = 1;
        //r.code_size_huffman[2] = 3;
        //r.code_size_huffman[3] = 3;
        //r.code_size_huffman[1] = 4;
        r.block_type = HUFFLEN_TABLE as u8;
        r.table_sizes[HUFFLEN_TABLE] = 4;
        let res = init_tree(&mut r, &mut l).unwrap();

        // NOTE(review): non-Jump actions are silently accepted here, so the
        // assertion below only runs when init_tree requests a state change.
        let status = match res {
            Action::Jump(s) => s,
            _ => {
                //println!("issue");
                return;
            }
        };
        //println!("status {:?}", status);
        assert!(status != BadTotalSymbols);
    }
}