1use alloc::boxed::Box;
4use core::convert::TryInto;
5use core::{cmp, mem};
6
7use super::super::*;
8use super::deflate_flags::*;
9use super::CompressionLevel;
10use crate::deflate::buffer::{
11 update_hash, HashBuffers, LocalBuf, LZ_CODE_BUF_SIZE, LZ_DICT_FULL_SIZE, LZ_HASH_BITS,
12 LZ_HASH_SHIFT, LZ_HASH_SIZE, OUT_BUF_SIZE,
13};
14use crate::deflate::stored::compress_stored;
15use crate::deflate::zlib;
16use crate::shared::{update_adler32, HUFFMAN_LENGTH_ORDER, MZ_ADLER32_INIT};
17use crate::DataFormat;
18
// Internal result alias; the zero-sized `Error` carries no payload — failure
// details are reported to callers through the compressor's status values.
type Result<T, E = Error> = core::result::Result<T, E>;

// Zero-sized marker error used by the internal `Result` alias.
pub(crate) struct Error {}

// Mask for the bits of the compression flags that encode the probe count.
pub(crate) const MAX_PROBES_MASK: u32 = 0xFFF;

// Deflate limits Huffman code lengths to 15 bits.
const MAX_SUPPORTED_HUFF_CODESIZE: usize = 15;
// Length symbol for a match of length (index + 3): maps match length minus the
// 3-byte minimum (0..=255) to the deflate length code (257..=285).
#[rustfmt::skip]
const LEN_SYM: [u16; 256] = [
    257, 258, 259, 260, 261, 262, 263, 264, 265, 265, 266, 266, 267, 267, 268, 268,
    269, 269, 269, 269, 270, 270, 270, 270, 271, 271, 271, 271, 272, 272, 272, 272,
    273, 273, 273, 273, 273, 273, 273, 273, 274, 274, 274, 274, 274, 274, 274, 274,
    275, 275, 275, 275, 275, 275, 275, 275, 276, 276, 276, 276, 276, 276, 276, 276,
    277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277,
    278, 278, 278, 278, 278, 278, 278, 278, 278, 278, 278, 278, 278, 278, 278, 278,
    279, 279, 279, 279, 279, 279, 279, 279, 279, 279, 279, 279, 279, 279, 279, 279,
    280, 280, 280, 280, 280, 280, 280, 280, 280, 280, 280, 280, 280, 280, 280, 280,
    281, 281, 281, 281, 281, 281, 281, 281, 281, 281, 281, 281, 281, 281, 281, 281,
    281, 281, 281, 281, 281, 281, 281, 281, 281, 281, 281, 281, 281, 281, 281, 281,
    282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282,
    282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282,
    283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283,
    283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283,
    284, 284, 284, 284, 284, 284, 284, 284, 284, 284, 284, 284, 284, 284, 284, 284,
    284, 284, 284, 284, 284, 284, 284, 284, 284, 284, 284, 284, 284, 284, 284, 285
];

// Number of extra bits that accompany the length symbol for a match of
// length (index + 3). The final entry is 0: length 258 is encoded exactly.
#[rustfmt::skip]
const LEN_EXTRA: [u8; 256] = [
    0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1,
    2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
    3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
    3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
    4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
    4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
    4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
    4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
    5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
    5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
    5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
    5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
    5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
    5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
    5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
    5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 0
];

// Distance symbol for small match distances (indexed by distance - 1,
// covering distances 1..=512).
#[rustfmt::skip]
const SMALL_DIST_SYM: [u8; 512] = [
    0, 1, 2, 3, 4, 4, 5, 5, 6, 6, 6, 6, 7, 7, 7, 7,
    8, 8, 8, 8, 8, 8, 8, 8, 9, 9, 9, 9, 9, 9, 9, 9,
    10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
    11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
    12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12,
    12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12,
    13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
    13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
    14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
    14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
    14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
    14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
    15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
    15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
    15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
    15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
    16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
    16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
    16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
    16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
    16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
    16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
    16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
    16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
    17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
    17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
    17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
    17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
    17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
    17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
    17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
    17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17
];

// Number of extra distance bits for small match distances (same indexing as
// `SMALL_DIST_SYM`).
#[rustfmt::skip]
const SMALL_DIST_EXTRA: [u8; 512] = [
    0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
    4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
    5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
    5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
    6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
    6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
    6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
    6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
    7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
    7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
    7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
    7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
    7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
    7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
    7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
    7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7
];

// Distance symbol for larger match distances, indexed by the high bits of the
// distance (see the distance-encoding code in the compression loop for the
// exact shift used — assumed `(dist >> 8)`-style indexing; confirm at use site).
#[rustfmt::skip]
const LARGE_DIST_SYM: [u8; 128] = [
    0, 0, 18, 19, 20, 20, 21, 21, 22, 22, 22, 22, 23, 23, 23, 23,
    24, 24, 24, 24, 24, 24, 24, 24, 25, 25, 25, 25, 25, 25, 25, 25,
    26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
    27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27,
    28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
    28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
    29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
    29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29
];

// Number of extra distance bits for larger match distances (same indexing as
// `LARGE_DIST_SYM`).
#[rustfmt::skip]
const LARGE_DIST_EXTRA: [u8; 128] = [
    0, 0, 8, 8, 9, 9, 9, 9, 10, 10, 10, 10, 10, 10, 10, 10,
    11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
    12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12,
    12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12,
    13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
    13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
    13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
    13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13
];

// BITMASKS[n] has the low `n` bits set, for n in 0..=16.
#[rustfmt::skip]
const BITMASKS: [u32; 17] = [
    0x0000, 0x0001, 0x0003, 0x0007, 0x000F, 0x001F, 0x003F, 0x007F, 0x00FF,
    0x01FF, 0x03FF, 0x07FF, 0x0FFF, 0x1FFF, 0x3FFF, 0x7FFF, 0xFFFF
];

// Dictionary probe count for each compression level (0-10); higher levels
// search more candidate matches per position.
pub(crate) const NUM_PROBES: [u16; 11] = [0, 1, 6, 32, 16, 32, 128, 256, 512, 768, 1500];
163
// A symbol paired with a sort key (initially its frequency) used while
// building Huffman code lengths; `key` is repurposed in-place by
// `HuffmanOxide::calculate_minimum_redundancy`.
#[derive(Copy, Clone)]
struct SymFreq {
    key: u16,
    sym_index: u16,
}
169
pub mod deflate_flags {
    /// Emit a zlib (RFC 1950) header and trailing adler32 checksum.
    pub const TDEFL_WRITE_ZLIB_HEADER: u32 = 0x0000_1000;
    /// Keep an adler32 checksum of the input up to date.
    pub const TDEFL_COMPUTE_ADLER32: u32 = 0x0000_2000;
    /// Use faster greedy parsing instead of lazy matching.
    pub const TDEFL_GREEDY_PARSING_FLAG: u32 = 0x0000_4000;
    /// Allow output to vary between runs (legacy flag from miniz).
    pub const TDEFL_NONDETERMINISTIC_PARSING_FLAG: u32 = 0x0000_8000;
    /// Only look for matches at distance 1 (run-length encoding).
    pub const TDEFL_RLE_MATCHES: u32 = 0x0001_0000;
    /// Discard matches unlikely to pay for themselves.
    pub const TDEFL_FILTER_MATCHES: u32 = 0x0002_0000;
    /// Always emit static (fixed-Huffman) blocks.
    pub const TDEFL_FORCE_ALL_STATIC_BLOCKS: u32 = 0x0004_0000;
    /// Always emit raw (stored) blocks.
    pub const TDEFL_FORCE_ALL_RAW_BLOCKS: u32 = 0x0008_0000;
}
191
// Strategy setting, mirroring zlib's compression strategies.
#[repr(i32)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum CompressionStrategy {
    /// Normal mode.
    Default = 0,
    /// Favor Huffman coding over matching (for pre-filtered data).
    Filtered = 1,
    /// Huffman coding only, no match search.
    HuffmanOnly = 2,
    /// Distance-1 matches only (run-length encoding).
    RLE = 3,
    /// Static (fixed) Huffman tables only.
    Fixed = 4,
}
210
impl From<CompressionStrategy> for i32 {
    /// Return the raw discriminant value (the enum is `#[repr(i32)]`).
    #[inline(always)]
    fn from(value: CompressionStrategy) -> Self {
        value as i32
    }
}
217
// Flush modes for the low-level compressor, mirroring the relevant subset of
// `MZFlush` (note value 1 is intentionally unused — see `TDEFLFlush::new`).
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum TDEFLFlush {
    /// Compress as much as there is space for, then return.
    None = 0,

    /// Flush the current block to output, and byte-align it with an empty stored block.
    Sync = 2,

    /// Same as `Sync`, but reset the dictionary so following data does not depend on prior input.
    Full = 3,

    /// Compress and flush all remaining input, finishing the stream.
    Finish = 4,
}
238
239impl From<MZFlush> for TDEFLFlush {
240 fn from(flush: MZFlush) -> Self {
241 match flush {
242 MZFlush::None => TDEFLFlush::None,
243 MZFlush::Sync => TDEFLFlush::Sync,
244 MZFlush::Full => TDEFLFlush::Full,
245 MZFlush::Finish => TDEFLFlush::Finish,
246 _ => TDEFLFlush::None, }
248 }
249}
250
251impl TDEFLFlush {
252 pub const fn new(flush: i32) -> Result<Self, MZError> {
253 match flush {
254 0 => Ok(TDEFLFlush::None),
255 2 => Ok(TDEFLFlush::Sync),
256 3 => Ok(TDEFLFlush::Full),
257 4 => Ok(TDEFLFlush::Finish),
258 _ => Err(MZError::Param),
259 }
260 }
261}
262
// Return status of the low-level compression routines.
#[repr(i32)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum TDEFLStatus {
    /// Usage error (e.g. invalid parameters or calling after `Done`).
    BadParam = -2,

    /// The output callback reported failure.
    PutBufFailed = -1,

    /// Compression proceeded normally; more input may follow.
    Okay = 0,

    /// The stream was finished (after a `Finish` flush completed).
    Done = 1,
}
286
// Largest symbol count across the three Huffman tables (the lit/len table).
const MAX_HUFF_SYMBOLS: usize = 288;
// Hash-table index mask used by the fastest compression level.
const LEVEL1_HASH_SIZE_MASK: u32 = 4095;
// lit/len, distance, and code-length tables.
const MAX_HUFF_TABLES: usize = 3;
// 256 literals + end-of-block + 29 length symbols + 2 unused.
const MAX_HUFF_SYMBOLS_0: usize = 288;
// 30 distance symbols + 2 unused.
const MAX_HUFF_SYMBOLS_1: usize = 32;
// Symbols in the code-length alphabet (0..=18).
const MAX_HUFF_SYMBOLS_2: usize = 19;
// Size of the LZ77 sliding dictionary (deflate's 32 KiB window).
pub(crate) const LZ_DICT_SIZE: usize = 32_768;
// Mask for wrapping positions into the dictionary.
pub(crate) const LZ_DICT_SIZE_MASK: usize = (LZ_DICT_SIZE as u32 - 1) as usize;
// Matches shorter than this are emitted as literals.
pub(crate) const MIN_MATCH_LEN: u8 = 3;
// Longest match length encodable by deflate.
pub(crate) const MAX_MATCH_LEN: usize = 258;

// Default configuration: medium probe count plus a zlib header.
pub(crate) const DEFAULT_FLAGS: u32 = NUM_PROBES[4] as u32 | TDEFL_WRITE_ZLIB_HEADER;
309
/// Test helper: store `val` at `slice[pos..pos + 2]` in little-endian order.
/// Panics if fewer than two bytes are available at `pos`.
#[cfg(test)]
#[inline]
fn write_u16_le(val: u16, slice: &mut [u8], pos: usize) {
    slice[pos..pos + 2].copy_from_slice(&val.to_le_bytes());
}
316
/// Read a little-endian `u16` from `slice` at `pos`.
/// Panics (at compile time in const contexts) if `pos + 1` is out of bounds.
#[inline]
const fn read_u16_le<const N: usize>(slice: &[u8; N], pos: usize) -> u16 {
    u16::from_le_bytes([slice[pos], slice[pos + 1]])
}
323
/// Main compressor state, holding the pending LZ77 stream, configuration,
/// Huffman tables (boxed — they are large), and the sliding dictionary.
pub struct CompressorOxide {
    pub(crate) lz: LZOxide,
    pub(crate) params: ParamsOxide,
    pub(crate) huff: Box<HuffmanOxide>,
    pub(crate) dict: DictOxide,
}
333
impl CompressorOxide {
    /// Create a new compressor configured by the raw `flags` value
    /// (probe count in the low bits plus the `deflate_flags` bit flags).
    pub fn new(flags: u32) -> Self {
        CompressorOxide {
            lz: LZOxide::new(),
            params: ParamsOxide::new(flags),
            huff: Box::default(),
            dict: DictOxide::new(flags),
        }
    }

    /// Current adler32 checksum of the input data processed so far.
    pub const fn adler32(&self) -> u32 {
        self.params.adler32
    }

    /// Status returned by the most recent compression call.
    pub const fn prev_return_status(&self) -> TDEFLStatus {
        self.params.prev_return_status
    }

    /// Raw flag value the compressor was configured with.
    pub const fn flags(&self) -> i32 {
        self.params.flags as i32
    }

    /// Whether output is zlib-wrapped or raw deflate, derived from the flags.
    pub const fn data_format(&self) -> DataFormat {
        if (self.params.flags & TDEFL_WRITE_ZLIB_HEADER) != 0 {
            DataFormat::Zlib
        } else {
            DataFormat::Raw
        }
    }

    /// Reset all state so the compressor can start a fresh stream,
    /// keeping the current configuration (flags) and allocations.
    pub fn reset(&mut self) {
        self.lz = LZOxide::new();
        self.params.reset();
        // Overwrite in place rather than reallocating the boxed tables.
        *self.huff = HuffmanOxide::default();
        self.dict.reset();
    }

    /// Change the compression level, keeping the current data format.
    pub fn set_compression_level(&mut self, level: CompressionLevel) {
        let format = self.data_format();
        self.set_format_and_level(format, level as u8);
    }

    /// Change the compression level from a raw numeric level,
    /// keeping the current data format.
    pub fn set_compression_level_raw(&mut self, level: u8) {
        let format = self.data_format();
        self.set_format_and_level(format, level);
    }

    /// Reconfigure both data format and level by rebuilding the flag value;
    /// params and dictionary probe counts are updated to match.
    pub fn set_format_and_level(&mut self, data_format: DataFormat, level: u8) {
        let flags = create_comp_flags_from_zip_params(
            level.into(),
            data_format.to_window_bits(),
            CompressionStrategy::Default as i32,
        );
        self.params.update_flags(flags);
        self.dict.update_flags(flags);
    }
}
427
428impl Default for CompressorOxide {
429 fn default() -> Self {
432 CompressorOxide {
433 lz: LZOxide::new(),
434 params: ParamsOxide::new(DEFAULT_FLAGS),
435 huff: Box::default(),
436 dict: DictOxide::new(DEFAULT_FLAGS),
437 }
438 }
439}
440
/// Output sink backed by a user callback; the closure receives each chunk of
/// compressed bytes and returns `false` to abort.
pub struct CallbackFunc<'a> {
    pub put_buf_func: &'a mut dyn FnMut(&[u8]) -> bool,
}
445
446impl CallbackFunc<'_> {
447 fn flush_output(
448 &mut self,
449 saved_output: SavedOutputBufferOxide,
450 params: &mut ParamsOxide,
451 ) -> i32 {
452 let call_success = (self.put_buf_func)(¶ms.local_buf.b[0..saved_output.pos]);
456
457 if !call_success {
458 params.prev_return_status = TDEFLStatus::PutBufFailed;
459 return params.prev_return_status as i32;
460 }
461
462 params.flush_remaining as i32
463 }
464}
465
/// Output sink backed by a caller-provided byte slice.
struct CallbackBuf<'a> {
    pub out_buf: &'a mut [u8],
}
469
470impl CallbackBuf<'_> {
471 fn flush_output(
472 &mut self,
473 saved_output: SavedOutputBufferOxide,
474 params: &mut ParamsOxide,
475 ) -> i32 {
476 if saved_output.local {
477 let n = cmp::min(saved_output.pos, self.out_buf.len() - params.out_buf_ofs);
478 (self.out_buf[params.out_buf_ofs..params.out_buf_ofs + n])
479 .copy_from_slice(¶ms.local_buf.b[..n]);
480
481 params.out_buf_ofs += n;
482 if saved_output.pos != n {
483 params.flush_ofs = n as u32;
484 params.flush_remaining = (saved_output.pos - n) as u32;
485 }
486 } else {
487 params.out_buf_ofs += saved_output.pos;
488 }
489
490 params.flush_remaining as i32
491 }
492}
493
/// The two supported output destinations: a user callback or a byte slice.
enum CallbackOut<'a> {
    Func(CallbackFunc<'a>),
    Buf(CallbackBuf<'a>),
}
498
impl CallbackOut<'_> {
    /// Choose where the next chunk of output bits is written: directly into
    /// the caller's slice when at least `OUT_BUF_SIZE` bytes remain there,
    /// otherwise into the compressor's local staging buffer (flushed later).
    /// The usable length is `OUT_BUF_SIZE - 16`, leaving slack so bit-buffer
    /// flushes that write whole 8-byte words cannot run off the end.
    fn new_output_buffer<'b>(
        &'b mut self,
        local_buf: &'b mut [u8],
        out_buf_ofs: usize,
    ) -> OutputBufferOxide<'b> {
        let is_local;
        let buf_len = OUT_BUF_SIZE - 16;
        let chosen_buffer = match *self {
            CallbackOut::Buf(ref mut cb) if cb.out_buf.len() - out_buf_ofs >= OUT_BUF_SIZE => {
                is_local = false;
                &mut cb.out_buf[out_buf_ofs..out_buf_ofs + buf_len]
            }
            _ => {
                is_local = true;
                &mut local_buf[..buf_len]
            }
        };

        OutputBufferOxide {
            inner: chosen_buffer,
            inner_pos: 0,
            local: is_local,
            bit_buffer: 0,
            bits_in: 0,
        }
    }
}
527
/// Bundles the input slice, optional in/out size reporting pointers, and the
/// chosen output destination for one compression call.
pub(crate) struct CallbackOxide<'a> {
    in_buf: Option<&'a [u8]>,
    in_buf_size: Option<&'a mut usize>,
    out_buf_size: Option<&'a mut usize>,
    out: CallbackOut<'a>,
}
534
impl<'a> CallbackOxide<'a> {
    /// Build a callback writing compressed data into `out_buf`.
    fn new_callback_buf(in_buf: &'a [u8], out_buf: &'a mut [u8]) -> Self {
        CallbackOxide {
            in_buf: Some(in_buf),
            in_buf_size: None,
            out_buf_size: None,
            out: CallbackOut::Buf(CallbackBuf { out_buf }),
        }
    }

    /// Build a callback handing compressed data to a user function.
    fn new_callback_func(in_buf: &'a [u8], callback_func: CallbackFunc<'a>) -> Self {
        CallbackOxide {
            in_buf: Some(in_buf),
            in_buf_size: None,
            out_buf_size: None,
            out: CallbackOut::Func(callback_func),
        }
    }

    /// Report consumed/produced byte counts through the optional size
    /// pointers, if the caller registered any.
    fn update_size(&mut self, in_size: Option<usize>, out_size: Option<usize>) {
        if let (Some(in_size), Some(size)) = (in_size, self.in_buf_size.as_mut()) {
            **size = in_size;
        }

        if let (Some(out_size), Some(size)) = (out_size, self.out_buf_size.as_mut()) {
            **size = out_size
        }
    }

    /// Flush `saved_output.pos` staged bytes to the configured destination.
    /// A no-op (returning the pending count) when nothing was staged.
    fn flush_output(
        &mut self,
        saved_output: SavedOutputBufferOxide,
        params: &mut ParamsOxide,
    ) -> i32 {
        if saved_output.pos == 0 {
            return params.flush_remaining as i32;
        }

        // Record how much input has been consumed before handing out data.
        self.update_size(Some(params.src_pos), None);
        match self.out {
            CallbackOut::Func(ref mut cf) => cf.flush_output(saved_output, params),
            CallbackOut::Buf(ref mut cb) => cb.flush_output(saved_output, params),
        }
    }

    /// The input slice for this call, if any.
    pub(crate) fn buf(&mut self) -> Option<&'a [u8]> {
        self.in_buf
    }
}
584
/// Bit-level writer over a byte buffer. Bits accumulate LSB-first in
/// `bit_buffer` and are spilled into `inner` one byte at a time; `local`
/// records whether `inner` is the compressor's staging buffer or the caller's.
struct OutputBufferOxide<'a> {
    pub inner: &'a mut [u8],
    pub inner_pos: usize,
    pub local: bool,

    pub bit_buffer: u32,
    pub bits_in: u32,
}
593
impl OutputBufferOxide<'_> {
    /// Append the low `len` bits of `bits`, writing out any completed bytes.
    /// Panics (assert) if `bits` has bits set above `len`.
    fn put_bits(&mut self, bits: u32, len: u32) {
        assert!(bits <= ((1u32 << len) - 1u32));
        self.bit_buffer |= bits << self.bits_in;
        self.bits_in += len;

        // Spill whole bytes to the output buffer.
        while self.bits_in >= 8 {
            self.inner[self.inner_pos] = self.bit_buffer as u8;
            self.inner_pos += 1;
            self.bit_buffer >>= 8;
            self.bits_in -= 8;
        }
    }

    /// Append bits without spilling bytes; the caller must ensure the 32-bit
    /// accumulator has room and follow up with a flushing `put_bits` call.
    #[inline]
    fn put_bits_no_flush(&mut self, bits: u32, len: u32) {
        self.bit_buffer |= bits << self.bits_in;
        self.bits_in += len;
    }

    /// Snapshot position and pending bits so output can be rolled back.
    const fn save(&self) -> SavedOutputBufferOxide {
        SavedOutputBufferOxide {
            pos: self.inner_pos,
            bit_buffer: self.bit_buffer,
            bits_in: self.bits_in,
            local: self.local,
        }
    }

    /// Restore a snapshot taken with `save`.
    fn load(&mut self, saved: SavedOutputBufferOxide) {
        self.inner_pos = saved.pos;
        self.bit_buffer = saved.bit_buffer;
        self.bits_in = saved.bits_in;
        self.local = saved.local;
    }

    /// Pad with zero bits to the next byte boundary (no-op if already aligned).
    #[inline]
    fn pad_to_bytes(&mut self) {
        if self.bits_in != 0 {
            let len = 8 - self.bits_in;
            self.put_bits(0, len);
        }
    }

    /// Copy raw bytes into the output; requires byte alignment.
    #[inline]
    fn write_bytes(&mut self, bytes: &[u8]) {
        debug_assert_eq!(self.bits_in, 0);
        self.inner[self.inner_pos..self.inner_pos + bytes.len()].copy_from_slice(bytes);
        self.inner_pos += bytes.len();
    }
}
654
/// Snapshot of an `OutputBufferOxide`'s position and pending bit state,
/// used to roll back or hand off partially written output.
struct SavedOutputBufferOxide {
    pub pos: usize,
    pub bit_buffer: u32,
    pub bits_in: u32,
    pub local: bool,
}
661
/// Wider 64-bit accumulator used on the fast compression path, allowing
/// several codes to be queued before flushing to the output buffer.
struct BitBuffer {
    pub bit_buffer: u64,
    pub bits_in: u32,
}
666
impl BitBuffer {
    /// Append the low `len` bits of `bits`; the caller must guarantee the
    /// 64-bit accumulator has room (no overflow check here — hot path).
    fn put_fast(&mut self, bits: u64, len: u32) {
        self.bit_buffer |= bits << self.bits_in;
        self.bits_in += len;
    }

    /// Write the accumulator to the output and advance by the number of
    /// complete bytes it holds, keeping any leftover 0-7 bits queued.
    fn flush(&mut self, output: &mut OutputBufferOxide) -> Result<()> {
        let pos = output.inner_pos;
        {
            // Always copies a full 8-byte little-endian word; the extra bytes
            // beyond `bits_in / 8` are scratch and get overwritten later.
            // Panics if fewer than 8 bytes remain (the output buffer reserves
            // slack for this — see `new_output_buffer`).
            let inner = &mut output.inner[pos..pos + 8];
            let bytes = u64::to_le_bytes(self.bit_buffer);
            inner.copy_from_slice(&bytes);
        }
        // Advance only past the complete bytes, erroring on overflow/overrun.
        match output.inner_pos.checked_add((self.bits_in >> 3) as usize) {
            Some(n) if n <= output.inner.len() => output.inner_pos = n,
            _ => return Err(Error {}),
        }
        // Drop the flushed bytes; keep the remaining partial byte's bits.
        self.bit_buffer >>= self.bits_in & !7;
        self.bits_in &= 7;
        Ok(())
    }
}
690
/// Symbol frequencies, generated codes, and code sizes for the three
/// deflate Huffman tables (lit/len, distance, code-length).
pub(crate) struct HuffmanOxide {
    // Symbol occurrence counts, accumulated while emitting LZ codes.
    pub count: [[u16; MAX_HUFF_SYMBOLS]; MAX_HUFF_TABLES],
    // Bit-reversed canonical codes, ready to emit LSB-first.
    pub codes: [[u16; MAX_HUFF_SYMBOLS]; MAX_HUFF_TABLES],
    // Code length in bits for each symbol (0 = unused symbol).
    pub code_sizes: [[u8; MAX_HUFF_SYMBOLS]; MAX_HUFF_TABLES],
}
704
// Indices of the three tables inside `HuffmanOxide`.
const LITLEN_TABLE: usize = 0;
const DIST_TABLE: usize = 1;
const HUFF_CODES_TABLE: usize = 2;
711
/// State for run-length encoding the code-size list in a dynamic block
/// header (deflate code-length symbols 16/17/18).
struct Rle {
    // Length of the current run of zero code sizes.
    pub z_count: u32,
    // Length of the current run of repeated non-zero code sizes.
    pub repeat_count: u16,
    // The code size being repeated (0xFF before any size is seen).
    pub prev_code_size: u8,
}
718
impl Rle {
    /// Flush a pending run of repeated non-zero code sizes into
    /// `packed_code_sizes`: runs shorter than 3 are written literally,
    /// longer runs become symbol 16 plus a repeat count (biased by 3).
    /// Also updates the code-length table's symbol frequencies in `h`.
    fn prev_code_size(
        &mut self,
        packed_code_sizes: &mut [u8],
        packed_pos: &mut usize,
        h: &mut HuffmanOxide,
    ) -> Result<()> {
        // Shadows the free function `write` with a position-tracking closure.
        let mut write = |buf| write(buf, packed_code_sizes, packed_pos);
        let counts = &mut h.count[HUFF_CODES_TABLE];
        if self.repeat_count != 0 {
            if self.repeat_count < 3 {
                counts[self.prev_code_size as usize] =
                    counts[self.prev_code_size as usize].wrapping_add(self.repeat_count);
                let code = self.prev_code_size;
                write(&[code, code, code][..self.repeat_count as usize])?;
            } else {
                counts[16] = counts[16].wrapping_add(1);
                write(&[16, (self.repeat_count - 3) as u8][..])?;
            }
            self.repeat_count = 0;
        }

        Ok(())
    }

    /// Flush a pending run of zero code sizes: runs shorter than 3 are
    /// written literally, 3..=10 become symbol 17 (bias 3), longer runs
    /// become symbol 18 (bias 11). Also updates symbol frequencies in `h`.
    fn zero_code_size(
        &mut self,
        packed_code_sizes: &mut [u8],
        packed_pos: &mut usize,
        h: &mut HuffmanOxide,
    ) -> Result<()> {
        let mut write = |buf| write(buf, packed_code_sizes, packed_pos);
        let counts = &mut h.count[HUFF_CODES_TABLE];
        if self.z_count != 0 {
            if self.z_count < 3 {
                counts[0] = counts[0].wrapping_add(self.z_count as u16);
                write(&[0, 0, 0][..self.z_count as usize])?;
            } else if self.z_count <= 10 {
                counts[17] = counts[17].wrapping_add(1);
                write(&[17, (self.z_count - 3) as u8][..])?;
            } else {
                counts[18] = counts[18].wrapping_add(1);
                write(&[18, (self.z_count - 11) as u8][..])?;
            }
            self.z_count = 0;
        }

        Ok(())
    }
}
769
770fn write(src: &[u8], dst: &mut [u8], dst_pos: &mut usize) -> Result<()> {
771 match dst.get_mut(*dst_pos..*dst_pos + src.len()) {
772 Some(s) => s.copy_from_slice(src),
773 None => return Err(Error {}),
774 }
775 *dst_pos += src.len();
776 Ok(())
777}
778
impl Default for HuffmanOxide {
    /// All-zero tables: no symbols counted, no codes assigned.
    fn default() -> Self {
        HuffmanOxide {
            count: [[0; MAX_HUFF_SYMBOLS]; MAX_HUFF_TABLES],
            codes: [[0; MAX_HUFF_SYMBOLS]; MAX_HUFF_TABLES],
            code_sizes: [[0; MAX_HUFF_SYMBOLS]; MAX_HUFF_TABLES],
        }
    }
}
788
impl HuffmanOxide {
    /// Radix-sort symbols by their 16-bit `key` (frequency), low byte first.
    /// The second pass is skipped when every key fits in one byte. Returns
    /// whichever scratch slice ended up holding the sorted result.
    fn radix_sort_symbols<'a>(
        symbols0: &'a mut [SymFreq],
        symbols1: &'a mut [SymFreq],
    ) -> &'a mut [SymFreq] {
        let mut hist = [[0; 256]; 2];

        for freq in symbols0.iter() {
            hist[0][(freq.key & 0xFF) as usize] += 1;
            hist[1][((freq.key >> 8) & 0xFF) as usize] += 1;
        }

        let mut n_passes = 2;
        // If every high byte is zero, one pass suffices.
        if symbols0.len() == hist[1][0] {
            n_passes -= 1;
        }

        let mut current_symbols = symbols0;
        let mut new_symbols = symbols1;

        for (pass, hist_item) in hist.iter().enumerate().take(n_passes) {
            // Prefix-sum the histogram into output offsets.
            let mut offsets = [0; 256];
            let mut offset = 0;
            for i in 0..256 {
                offsets[i] = offset;
                offset += hist_item[i];
            }

            for sym in current_symbols.iter() {
                let j = ((sym.key >> (pass * 8)) & 0xFF) as usize;
                new_symbols[offsets[j]] = *sym;
                offsets[j] += 1;
            }

            mem::swap(&mut current_symbols, &mut new_symbols);
        }

        current_symbols
    }

    /// Compute minimum-redundancy (Huffman) code lengths in place over
    /// frequency-sorted symbols, following Moffat's in-place algorithm:
    /// the `key` field is successively reused as frequency, parent index,
    /// and finally the code length (depth).
    fn calculate_minimum_redundancy(symbols: &mut [SymFreq]) {
        match symbols.len() {
            0 => (),
            1 => symbols[0].key = 1,
            n => {
                // Phase 1: build the tree, storing parent links in `key`.
                symbols[0].key += symbols[1].key;
                let mut root = 0;
                let mut leaf = 2;
                for next in 1..n - 1 {
                    if (leaf >= n) || (symbols[root].key < symbols[leaf].key) {
                        symbols[next].key = symbols[root].key;
                        symbols[root].key = next as u16;
                        root += 1;
                    } else {
                        symbols[next].key = symbols[leaf].key;
                        leaf += 1;
                    }

                    if (leaf >= n) || (root < next && symbols[root].key < symbols[leaf].key) {
                        symbols[next].key = symbols[next].key.wrapping_add(symbols[root].key);
                        symbols[root].key = next as u16;
                        root += 1;
                    } else {
                        symbols[next].key = symbols[next].key.wrapping_add(symbols[leaf].key);
                        leaf += 1;
                    }
                }

                // Phase 2: convert parent links to depths, root downward.
                symbols[n - 2].key = 0;
                for next in (0..n - 2).rev() {
                    symbols[next].key = symbols[symbols[next].key as usize].key + 1;
                }

                // Phase 3: assign leaf depths from the internal-node depths.
                let mut avbl = 1;
                let mut used = 0;
                let mut dpth = 0;
                let mut root = (n - 2) as i32;
                let mut next = (n - 1) as i32;
                while avbl > 0 {
                    while (root >= 0) && (symbols[root as usize].key == dpth) {
                        used += 1;
                        root -= 1;
                    }
                    while avbl > used {
                        symbols[next as usize].key = dpth;
                        next -= 1;
                        avbl -= 1;
                    }
                    avbl = 2 * used;
                    dpth += 1;
                    used = 0;
                }
            }
        }
    }

    /// Clamp code lengths to `max_code_size`, redistributing counts so the
    /// Kraft inequality still holds (standard length-limiting adjustment).
    fn enforce_max_code_size(num_codes: &mut [i32], code_list_len: usize, max_code_size: usize) {
        if code_list_len <= 1 {
            return;
        }

        // Fold any over-long codes into the maximum length bucket.
        num_codes[max_code_size] += num_codes[max_code_size + 1..].iter().sum::<i32>();
        let total = num_codes[1..=max_code_size]
            .iter()
            .rev()
            .enumerate()
            .fold(0u32, |total, (i, &x)| total + ((x as u32) << i));

        // While the tree is over-subscribed, promote one shorter code to a
        // longer pair, reducing the total by one each iteration.
        for _ in (1 << max_code_size)..total {
            num_codes[max_code_size] -= 1;
            for i in (1..max_code_size).rev() {
                if num_codes[i] != 0 {
                    num_codes[i] -= 1;
                    num_codes[i + 1] += 2;
                    break;
                }
            }
        }
    }

    /// Build canonical, bit-reversed Huffman codes for one table.
    /// With `static_table` the existing `code_sizes` are used as-is;
    /// otherwise sizes are derived from the symbol frequencies in `count`,
    /// limited to `code_size_limit` bits.
    fn optimize_table(
        &mut self,
        table_num: usize,
        table_len: usize,
        code_size_limit: usize,
        static_table: bool,
    ) {
        let mut num_codes = [0i32; 32 + 1];
        let mut next_code = [0u32; MAX_SUPPORTED_HUFF_CODESIZE + 1];

        if static_table {
            for &code_size in &self.code_sizes[table_num][..table_len] {
                num_codes[code_size as usize] += 1;
            }
        } else {
            let mut symbols0 = [SymFreq {
                key: 0,
                sym_index: 0,
            }; MAX_HUFF_SYMBOLS];
            let mut symbols1 = [SymFreq {
                key: 0,
                sym_index: 0,
            }; MAX_HUFF_SYMBOLS];

            // Gather only the symbols that actually occur.
            let mut num_used_symbols = 0;
            for i in 0..table_len {
                if self.count[table_num][i] != 0 {
                    symbols0[num_used_symbols] = SymFreq {
                        key: self.count[table_num][i],
                        sym_index: i as u16,
                    };
                    num_used_symbols += 1;
                }
            }

            let symbols = Self::radix_sort_symbols(
                &mut symbols0[..num_used_symbols],
                &mut symbols1[..num_used_symbols],
            );
            Self::calculate_minimum_redundancy(symbols);

            // Histogram the resulting code lengths.
            for symbol in symbols.iter() {
                num_codes[symbol.key as usize] += 1;
            }

            Self::enforce_max_code_size(&mut num_codes, num_used_symbols, code_size_limit);

            self.code_sizes[table_num].fill(0);
            self.codes[table_num].fill(0);

            // Assign lengths back to symbols: the frequency-sorted list is
            // walked from the end (rarest first ⇒ longest codes).
            let mut last = num_used_symbols;
            for (i, &num_item) in num_codes
                .iter()
                .enumerate()
                .take(code_size_limit + 1)
                .skip(1)
            {
                let first = last - num_item as usize;
                for symbol in &symbols[first..last] {
                    self.code_sizes[table_num][symbol.sym_index as usize] = i as u8;
                }
                last = first;
            }
        }

        // Canonical code assignment: first code of each length follows from
        // the previous length's last code, shifted left one bit.
        let mut j = 0;
        next_code[1] = 0;
        for i in 2..=code_size_limit {
            j = (j + num_codes[i - 1]) << 1;
            next_code[i] = j as u32;
        }

        for (&code_size, huff_code) in self.code_sizes[table_num]
            .iter()
            .take(table_len)
            .zip(self.codes[table_num].iter_mut().take(table_len))
        {
            if code_size == 0 {
                continue;
            }

            let code = next_code[code_size as usize];

            next_code[code_size as usize] += 1;

            // Deflate emits codes MSB-first while we write LSB-first, so
            // store the bit-reversed code.
            let rev_code = (code as u16).reverse_bits() >> (16 - code_size);

            *huff_code = rev_code;
        }
    }

    /// Load the fixed (static) Huffman tables from RFC 1951 and emit the
    /// two-bit block-type header `01`.
    fn start_static_block(&mut self, output: &mut OutputBufferOxide) {
        self.code_sizes[LITLEN_TABLE][0..144].fill(8);
        self.code_sizes[LITLEN_TABLE][144..256].fill(9);
        self.code_sizes[LITLEN_TABLE][256..280].fill(7);
        self.code_sizes[LITLEN_TABLE][280..288].fill(8);

        self.code_sizes[DIST_TABLE][..32].fill(5);

        self.optimize_table(LITLEN_TABLE, 288, 15, true);
        self.optimize_table(DIST_TABLE, 32, 15, true);

        output.put_bits(0b01, 2)
    }

    /// Build dynamic Huffman tables from accumulated frequencies and write
    /// the full dynamic block header: block type `10`, table sizes, the
    /// code-length code, and the RLE-compressed code-size lists.
    fn start_dynamic_block(&mut self, output: &mut OutputBufferOxide) -> Result<()> {
        // The end-of-block symbol always occurs exactly once.
        self.count[0][256] = 1;

        self.optimize_table(0, MAX_HUFF_SYMBOLS_0, 15, false);
        self.optimize_table(1, MAX_HUFF_SYMBOLS_1, 15, false);

        // Trim trailing unused symbols from both tables.
        let num_lit_codes = 286
            - &self.code_sizes[0][257..286]
                .iter()
                .rev()
                .take_while(|&x| *x == 0)
                .count();

        let num_dist_codes = 30
            - &self.code_sizes[1][1..30]
                .iter()
                .rev()
                .take_while(|&x| *x == 0)
                .count();

        let mut code_sizes_to_pack = [0u8; MAX_HUFF_SYMBOLS_0 + MAX_HUFF_SYMBOLS_1];
        let mut packed_code_sizes = [0u8; MAX_HUFF_SYMBOLS_0 + MAX_HUFF_SYMBOLS_1];

        let total_code_sizes_to_pack = num_lit_codes + num_dist_codes;

        code_sizes_to_pack[..num_lit_codes].copy_from_slice(&self.code_sizes[0][..num_lit_codes]);

        code_sizes_to_pack[num_lit_codes..total_code_sizes_to_pack]
            .copy_from_slice(&self.code_sizes[1][..num_dist_codes]);

        let mut rle = Rle {
            z_count: 0,
            repeat_count: 0,
            prev_code_size: 0xFF,
        };

        self.count[HUFF_CODES_TABLE][..MAX_HUFF_SYMBOLS_2].fill(0);

        // RLE-compress the concatenated code-size lists, counting
        // code-length-alphabet symbol frequencies as we go.
        let mut packed_pos = 0;
        for &code_size in &code_sizes_to_pack[..total_code_sizes_to_pack] {
            if code_size == 0 {
                rle.prev_code_size(&mut packed_code_sizes, &mut packed_pos, self)?;
                rle.z_count += 1;
                // Symbol 18 can encode at most 138 zeros; flush at the cap.
                if rle.z_count == 138 {
                    rle.zero_code_size(&mut packed_code_sizes, &mut packed_pos, self)?;
                }
            } else {
                rle.zero_code_size(&mut packed_code_sizes, &mut packed_pos, self)?;
                if code_size != rle.prev_code_size {
                    rle.prev_code_size(&mut packed_code_sizes, &mut packed_pos, self)?;
                    self.count[HUFF_CODES_TABLE][code_size as usize] =
                        self.count[HUFF_CODES_TABLE][code_size as usize].wrapping_add(1);
                    write(&[code_size], &mut packed_code_sizes, &mut packed_pos)?;
                } else {
                    rle.repeat_count += 1;
                    // Symbol 16 can encode at most 6 repeats; flush at the cap.
                    if rle.repeat_count == 6 {
                        rle.prev_code_size(&mut packed_code_sizes, &mut packed_pos, self)?;
                    }
                }
            }
            rle.prev_code_size = code_size;
        }

        // Flush whichever run is still pending.
        if rle.repeat_count != 0 {
            rle.prev_code_size(&mut packed_code_sizes, &mut packed_pos, self)?;
        } else {
            rle.zero_code_size(&mut packed_code_sizes, &mut packed_pos, self)?;
        }

        // Build the code-length code itself (limited to 7 bits).
        self.optimize_table(2, MAX_HUFF_SYMBOLS_2, 7, false);

        // Block type `10` (dynamic Huffman).
        output.put_bits_no_flush(2, 2);

        // HLIT and HDIST, biased per RFC 1951.
        output.put_bits_no_flush((num_lit_codes - 257) as u32, 5);
        output.put_bits_no_flush((num_dist_codes - 1) as u32, 5);

        // HCLEN: trim trailing zero entries in the swizzled order.
        let mut num_bit_lengths = 18
            - HUFFMAN_LENGTH_ORDER
                .iter()
                .rev()
                .take_while(|&swizzle| self.code_sizes[HUFF_CODES_TABLE][*swizzle as usize] == 0)
                .count();

        num_bit_lengths = cmp::max(4, num_bit_lengths + 1);
        output.put_bits(num_bit_lengths as u32 - 4, 4);
        for &swizzle in &HUFFMAN_LENGTH_ORDER[..num_bit_lengths] {
            output.put_bits(
                u32::from(self.code_sizes[HUFF_CODES_TABLE][swizzle as usize]),
                3,
            );
        }

        // Emit the packed code sizes using the code-length code; symbols
        // 16/17/18 carry 2/3/7 extra bits of repeat count respectively.
        let mut packed_code_size_index = 0;
        while packed_code_size_index < packed_pos {
            let code = packed_code_sizes[packed_code_size_index] as usize;
            packed_code_size_index += 1;
            assert!(code < MAX_HUFF_SYMBOLS_2);
            output.put_bits(
                u32::from(self.codes[HUFF_CODES_TABLE][code]),
                u32::from(self.code_sizes[HUFF_CODES_TABLE][code]),
            );
            if code >= 16 {
                output.put_bits(
                    u32::from(packed_code_sizes[packed_code_size_index]),
                    [2, 3, 7][code - 16],
                );
                packed_code_size_index += 1;
            }
        }

        Ok(())
    }
}
1128
/// LZ77 sliding dictionary plus hash chains for match finding.
pub(crate) struct DictOxide {
    // Probe limits for short (< 32) and long matches respectively.
    pub max_probes: [u32; 2],
    // Dictionary bytes, hash heads, and `next` chain links.
    pub b: HashBuffers,

    pub code_buf_dict_pos: usize,
    pub lookahead_size: usize,
    pub lookahead_pos: usize,
    pub size: usize,
    // Upper bound on the 8-byte comparison rounds in `find_match`.
    loop_len: u8,
}
1143
/// Derive the short- and long-match probe limits from the raw probe count
/// stored in the low 12 bits of the compression flags.
const fn probes_from_flags(flags: u32) -> [u32; 2] {
    let raw_probes = flags & 0xFFF;
    [
        1 + (raw_probes + 2) / 3,
        1 + ((raw_probes >> 2) + 2) / 3,
    ]
}
1150
1151impl DictOxide {
    /// Fresh, empty dictionary configured from the compression flags.
    fn new(flags: u32) -> Self {
        DictOxide {
            max_probes: probes_from_flags(flags),
            b: HashBuffers::default(),
            code_buf_dict_pos: 0,
            lookahead_size: 0,
            lookahead_pos: 0,
            size: 0,
            loop_len: 32,
        }
    }
1163
    /// Recompute the probe limits after the compression level changed.
    fn update_flags(&mut self, flags: u32) {
        self.max_probes = probes_from_flags(flags);
    }
1167
    /// Clear the dictionary contents and positions (probe limits are kept).
    fn reset(&mut self) {
        self.b.reset();
        self.code_buf_dict_pos = 0;
        self.lookahead_size = 0;
        self.lookahead_pos = 0;
        self.size = 0;
    }
1175
    /// Read 4 dictionary bytes at `pos` (wrapped into the window) as a
    /// little-endian `u32`. The assert both guards the slice access and
    /// lets the optimizer drop the bounds checks on the read below.
    #[inline]
    fn read_unaligned_u32(&self, pos: usize) -> u32 {
        let pos = pos & LZ_DICT_SIZE_MASK;
        let end = pos + 4;
        assert!(end < LZ_DICT_FULL_SIZE);

        let bytes: [u8; 4] = self.b.dict[pos..end].try_into().unwrap();
        u32::from_le_bytes(bytes)
    }
1191
    /// Read 8 dictionary bytes at `pos` (wrapped into the window) as a
    /// little-endian `u64`; relies on the dictionary buffer extending past
    /// the window so the slice access stays in bounds.
    #[inline]
    fn read_unaligned_u64(&self, pos: usize) -> u64 {
        let pos = pos & LZ_DICT_SIZE_MASK;
        let bytes: [u8; 8] = self.b.dict[pos..pos + 8].try_into().unwrap();
        u64::from_le_bytes(bytes)
    }
1203
1204 fn find_match(
1209 &self,
1210 lookahead_pos: usize,
1211 max_dist: usize,
1212 max_match_len: u32,
1213 mut match_dist: u32,
1214 mut match_len: u32,
1215 ) -> (u32, u32) {
1216 let max_match_len = cmp::min(MAX_MATCH_LEN as u32, max_match_len);
1222 match_len = cmp::max(match_len, 1);
1223
1224 if max_match_len <= match_len {
1226 return (match_dist, match_len);
1227 }
1228
1229 let pos = lookahead_pos & LZ_DICT_SIZE_MASK;
1230 let mut probe_pos = pos;
1231 let mut num_probes_left = if match_len < 32 {
1233 self.max_probes[0]
1234 } else {
1235 self.max_probes[1]
1236 };
1237
1238 let mut c01: u16 = read_u16_le(&self.b.dict, pos + match_len as usize - 1);
1240 let s01: u16 = read_u16_le(&self.b.dict, pos);
1242
1243 'outer: loop {
1244 let mut dist;
1245 'found: loop {
1246 num_probes_left -= 1;
1247 if num_probes_left == 0 {
1248 return (match_dist, match_len);
1251 }
1252
1253 for _ in 0..3 {
1254 let next_probe_pos = self.b.next[probe_pos] as usize;
1255
1256 dist = (lookahead_pos - next_probe_pos) & 0xFFFF;
1257 if next_probe_pos == 0 || dist > max_dist {
1258 return (match_dist, match_len);
1262 }
1263
1264 probe_pos = next_probe_pos & LZ_DICT_SIZE_MASK;
1267
1268 if read_u16_le(&self.b.dict, probe_pos + match_len as usize - 1) == c01 {
1270 break 'found;
1271 }
1272 }
1273 }
1274
1275 if dist == 0 {
1276 return (match_dist, match_len);
1279 }
1280
1281 if read_u16_le(&self.b.dict, probe_pos) != s01 {
1283 continue;
1284 }
1285
1286 let mut p = pos + 2;
1287 let mut q = probe_pos + 2;
1288 for _ in 0..self.loop_len as i32 {
1293 let p_data: u64 = self.read_unaligned_u64(p);
1294 let q_data: u64 = self.read_unaligned_u64(q);
1295 let xor_data = p_data ^ q_data;
1297 if xor_data == 0 {
1298 p += 8;
1299 q += 8;
1300 } else {
1301 let trailing = xor_data.trailing_zeros();
1303
1304 let probe_len = p - pos + (trailing as usize >> 3);
1305 if probe_len > match_len as usize {
1306 match_dist = dist as u32;
1307 match_len = cmp::min(max_match_len, probe_len as u32);
1308 if match_len == max_match_len {
1309 return (match_dist, match_len);
1312 }
1313 c01 = read_u16_le(&self.b.dict, pos + match_len as usize - 1);
1316 }
1317 continue 'outer;
1318 }
1319 }
1320
1321 return (dist as u32, cmp::min(max_match_len, MAX_MATCH_LEN as u32));
1322 }
1323 }
1324}
1325
/// Per-stream compressor parameters and saved progress state, allowing
/// compression to be resumed across `compress` calls.
pub(crate) struct ParamsOxide {
    /// Raw compression flags (TDEFL_* bitfield).
    pub flags: u32,
    /// Cached `flags & TDEFL_GREEDY_PARSING_FLAG != 0`.
    pub greedy_parsing: bool,
    /// Number of deflate blocks emitted so far (0 ⇒ zlib header not yet written).
    pub block_index: u32,

    // Deferred match state for lazy parsing in `compress_normal`:
    /// Distance of the match held back from the previous position.
    pub saved_match_dist: u32,
    /// Length of the held-back match (0 ⇒ none pending).
    pub saved_match_len: u32,
    /// Literal held back while deciding between it and a match.
    pub saved_lit: u8,

    /// Flush mode requested for the current `compress` call.
    pub flush: TDEFLFlush,
    /// Read offset into `local_buf` for pending flush output.
    pub flush_ofs: u32,
    /// Number of bytes in `local_buf` still waiting to be flushed.
    pub flush_remaining: u32,
    /// Set once a Finish flush has completed.
    pub finished: bool,

    /// Running Adler-32 checksum of the input (for the zlib trailer).
    pub adler32: u32,

    /// Position reached in the caller's input buffer.
    pub src_pos: usize,

    /// Position reached in the caller's output buffer.
    pub out_buf_ofs: usize,
    /// Status returned by the previous `compress` call.
    pub prev_return_status: TDEFLStatus,

    /// Bit-writer state carried over between blocks.
    pub saved_bit_buffer: u32,
    /// Number of valid bits in `saved_bit_buffer`.
    pub saved_bits_in: u32,

    /// Heap-allocated intermediate output buffer (boxed to keep the
    /// compressor struct itself small).
    pub local_buf: Box<LocalBuf>,
}
1352
1353impl ParamsOxide {
1354 fn new(flags: u32) -> Self {
1355 ParamsOxide {
1356 flags,
1357 greedy_parsing: flags & TDEFL_GREEDY_PARSING_FLAG != 0,
1358 block_index: 0,
1359 saved_match_dist: 0,
1360 saved_match_len: 0,
1361 saved_lit: 0,
1362 flush: TDEFLFlush::None,
1363 flush_ofs: 0,
1364 flush_remaining: 0,
1365 finished: false,
1366 adler32: MZ_ADLER32_INIT,
1367 src_pos: 0,
1368 out_buf_ofs: 0,
1369 prev_return_status: TDEFLStatus::Okay,
1370 saved_bit_buffer: 0,
1371 saved_bits_in: 0,
1372 local_buf: Box::default(),
1373 }
1374 }
1375
1376 fn update_flags(&mut self, flags: u32) {
1377 self.flags = flags;
1378 self.greedy_parsing = self.flags & TDEFL_GREEDY_PARSING_FLAG != 0;
1379 }
1380
1381 fn reset(&mut self) {
1383 self.block_index = 0;
1384 self.saved_match_len = 0;
1385 self.saved_match_dist = 0;
1386 self.saved_lit = 0;
1387 self.flush = TDEFLFlush::None;
1388 self.flush_ofs = 0;
1389 self.flush_remaining = 0;
1390 self.finished = false;
1391 self.adler32 = MZ_ADLER32_INIT;
1392 self.src_pos = 0;
1393 self.out_buf_ofs = 0;
1394 self.prev_return_status = TDEFLStatus::Okay;
1395 self.saved_bit_buffer = 0;
1396 self.saved_bits_in = 0;
1397 self.local_buf.b = [0; OUT_BUF_SIZE];
1398 }
1399}
1400
/// Buffer of encoded LZ codes (literal/match records) plus the flag-byte
/// bookkeeping needed to interleave flag bytes with the codes.
pub(crate) struct LZOxide {
    /// Interleaved flag bytes and code bytes; consumed by `compress_lz_codes`.
    pub codes: [u8; LZ_CODE_BUF_SIZE],
    /// Write position in `codes` (starts at 1 — index 0 is the first flag byte).
    pub code_position: usize,
    /// Index in `codes` of the flag byte currently being filled.
    pub flag_position: usize,

    /// Number of raw input bytes represented by the buffered codes.
    pub total_bytes: u32,
    /// How many of the 8 bits in the current flag byte are still unused.
    pub num_flags_left: u32,
}
1410
impl LZOxide {
    /// Empty code buffer; position 0 is reserved for the first flag byte,
    /// so writing starts at index 1.
    const fn new() -> Self {
        LZOxide {
            codes: [0; LZ_CODE_BUF_SIZE],
            code_position: 1,
            flag_position: 0,
            total_bytes: 0,
            num_flags_left: 8,
        }
    }

    /// Append one code byte. The round-trip through `u16` proves to the
    /// compiler that the index fits the buffer, eliding the bounds check
    /// (LZ_CODE_BUF_SIZE is assumed to be >= 64 KiB — TODO confirm).
    fn write_code(&mut self, val: u8) {
        self.codes[usize::from(self.code_position as u16)] = val;
        self.code_position += 1;
    }

    /// Finalize the current flag byte before flushing a block: if it is
    /// completely unused, drop it; otherwise shift its bits into place.
    fn init_flag(&mut self) {
        if self.num_flags_left == 8 {
            *self.get_flag() = 0;
            self.code_position -= 1;
        } else {
            *self.get_flag() >>= self.num_flags_left;
        }
    }

    /// Mutable access to the flag byte currently being filled (same
    /// u16-cast bounds-check-elision trick as `write_code`).
    fn get_flag(&mut self) -> &mut u8 {
        &mut self.codes[usize::from(self.flag_position as u16)]
    }

    /// Reserve the next byte in the buffer as a new flag byte.
    fn plant_flag(&mut self) {
        self.flag_position = self.code_position;
        self.code_position += 1;
    }

    /// Account for one flag bit; when the byte is full, start a new one.
    fn consume_flag(&mut self) {
        self.num_flags_left -= 1;
        if self.num_flags_left == 0 {
            self.num_flags_left = 8;
            self.plant_flag();
        }
    }
}
1457
/// Write the huffman-encoded symbol stream for the buffered LZ codes to
/// `output`, finishing with the end-of-block (256) symbol.
///
/// `lz_code_buf` interleaves flag bytes with code bytes: each flag bit
/// selects between a match record (1 bit set: 1 length byte + 2 distance
/// bytes) and a literal (bit clear: 1 byte). Returns `Ok(true)` on success;
/// `Err` propagates output-buffer failures from the bit writer.
fn compress_lz_codes(
    huff: &HuffmanOxide,
    output: &mut OutputBufferOxide,
    lz_code_buf: &[u8; LZ_CODE_BUF_SIZE],
    lz_code_buf_used_len: usize,
) -> Result<bool> {
    // flags == 1 is the sentinel meaning "all flag bits consumed,
    // load the next flag byte".
    let mut flags = 1;
    // Local 64-bit bit buffer; faster than pushing through `output`
    // one symbol at a time.
    let mut bb = BitBuffer {
        bit_buffer: u64::from(output.bit_buffer),
        bits_in: output.bits_in,
    };

    // Defensive clamp so the loop below cannot index past the buffer.
    let lz_code_buf_used_len = cmp::min(lz_code_buf.len(), lz_code_buf_used_len);

    let mut i: usize = 0;
    while i < lz_code_buf_used_len {
        if flags == 1 {
            // Load next flag byte; the 0x100 marker bit signals when all
            // 8 flag bits have been shifted out.
            flags = u32::from(lz_code_buf[i]) | 0x100;
            i += 1;
        }

        if flags & 1 == 1 {
            // Match record: 1 length byte + 2 distance bytes (LE).
            flags >>= 1;

            let sym;
            let num_extra_bits;

            let match_len = lz_code_buf[i] as usize;

            let match_dist = read_u16_le(lz_code_buf, i + 1);

            i += 3;

            // Length symbol + its extra bits.
            debug_assert!(huff.code_sizes[0][LEN_SYM[match_len] as usize] != 0);
            bb.put_fast(
                u64::from(huff.codes[0][LEN_SYM[match_len] as usize]),
                u32::from(huff.code_sizes[0][LEN_SYM[match_len] as usize]),
            );
            bb.put_fast(
                match_len as u64 & u64::from(BITMASKS[LEN_EXTRA[match_len] as usize]),
                u32::from(LEN_EXTRA[match_len]),
            );

            // Distance symbol: small distances index directly, larger ones
            // via the high byte.
            if match_dist < 512 {
                sym = SMALL_DIST_SYM[match_dist as usize] as usize;
                num_extra_bits = SMALL_DIST_EXTRA[match_dist as usize] as usize;
            } else {
                sym = LARGE_DIST_SYM[(match_dist >> 8) as usize] as usize;
                num_extra_bits = LARGE_DIST_EXTRA[(match_dist >> 8) as usize] as usize;
            }

            debug_assert!(huff.code_sizes[1][sym] != 0);
            bb.put_fast(
                u64::from(huff.codes[1][sym]),
                u32::from(huff.code_sizes[1][sym]),
            );
            bb.put_fast(
                u64::from(match_dist) & u64::from(BITMASKS[num_extra_bits]),
                num_extra_bits as u32,
            );
        } else {
            // Up to 3 literals in a row fit in the 64-bit bit buffer
            // before a flush is needed.
            for _ in 0..3 {
                flags >>= 1;
                // Masked index helps the compiler elide the bounds check.
                let lit = lz_code_buf[i & (LZ_CODE_BUF_SIZE - 1)];
                i += 1;

                debug_assert!(huff.code_sizes[0][lit as usize] != 0);
                bb.put_fast(
                    u64::from(huff.codes[0][lit as usize]),
                    u32::from(huff.code_sizes[0][lit as usize]),
                );

                if flags & 1 == 1 || i >= lz_code_buf_used_len {
                    break;
                }
            }
        }

        bb.flush(output)?;
    }

    // Drain whatever is left in the local bit buffer back into `output`,
    // 16 bits at a time.
    output.bits_in = 0;
    output.bit_buffer = 0;
    while bb.bits_in != 0 {
        let n = cmp::min(bb.bits_in, 16);
        output.put_bits(bb.bit_buffer as u32 & BITMASKS[n as usize], n);
        bb.bit_buffer >>= n;
        bb.bits_in -= n;
    }

    // End-of-block symbol.
    output.put_bits(
        u32::from(huff.codes[0][256]),
        u32::from(huff.code_sizes[0][256]),
    );

    Ok(true)
}
1560
1561fn compress_block(
1562 huff: &mut HuffmanOxide,
1563 output: &mut OutputBufferOxide,
1564 lz: &LZOxide,
1565 static_block: bool,
1566) -> Result<bool> {
1567 if static_block {
1568 huff.start_static_block(output);
1569 } else {
1570 huff.start_dynamic_block(output)?;
1571 }
1572
1573 compress_lz_codes(huff, output, &lz.codes, lz.code_position)
1574}
1575
/// Flush the currently buffered LZ codes as one deflate block (compressed,
/// static, or stored/raw — whichever is valid and smallest), then reset the
/// per-block state. Returns the value of `callback.flush_output` (number of
/// bytes pending/written, or negative on failure).
pub(crate) fn flush_block(
    d: &mut CompressorOxide,
    callback: &mut CallbackOxide,
    flush: TDEFLFlush,
) -> Result<i32> {
    let mut saved_buffer;
    {
        // Resume the bit writer exactly where the previous block left off.
        let mut output = callback
            .out
            .new_output_buffer(&mut d.params.local_buf.b, d.params.out_buf_ofs);
        output.bit_buffer = d.params.saved_bit_buffer;
        output.bits_in = d.params.saved_bits_in;

        // Raw (stored) blocks are only possible while the uncompressed
        // bytes for this block are still present in the dictionary window.
        let use_raw_block = (d.params.flags & TDEFL_FORCE_ALL_RAW_BLOCKS != 0)
            && (d.dict.lookahead_pos - d.dict.code_buf_dict_pos) <= d.dict.size;
        debug_assert_eq!(
            use_raw_block,
            d.params.flags & TDEFL_FORCE_ALL_RAW_BLOCKS != 0
        );

        assert!(d.params.flush_remaining == 0);
        d.params.flush_ofs = 0;
        d.params.flush_remaining = 0;

        d.lz.init_flag();

        // First block of a zlib stream starts with the 2-byte zlib header.
        if d.params.flags & TDEFL_WRITE_ZLIB_HEADER != 0 && d.params.block_index == 0 {
            let header = zlib::header_from_flags(d.params.flags);
            output.put_bits_no_flush(header[0].into(), 8);
            output.put_bits(header[1].into(), 8);
        }

        // BFINAL bit.
        output.put_bits((flush == TDEFLFlush::Finish) as u32, 1);

        // Snapshot so we can rewind if the compressed block is too big.
        saved_buffer = output.save();

        let comp_success = if !use_raw_block {
            // Static tables for tiny blocks; otherwise dynamic.
            let use_static =
                (d.params.flags & TDEFL_FORCE_ALL_STATIC_BLOCKS != 0) || (d.lz.total_bytes < 48);
            compress_block(&mut d.huff, &mut output, &d.lz, use_static)?
        } else {
            false
        };

        // Did compression actually expand the data? If so (and the raw bytes
        // are still in the window), prefer a stored block.
        let expanded = (d.lz.total_bytes > 32)
            && (output.inner_pos - saved_buffer.pos + 1 >= (d.lz.total_bytes as usize))
            && (d.dict.lookahead_pos - d.dict.code_buf_dict_pos <= d.dict.size);

        if use_raw_block || expanded {
            // Rewind and emit a stored (BTYPE=00) block instead.
            output.load(saved_buffer);

            output.put_bits(0, 2);

            // Stored blocks are byte-aligned: LEN then ~LEN (one's complement).
            output.pad_to_bytes();

            output.put_bits(d.lz.total_bytes & 0xFFFF, 16);
            output.put_bits(!d.lz.total_bytes & 0xFFFF, 16);

            // Copy the raw bytes out of the circular dictionary window,
            // possibly in two pieces around the wrap point.
            let start = d.dict.code_buf_dict_pos & LZ_DICT_SIZE_MASK;
            let end = (d.dict.code_buf_dict_pos + d.lz.total_bytes as usize) & LZ_DICT_SIZE_MASK;
            let dict = &mut d.dict.b.dict;
            if start < end {
                output.write_bytes(&dict[start..end]);
            } else if d.lz.total_bytes > 0 {
                output.write_bytes(&dict[start..LZ_DICT_SIZE]);
                output.write_bytes(&dict[..end]);
            }
        } else if !comp_success {
            // Dynamic table construction failed; fall back to static tables.
            output.load(saved_buffer);
            compress_block(&mut d.huff, &mut output, &d.lz, true)?;
        }

        if flush != TDEFLFlush::None {
            if flush == TDEFLFlush::Finish {
                output.pad_to_bytes();
                // zlib trailer: Adler-32 of the input, big-endian.
                if d.params.flags & TDEFL_WRITE_ZLIB_HEADER != 0 {
                    let mut adler = d.params.adler32;
                    for _ in 0..4 {
                        output.put_bits((adler >> 24) & 0xFF, 8);
                        adler <<= 8;
                    }
                }
            } else {
                // Sync/full flush: empty stored block (0x0000/0xFFFF marker).
                output.put_bits(0, 3);
                output.pad_to_bytes();
                output.put_bits(0, 16);
                output.put_bits(0xFFFF, 16);
            }
        }

        // Reset per-block symbol statistics and the LZ code buffer.
        d.huff.count[0][..MAX_HUFF_SYMBOLS_0].fill(0);
        d.huff.count[1][..MAX_HUFF_SYMBOLS_1].fill(0);

        d.lz.code_position = 1;
        d.lz.flag_position = 0;
        d.lz.num_flags_left = 8;
        d.dict.code_buf_dict_pos += d.lz.total_bytes as usize;
        d.lz.total_bytes = 0;
        d.params.block_index += 1;

        // Persist the bit-writer state for the next block.
        saved_buffer = output.save();

        d.params.saved_bit_buffer = saved_buffer.bit_buffer;
        d.params.saved_bits_in = saved_buffer.bits_in;
    }

    Ok(callback.flush_output(saved_buffer, &mut d.params))
}
1705
1706pub(crate) fn record_literal(h: &mut HuffmanOxide, lz: &mut LZOxide, lit: u8) {
1707 lz.total_bytes += 1;
1708 lz.write_code(lit);
1709
1710 *lz.get_flag() >>= 1;
1711 lz.consume_flag();
1712
1713 h.count[0][lit as usize] += 1;
1714}
1715
/// Append a (length, distance) match record to the LZ code buffer and count
/// the corresponding length and distance symbols for huffman generation.
/// Length and distance are stored biased: len - MIN_MATCH_LEN (fits a byte),
/// dist - 1 (two LE bytes).
fn record_match(h: &mut HuffmanOxide, lz: &mut LZOxide, mut match_len: u32, mut match_dist: u32) {
    debug_assert!(match_len >= MIN_MATCH_LEN.into());
    debug_assert!(match_dist >= 1);
    debug_assert!(match_dist as usize <= LZ_DICT_SIZE);

    lz.total_bytes += match_len;
    match_dist -= 1;
    match_len -= u32::from(MIN_MATCH_LEN);
    lz.write_code(match_len as u8);
    lz.write_code(match_dist as u8);
    lz.write_code((match_dist >> 8) as u8);

    // Shift in a 1 flag bit: this code is a match.
    *lz.get_flag() >>= 1;
    *lz.get_flag() |= 0x80;
    lz.consume_flag();

    let symbol = if match_dist < 512 {
        SMALL_DIST_SYM[match_dist as usize]
    } else {
        LARGE_DIST_SYM[((match_dist >> 8) & 127) as usize]
    } as usize;
    h.count[1][symbol] += 1;
    // After the -= above, match_len is in 0..=255 so the u8 cast is lossless.
    h.count[0][LEN_SYM[usize::from(match_len as u8)] as usize] += 1;
}
1741
/// Main (non-fast-path) compression loop: feeds input into the dictionary,
/// searches for matches with optional lazy (one-byte-deferred) parsing, and
/// flushes blocks when the LZ code buffer fills up. Returns `false` on an
/// output failure, otherwise `true`.
fn compress_normal(d: &mut CompressorOxide, callback: &mut CallbackOxide) -> bool {
    let in_buf = match callback.in_buf {
        None => return true,
        Some(in_buf) => in_buf,
    };

    // Work on locals; written back to `d` on every early exit and at the end.
    let mut src_pos = d.params.src_pos;
    let mut lookahead_size = d.dict.lookahead_size;
    let mut lookahead_pos = d.dict.lookahead_pos;
    let mut saved_lit = d.params.saved_lit;
    let mut saved_match_dist = d.params.saved_match_dist;
    let mut saved_match_len = d.params.saved_match_len;

    while src_pos < in_buf.len() || (d.params.flush != TDEFLFlush::None && lookahead_size != 0) {
        let src_buf_left = in_buf.len() - src_pos;
        let num_bytes_to_process = cmp::min(src_buf_left, MAX_MATCH_LEN - lookahead_size);

        if lookahead_size + d.dict.size >= usize::from(MIN_MATCH_LEN) - 1
            && num_bytes_to_process > 0
        {
            // Enough context to hash: copy bytes into the window and update
            // the rolling hash / hash chains as we go.
            let dictb = &mut d.dict.b;

            let mut dst_pos = (lookahead_pos + lookahead_size) & LZ_DICT_SIZE_MASK;
            let mut ins_pos = lookahead_pos + lookahead_size - 2;
            // Seed the rolling hash with the two bytes before the new data.
            let mut hash = update_hash(
                u16::from(dictb.dict[ins_pos & LZ_DICT_SIZE_MASK]),
                dictb.dict[(ins_pos + 1) & LZ_DICT_SIZE_MASK],
            );

            lookahead_size += num_bytes_to_process;

            for &c in &in_buf[src_pos..src_pos + num_bytes_to_process] {
                dictb.dict[dst_pos] = c;
                // Mirror the window's head past the end so unaligned reads
                // near the wrap point see contiguous data.
                if dst_pos < MAX_MATCH_LEN - 1 {
                    dictb.dict[LZ_DICT_SIZE + dst_pos] = c;
                }

                // Link this position into its hash chain.
                hash = update_hash(hash, c);
                dictb.next[ins_pos & LZ_DICT_SIZE_MASK] = dictb.hash[hash as usize];
                dictb.hash[hash as usize] = ins_pos as u16;
                dst_pos = (dst_pos + 1) & LZ_DICT_SIZE_MASK;
                ins_pos += 1;
            }
            src_pos += num_bytes_to_process;
        } else {
            // Too little context for the rolling-hash fast path: insert
            // byte-by-byte, hashing only once 3 bytes are available.
            let dictb = &mut d.dict.b;
            for &c in &in_buf[src_pos..src_pos + num_bytes_to_process] {
                let dst_pos = (lookahead_pos + lookahead_size) & LZ_DICT_SIZE_MASK;
                dictb.dict[dst_pos] = c;
                if dst_pos < MAX_MATCH_LEN - 1 {
                    dictb.dict[LZ_DICT_SIZE + dst_pos] = c;
                }

                lookahead_size += 1;
                if lookahead_size + d.dict.size >= MIN_MATCH_LEN.into() {
                    let ins_pos = lookahead_pos + lookahead_size - 3;
                    // Hash the full 3-byte window directly.
                    let hash = ((u32::from(dictb.dict[ins_pos & LZ_DICT_SIZE_MASK])
                        << (LZ_HASH_SHIFT * 2))
                        ^ ((u32::from(dictb.dict[(ins_pos + 1) & LZ_DICT_SIZE_MASK])
                            << LZ_HASH_SHIFT)
                            ^ u32::from(c)))
                        & (LZ_HASH_SIZE as u32 - 1);

                    dictb.next[ins_pos & LZ_DICT_SIZE_MASK] = dictb.hash[hash as usize];
                    dictb.hash[hash as usize] = ins_pos as u16;
                }
            }

            src_pos += num_bytes_to_process;
        }

        d.dict.size = cmp::min(LZ_DICT_SIZE - lookahead_size, d.dict.size);
        // Unless flushing, wait for a full lookahead before matching.
        if d.params.flush == TDEFLFlush::None && lookahead_size < MAX_MATCH_LEN {
            break;
        }

        let mut len_to_move = 1;
        let mut cur_match_dist = 0;
        let mut cur_match_len = if saved_match_len != 0 {
            saved_match_len
        } else {
            u32::from(MIN_MATCH_LEN) - 1
        };
        let cur_pos = lookahead_pos & LZ_DICT_SIZE_MASK;
        if d.params.flags & TDEFL_RLE_MATCHES != 0 {
            // RLE mode: only look for a run of the previous byte (dist 1).
            if d.dict.size != 0 {
                let c = d.dict.b.dict[(cur_pos.wrapping_sub(1)) & LZ_DICT_SIZE_MASK];
                cur_match_len = d.dict.b.dict[cur_pos..(cur_pos + lookahead_size)]
                    .iter()
                    .take_while(|&x| *x == c)
                    .count() as u32;
                if cur_match_len < MIN_MATCH_LEN.into() {
                    cur_match_len = 0
                } else {
                    cur_match_dist = 1
                }
            }
        } else {
            // Normal mode: full hash-chain search.
            let dist_len = d.dict.find_match(
                lookahead_pos,
                d.dict.size,
                lookahead_size as u32,
                cur_match_dist,
                cur_match_len,
            );
            cur_match_dist = dist_len.0;
            cur_match_len = dist_len.1;
        }

        // Reject matches that would likely cost more than literals:
        // minimum-length matches at long distances, or (in filter mode)
        // any short match.
        let far_and_small = cur_match_len == MIN_MATCH_LEN.into() && cur_match_dist >= 8 * 1024;
        let filter_small = d.params.flags & TDEFL_FILTER_MATCHES != 0 && cur_match_len <= 5;
        if far_and_small || filter_small || cur_pos == cur_match_dist as usize {
            cur_match_dist = 0;
            cur_match_len = 0;
        }

        if saved_match_len != 0 {
            // Lazy parsing: a match from the previous position is pending.
            if cur_match_len > saved_match_len {
                // The new match is better: emit the deferred byte as a
                // literal, then either take the new match now (if long) or
                // defer it in turn.
                record_literal(&mut d.huff, &mut d.lz, saved_lit);
                if cur_match_len >= 128 {
                    record_match(&mut d.huff, &mut d.lz, cur_match_len, cur_match_dist);
                    saved_match_len = 0;
                    len_to_move = cur_match_len as usize;
                } else {
                    saved_lit = d.dict.b.dict[cur_pos];
                    saved_match_dist = cur_match_dist;
                    saved_match_len = cur_match_len;
                }
            } else {
                // The pending match wins; emit it.
                record_match(&mut d.huff, &mut d.lz, saved_match_len, saved_match_dist);
                len_to_move = (saved_match_len - 1) as usize;
                saved_match_len = 0;
            }
        } else if cur_match_dist == 0 {
            // No match at all: emit a literal.
            record_literal(
                &mut d.huff,
                &mut d.lz,
                d.dict.b.dict[cmp::min(cur_pos, d.dict.b.dict.len() - 1)],
            );
        } else if d.params.greedy_parsing
            || (d.params.flags & TDEFL_RLE_MATCHES != 0)
            || cur_match_len >= 128
        {
            // Greedy mode, or match long enough that deferring can't help.
            record_match(&mut d.huff, &mut d.lz, cur_match_len, cur_match_dist);
            len_to_move = cur_match_len as usize;
        } else {
            // Lazy mode: defer the match one byte and see if the next
            // position yields a longer one.
            saved_lit = d.dict.b.dict[cmp::min(cur_pos, d.dict.b.dict.len() - 1)];
            saved_match_dist = cur_match_dist;
            saved_match_len = cur_match_len;
        }

        lookahead_pos += len_to_move;
        assert!(lookahead_size >= len_to_move);
        lookahead_size -= len_to_move;
        d.dict.size = cmp::min(d.dict.size + len_to_move, LZ_DICT_SIZE);

        // Flush a block if the code buffer is nearly full, or if the data is
        // compressing poorly and the buffer has grown large.
        let lz_buf_tight = d.lz.code_position > LZ_CODE_BUF_SIZE - 8;
        let fat = ((d.lz.code_position * 115) >> 7) >= d.lz.total_bytes as usize;
        let buf_fat = (d.lz.total_bytes > 31 * 1024) && fat;

        if lz_buf_tight || buf_fat {
            d.params.src_pos = src_pos;
            d.dict.lookahead_size = lookahead_size;
            d.dict.lookahead_pos = lookahead_pos;

            let n = flush_block(d, callback, TDEFLFlush::None)
                .unwrap_or(TDEFLStatus::PutBufFailed as i32);
            if n != 0 {
                // Output full or failed: persist parse state and stop here.
                d.params.saved_lit = saved_lit;
                d.params.saved_match_dist = saved_match_dist;
                d.params.saved_match_len = saved_match_len;
                return n > 0;
            }
        }
    }

    d.params.src_pos = src_pos;
    d.dict.lookahead_size = lookahead_size;
    d.dict.lookahead_pos = lookahead_pos;
    d.params.saved_lit = saved_lit;
    d.params.saved_match_dist = saved_match_dist;
    d.params.saved_match_len = saved_match_len;
    true
}
1935
/// How much lookahead `compress_fast` accumulates before parsing a chunk.
const COMP_FAST_LOOKAHEAD_SIZE: usize = 4096;
1937
/// Fast-path compression loop used for level-1-style settings (single probe,
/// greedy, no filter/RLE): a single-entry hash of 3-byte trigrams instead of
/// hash chains, greedy matching only. Returns `false` on an output failure,
/// otherwise `true`.
fn compress_fast(d: &mut CompressorOxide, callback: &mut CallbackOxide) -> bool {
    let mut src_pos = d.params.src_pos;
    let mut lookahead_size = d.dict.lookahead_size;
    let mut lookahead_pos = d.dict.lookahead_pos;

    let mut cur_pos = lookahead_pos & LZ_DICT_SIZE_MASK;
    let in_buf = match callback.in_buf {
        None => return true,
        Some(in_buf) => in_buf,
    };

    debug_assert!(d.lz.code_position < LZ_CODE_BUF_SIZE - 2);

    while src_pos < in_buf.len() || (d.params.flush != TDEFLFlush::None && lookahead_size > 0) {
        let mut dst_pos = (lookahead_pos + lookahead_size) & LZ_DICT_SIZE_MASK;
        let mut num_bytes_to_process = cmp::min(
            in_buf.len() - src_pos,
            COMP_FAST_LOOKAHEAD_SIZE - lookahead_size,
        );
        lookahead_size += num_bytes_to_process;

        // Bulk-copy input into the circular window (piecewise at the wrap),
        // mirroring the first MAX_MATCH_LEN-1 bytes past the end so
        // unaligned reads near the wrap point see contiguous data.
        while num_bytes_to_process != 0 {
            let n = cmp::min(LZ_DICT_SIZE - dst_pos, num_bytes_to_process);
            d.dict.b.dict[dst_pos..dst_pos + n].copy_from_slice(&in_buf[src_pos..src_pos + n]);

            if dst_pos < MAX_MATCH_LEN - 1 {
                let m = cmp::min(n, MAX_MATCH_LEN - 1 - dst_pos);
                d.dict.b.dict[dst_pos + LZ_DICT_SIZE..dst_pos + LZ_DICT_SIZE + m]
                    .copy_from_slice(&in_buf[src_pos..src_pos + m]);
            }

            src_pos += n;
            dst_pos = (dst_pos + n) & LZ_DICT_SIZE_MASK;
            num_bytes_to_process -= n;
        }

        d.dict.size = cmp::min(LZ_DICT_SIZE - lookahead_size, d.dict.size);
        // Unless flushing, wait for a full chunk before parsing.
        if d.params.flush == TDEFLFlush::None && lookahead_size < COMP_FAST_LOOKAHEAD_SIZE {
            break;
        }

        // Parse while at least one whole trigram (+1 byte) is available.
        while lookahead_size >= 4 {
            let mut cur_match_len = 1;

            // Low 24 bits = the 3 bytes at the current position.
            let first_trigram = d.dict.read_unaligned_u32(cur_pos) & 0xFF_FFFF;

            let hash = (first_trigram ^ (first_trigram >> (24 - (LZ_HASH_BITS - 8))))
                & LEVEL1_HASH_SIZE_MASK;

            // Single-probe hash: fetch previous position, store current.
            let mut probe_pos = usize::from(d.dict.b.hash[hash as usize]);
            d.dict.b.hash[hash as usize] = lookahead_pos as u16;

            let mut cur_match_dist = (lookahead_pos - probe_pos) as u16;
            if cur_match_dist as usize <= d.dict.size {
                probe_pos &= LZ_DICT_SIZE_MASK;

                let trigram = d.dict.read_unaligned_u32(probe_pos) & 0xFF_FFFF;

                if first_trigram == trigram {
                    // Trigram hit: extend the match 8 bytes at a time.
                    let mut p = cur_pos + 3;
                    let mut q = probe_pos + 3;
                    cur_match_len = (|| {
                        for _ in 0..32 {
                            let p_data: u64 = d.dict.read_unaligned_u64(p);
                            let q_data: u64 = d.dict.read_unaligned_u64(q);
                            let xor_data = p_data ^ q_data;
                            if xor_data == 0 {
                                p += 8;
                                q += 8;
                            } else {
                                // First differing byte = trailing zeros / 8.
                                let trailing = xor_data.trailing_zeros();
                                return p as u32 - cur_pos as u32 + (trailing >> 3);
                            }
                        }

                        if cur_match_dist == 0 {
                            0
                        } else {
                            MAX_MATCH_LEN as u32
                        }
                    })();

                    if cur_match_len < MIN_MATCH_LEN.into()
                        || (cur_match_len == MIN_MATCH_LEN.into() && cur_match_dist >= 8 * 1024)
                    {
                        // Too short (or minimum-length at long distance):
                        // emit the first byte of the trigram as a literal.
                        let lit = first_trigram as u8;
                        cur_match_len = 1;
                        d.lz.write_code(lit);
                        *d.lz.get_flag() >>= 1;
                        d.huff.count[0][lit as usize] += 1;
                    } else {
                        // Emit the match (biased encoding: len-MIN, dist-1).
                        cur_match_len = cmp::min(cur_match_len, lookahead_size as u32);
                        debug_assert!(cur_match_len >= MIN_MATCH_LEN.into());
                        debug_assert!(cur_match_dist >= 1);
                        debug_assert!(cur_match_dist as usize <= LZ_DICT_SIZE);
                        cur_match_dist -= 1;

                        d.lz.write_code((cur_match_len - u32::from(MIN_MATCH_LEN)) as u8);
                        d.lz.write_code(cur_match_dist as u8);
                        d.lz.write_code((cur_match_dist >> 8) as u8);

                        *d.lz.get_flag() >>= 1;
                        *d.lz.get_flag() |= 0x80;
                        if cur_match_dist < 512 {
                            d.huff.count[1][SMALL_DIST_SYM[cur_match_dist as usize] as usize] += 1;
                        } else {
                            d.huff.count[1]
                                [LARGE_DIST_SYM[(cur_match_dist >> 8) as usize] as usize] += 1;
                        }

                        d.huff.count[0][LEN_SYM[(cur_match_len - u32::from(MIN_MATCH_LEN)) as usize]
                            as usize] += 1;
                    }
                } else {
                    // Trigram miss: literal.
                    d.lz.write_code(first_trigram as u8);
                    *d.lz.get_flag() >>= 1;
                    d.huff.count[0][first_trigram as u8 as usize] += 1;
                }
            } else {
                // Candidate is outside the valid window: literal.
                d.lz.write_code(first_trigram as u8);
                *d.lz.get_flag() >>= 1;
                d.huff.count[0][first_trigram as u8 as usize] += 1;
            }

            d.lz.consume_flag();
            d.lz.total_bytes += cur_match_len;
            lookahead_pos += cur_match_len as usize;
            d.dict.size = cmp::min(d.dict.size + cur_match_len as usize, LZ_DICT_SIZE);
            cur_pos = (cur_pos + cur_match_len as usize) & LZ_DICT_SIZE_MASK;
            lookahead_size -= cur_match_len as usize;

            // Code buffer nearly full: flush a block mid-parse.
            if d.lz.code_position > LZ_CODE_BUF_SIZE - 8 {
                d.dict.lookahead_size = lookahead_size;
                d.dict.lookahead_pos = lookahead_pos;

                let n = match flush_block(d, callback, TDEFLFlush::None) {
                    Err(_) => {
                        d.params.src_pos = src_pos;
                        d.params.prev_return_status = TDEFLStatus::PutBufFailed;
                        return false;
                    }
                    Ok(status) => status,
                };
                if n != 0 {
                    d.params.src_pos = src_pos;
                    return n > 0;
                }
                debug_assert!(d.lz.code_position < LZ_CODE_BUF_SIZE - 2);

                lookahead_size = d.dict.lookahead_size;
                lookahead_pos = d.dict.lookahead_pos;
            }
        }

        // Fewer than 4 bytes left: emit the remainder as literals.
        while lookahead_size != 0 {
            let lit = d.dict.b.dict[cur_pos];
            d.lz.total_bytes += 1;
            d.lz.write_code(lit);
            *d.lz.get_flag() >>= 1;
            d.lz.consume_flag();

            d.huff.count[0][lit as usize] += 1;
            lookahead_pos += 1;
            d.dict.size = cmp::min(d.dict.size + 1, LZ_DICT_SIZE);
            cur_pos = (cur_pos + 1) & LZ_DICT_SIZE_MASK;
            lookahead_size -= 1;

            if d.lz.code_position > LZ_CODE_BUF_SIZE - 8 {
                d.dict.lookahead_size = lookahead_size;
                d.dict.lookahead_pos = lookahead_pos;

                let n = match flush_block(d, callback, TDEFLFlush::None) {
                    Err(_) => {
                        d.params.prev_return_status = TDEFLStatus::PutBufFailed;
                        d.params.src_pos = src_pos;
                        return false;
                    }
                    Ok(status) => status,
                };
                if n != 0 {
                    d.params.src_pos = src_pos;
                    return n > 0;
                }

                lookahead_size = d.dict.lookahead_size;
                lookahead_pos = d.dict.lookahead_pos;
            }
        }
    }

    d.params.src_pos = src_pos;
    d.dict.lookahead_size = lookahead_size;
    d.dict.lookahead_pos = lookahead_pos;
    true
}
2134
2135fn flush_output_buffer(c: &mut CallbackOxide, p: &mut ParamsOxide) -> (TDEFLStatus, usize, usize) {
2136 let mut res = (TDEFLStatus::Okay, p.src_pos, 0);
2137 if let CallbackOut::Buf(ref mut cb) = c.out {
2138 let n = cmp::min(cb.out_buf.len() - p.out_buf_ofs, p.flush_remaining as usize);
2139 if n != 0 {
2140 cb.out_buf[p.out_buf_ofs..p.out_buf_ofs + n]
2141 .copy_from_slice(&p.local_buf.b[p.flush_ofs as usize..p.flush_ofs as usize + n]);
2142 }
2143 p.flush_ofs += n as u32;
2144 p.flush_remaining -= n as u32;
2145 p.out_buf_ofs += n;
2146 res.2 = p.out_buf_ofs;
2147 }
2148
2149 if p.finished && p.flush_remaining == 0 {
2150 res.0 = TDEFLStatus::Done
2151 }
2152 res
2153}
2154
2155pub fn compress(
2172 d: &mut CompressorOxide,
2173 in_buf: &[u8],
2174 out_buf: &mut [u8],
2175 flush: TDEFLFlush,
2176) -> (TDEFLStatus, usize, usize) {
2177 compress_inner(
2178 d,
2179 &mut CallbackOxide::new_callback_buf(in_buf, out_buf),
2180 flush,
2181 )
2182}
2183
2184pub fn compress_to_output(
2193 d: &mut CompressorOxide,
2194 in_buf: &[u8],
2195 flush: TDEFLFlush,
2196 mut callback_func: impl FnMut(&[u8]) -> bool,
2197) -> (TDEFLStatus, usize) {
2198 let res = compress_inner(
2199 d,
2200 &mut CallbackOxide::new_callback_func(
2201 in_buf,
2202 CallbackFunc {
2203 put_buf_func: &mut callback_func,
2204 },
2205 ),
2206 flush,
2207 );
2208
2209 (res.0, res.1)
2210}
2211
/// Shared driver behind `compress`/`compress_to_output`: validates call
/// sequencing, dispatches to the appropriate compression loop, updates the
/// Adler-32 checksum, and performs the final/sync flush when the input for
/// this call has been fully consumed.
fn compress_inner(
    d: &mut CompressorOxide,
    callback: &mut CallbackOxide,
    flush: TDEFLFlush,
) -> (TDEFLStatus, usize, usize) {
    d.params.out_buf_ofs = 0;
    d.params.src_pos = 0;

    let prev_ok = d.params.prev_return_status == TDEFLStatus::Okay;
    // Once Finish was requested, only Finish may be requested again.
    let flush_finish_once = d.params.flush != TDEFLFlush::Finish || flush == TDEFLFlush::Finish;

    d.params.flush = flush;
    if !prev_ok || !flush_finish_once {
        d.params.prev_return_status = TDEFLStatus::BadParam;
        return (d.params.prev_return_status, 0, 0);
    }

    // Data from an earlier call is still pending: drain it first.
    if d.params.flush_remaining != 0 || d.params.finished {
        let res = flush_output_buffer(callback, &mut d.params);
        d.params.prev_return_status = res.0;
        return res;
    }

    // Pick the compression loop matching the configured flags.
    let one_probe = d.params.flags & MAX_PROBES_MASK == 1;
    let greedy = d.params.flags & TDEFL_GREEDY_PARSING_FLAG != 0;
    let filter_or_rle = d.params.flags & (TDEFL_FILTER_MATCHES | TDEFL_FORCE_ALL_RAW_BLOCKS) != 0;

    let raw = d.params.flags & TDEFL_FORCE_ALL_RAW_BLOCKS != 0;

    let compress_success = if raw {
        compress_stored(d, callback)
    } else if one_probe && greedy && !filter_or_rle {
        compress_fast(d, callback)
    } else {
        compress_normal(d, callback)
    };

    if !compress_success {
        return (
            d.params.prev_return_status,
            d.params.src_pos,
            d.params.out_buf_ofs,
        );
    }

    // Fold the consumed input into the running Adler-32 when needed.
    if let Some(in_buf) = callback.in_buf {
        if d.params.flags & (TDEFL_WRITE_ZLIB_HEADER | TDEFL_COMPUTE_ADLER32) != 0 {
            d.params.adler32 = update_adler32(d.params.adler32, &in_buf[..d.params.src_pos]);
        }
    }

    // If a flush was requested and everything buffered has been consumed,
    // emit the final/sync block now.
    let flush_none = d.params.flush == TDEFLFlush::None;
    let in_left = callback.in_buf.map_or(0, |buf| buf.len()) - d.params.src_pos;
    let remaining = in_left != 0 || d.params.flush_remaining != 0;
    if !flush_none && d.dict.lookahead_size == 0 && !remaining {
        let flush = d.params.flush;
        match flush_block(d, callback, flush) {
            Err(_) => {
                d.params.prev_return_status = TDEFLStatus::PutBufFailed;
                return (
                    d.params.prev_return_status,
                    d.params.src_pos,
                    d.params.out_buf_ofs,
                );
            }
            Ok(x) if x < 0 => {
                return (
                    d.params.prev_return_status,
                    d.params.src_pos,
                    d.params.out_buf_ofs,
                )
            }
            _ => {
                d.params.finished = d.params.flush == TDEFLFlush::Finish;
                // A full flush resets the dictionary so the next block is
                // independent of prior data.
                if d.params.flush == TDEFLFlush::Full {
                    d.dict.b.hash.fill(0);
                    d.dict.b.next.fill(0);
                    d.dict.size = 0;
                }
            }
        }
    }

    let res = flush_output_buffer(callback, &mut d.params);
    d.params.prev_return_status = res.0;

    res
}
2300
2301pub fn create_comp_flags_from_zip_params(level: i32, window_bits: i32, strategy: i32) -> u32 {
2314 let num_probes = (if level >= 0 {
2315 cmp::min(10, level)
2316 } else {
2317 CompressionLevel::DefaultLevel as i32
2318 }) as usize;
2319 let greedy = if level <= 3 {
2320 TDEFL_GREEDY_PARSING_FLAG
2321 } else {
2322 0
2323 };
2324 let mut comp_flags = u32::from(NUM_PROBES[num_probes]) | greedy;
2325
2326 if window_bits > 0 {
2327 comp_flags |= TDEFL_WRITE_ZLIB_HEADER;
2328 }
2329
2330 if level == 0 {
2331 comp_flags |= TDEFL_FORCE_ALL_RAW_BLOCKS;
2332 } else if strategy == CompressionStrategy::Filtered as i32 {
2333 comp_flags |= TDEFL_FILTER_MATCHES;
2334 } else if strategy == CompressionStrategy::HuffmanOnly as i32 {
2335 comp_flags &= !MAX_PROBES_MASK;
2336 } else if strategy == CompressionStrategy::Fixed as i32 {
2337 comp_flags |= TDEFL_FORCE_ALL_STATIC_BLOCKS;
2338 } else if strategy == CompressionStrategy::RLE as i32 {
2339 comp_flags |= TDEFL_RLE_MATCHES;
2340 }
2341
2342 comp_flags
2343}
2344
#[cfg(test)]
mod test {
    use super::{
        compress_to_output, create_comp_flags_from_zip_params, read_u16_le, write_u16_le,
        CompressionStrategy, CompressorOxide, TDEFLFlush, TDEFLStatus, DEFAULT_FLAGS,
        MZ_DEFAULT_WINDOW_BITS,
    };
    use crate::inflate::decompress_to_vec;
    use alloc::vec;

    /// `write_u16_le` stores little-endian: 2000 = 0x07D0 -> [0xD0, 0x07].
    #[test]
    fn u16_to_slice() {
        let mut slice = [0, 0];
        write_u16_le(2000, &mut slice, 0);
        assert_eq!(slice, [208, 7]);
    }

    /// `read_u16_le` is the inverse of `write_u16_le`.
    #[test]
    fn u16_from_slice() {
        let slice = [208, 7];
        assert_eq!(read_u16_le(&slice, 0), 2000);
    }

    /// Round-trip through `compress_to_output` + `decompress_to_vec`
    /// at default (level 4) settings.
    #[test]
    fn compress_output() {
        // DEFAULT_FLAGS must correspond to level 4 / default strategy.
        assert_eq!(
            DEFAULT_FLAGS,
            create_comp_flags_from_zip_params(
                4,
                MZ_DEFAULT_WINDOW_BITS,
                CompressionStrategy::Default as i32
            )
        );

        let slice = [
            1, 2, 3, 4, 1, 2, 3, 1, 2, 3, 1, 2, 6, 1, 2, 3, 1, 2, 3, 2, 3, 1, 2, 3,
        ];
        let mut encoded = vec![];
        let flags = create_comp_flags_from_zip_params(6, 0, 0);
        let mut d = CompressorOxide::new(flags);
        let (status, in_consumed) =
            compress_to_output(&mut d, &slice, TDEFLFlush::Finish, |out: &[u8]| {
                encoded.extend_from_slice(out);
                true
            });

        assert_eq!(status, TDEFLStatus::Done);
        assert_eq!(in_consumed, slice.len());

        let decoded = decompress_to_vec(&encoded[..]).unwrap();
        assert_eq!(&decoded[..], &slice[..]);
    }

    /// Level 1 takes the `compress_fast` path; pins the exact encoded bytes
    /// in addition to checking the round trip.
    #[test]
    fn compress_fast() {
        let slice = [
            1, 2, 3, 4, 1, 2, 3, 1, 2, 3, 1, 2, 6, 1, 2, 3, 1, 2, 3, 2, 3, 1, 2, 3,
        ];
        let mut encoded = vec![];
        let flags = create_comp_flags_from_zip_params(1, 0, 0);
        let mut d = CompressorOxide::new(flags);
        let (status, in_consumed) =
            compress_to_output(&mut d, &slice, TDEFLFlush::Finish, |out: &[u8]| {
                encoded.extend_from_slice(out);
                true
            });

        assert_eq!(status, TDEFLStatus::Done);
        assert_eq!(in_consumed, slice.len());

        // Golden output: any change here means the fast path's encoding
        // changed, even if the round trip still works.
        assert_eq!(
            &encoded[..],
            [99, 100, 98, 102, 1, 98, 48, 98, 3, 147, 204, 76, 204, 140, 76, 204, 0]
        );

        let decoded = decompress_to_vec(&encoded[..]).unwrap();
        assert_eq!(&decoded[..], &slice[..]);
    }

    /// With window_bits > 0 a zlib header must be produced (CMF low nibble
    /// == 8 for deflate); decompress byte-by-byte to exercise streaming.
    #[test]
    fn zlib_window_bits() {
        use crate::inflate::stream::{inflate, InflateState};
        use crate::DataFormat;
        use alloc::boxed::Box;
        let slice = [
            1, 2, 3, 4, 1, 2, 3, 1, 2, 3, 1, 2, 6, 1, 2, 3, 1, 2, 3, 2, 3, 1, 2, 3, 35, 22, 22, 2,
            6, 2, 6,
        ];
        let mut encoded = vec![];
        let flags = create_comp_flags_from_zip_params(2, 1, CompressionStrategy::RLE.into());
        let mut d = CompressorOxide::new(flags);
        let (status, in_consumed) =
            compress_to_output(&mut d, &slice, TDEFLFlush::Finish, |out: &[u8]| {
                encoded.extend_from_slice(out);
                true
            });

        assert_eq!(status, TDEFLStatus::Done);
        assert_eq!(in_consumed, slice.len());

        let mut output = vec![0; slice.len()];

        let mut decompressor = Box::new(InflateState::new(DataFormat::Zlib));

        let mut out_slice = output.as_mut_slice();
        // Feed one byte at a time to exercise the streaming state machine.
        for i in 0..encoded.len() {
            let result = inflate(
                &mut decompressor,
                &encoded[i..i + 1],
                out_slice,
                crate::MZFlush::None,
            );
            out_slice = &mut out_slice[result.bytes_written..];
        }
        let cmf = decompressor.decompressor().zlib_header().0;
        assert_eq!(cmf, 8);
        assert_eq!(output, slice)
    }
}
2466}