brotli/enc/backward_references/mod.rs

1#![allow(dead_code)]
2mod benchmark;
3pub mod hash_to_binary_tree;
4pub mod hq;
5mod test;
6
7use super::super::alloc::{Allocator, SliceWrapper, SliceWrapperMut};
8use super::command::{BrotliDistanceParams, Command, ComputeDistanceCode, InitCommand};
9use super::dictionary_hash::kStaticDictionaryHash;
10use super::hash_to_binary_tree::{H10Buckets, H10DefaultParams, ZopfliNode, H10};
11use super::static_dict::BrotliDictionary;
12use super::static_dict::{
13    FindMatchLengthWithLimit, FindMatchLengthWithLimitMin4, BROTLI_UNALIGNED_LOAD32,
14    BROTLI_UNALIGNED_LOAD64,
15};
16use super::util::{brotli_max_size_t, floatX, Log2FloorNonZero};
17
18static kBrotliMinWindowBits: i32 = 10i32;
19
20static kBrotliMaxWindowBits: i32 = 24i32;
21
22pub static kInvalidMatch: u32 = 0xfffffffu32;
23
24static kCutoffTransformsCount: u32 = 10u32;
25
26static kCutoffTransforms: u64 = 0x71b520au64 << 32 | 0xda2d3200u32 as (u64);
27
28pub static kHashMul32: u32 = 0x1e35a7bdu32;
29
30pub static kHashMul64: u64 = 0x1e35a7bdu64 << 32 | 0x1e35a7bdu64;
31
32pub static kHashMul64Long: u64 = 0x1fe35a7bu32 as (u64) << 32 | 0xd3579bd3u32 as (u64);
33
34#[derive(PartialEq, Eq, Copy, Clone, Debug)]
35#[repr(C)]
36pub enum BrotliEncoderMode {
37    BROTLI_MODE_GENERIC = 0,
38    BROTLI_MODE_TEXT = 1,
39    BROTLI_MODE_FONT = 2,
40    BROTLI_FORCE_LSB_PRIOR = 3,
41    BROTLI_FORCE_MSB_PRIOR = 4,
42    BROTLI_FORCE_UTF8_PRIOR = 5,
43    BROTLI_FORCE_SIGNED_PRIOR = 6,
44}
45
46#[derive(Clone, Copy, Debug, PartialEq)]
47pub struct BrotliHasherParams {
48    /// type of hasher to use (default: type 6; other types trade off speed vs memory)
49    pub type_: i32,
50    /// log2 of the number of buckets to have in the hash table (defaults to quality - 1)
51    pub bucket_bits: i32,
52    /// log2 of the number of potential matches to hold per bucket (hash collisions)
53    pub block_bits: i32,
54    /// number of bytes of a potential match to hash
55    pub hash_len: i32,
56    /// number of previous distance matches to check for future matches (defaults to 16)
57    pub num_last_distances_to_check: i32,
58    /// how much to weigh distance vs an extra byte of copy match when comparing possible copy sources
59    pub literal_byte_score: i32,
60}
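// Note: bucket_bits and block_bits are log2 sizes; e.g. `HQ5Sub` below hard-codes
// bucket_bits = 14 and block_bits = 4, i.e. 16384 buckets of 16 slots each.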
61
62#[derive(Clone, Debug)]
63pub struct BrotliEncoderParams {
64    pub dist: BrotliDistanceParams,
65    /// whether this brotli file is generic data, text, or font data
66    pub mode: BrotliEncoderMode,
67    /// quality parameter between 0 and 11 (11 yields the smallest output but takes the longest to encode)
68    pub quality: i32,
69    pub q9_5: bool,
70    /// log2 of how big the ring buffer should be for copying prior data
71    pub lgwin: i32,
72    /// log2 of the input block size, i.e. how often metablocks are emitted
73    pub lgblock: i32,
74    /// how big the source file is (or 0 if no hint is provided)
75    pub size_hint: usize,
76    /// avoid serializing out priors for literal sections in favor of decode speed
77    pub disable_literal_context_modeling: i32,
78    pub hasher: BrotliHasherParams,
79    /// produce an IR (intermediate representation) of the compressed file
80    pub log_meta_block: bool,
81    /// attempt to detect the stride: how far back the byte that best predicts the current byte lies
82    /// * 0 = off (stride 1 always)
83    /// * 1 = on per 16th of a file
84    /// * 2 = on per block type switch
85    pub stride_detection_quality: u8,
86    /// if nonzero, will search for high entropy strings and log them differently to the IR
87    pub high_entropy_detection_quality: u8,
88    /// if nonzero it will search for the temporal locality and effectiveness of the priors
89    /// for literals. The best adaptation and forgetfulness will be logged per metablock to the IR
90    pub cdf_adaptation_detection: u8,
91    /// whether to check, on a per-context-map basis, if the previous byte or the context_map is the better predictor
92    pub prior_bitmask_detection: u8,
93    /// for prior bitmask detection: stride_low, stride_speed, cm_low, cm_speed
94    pub literal_adaptation: [(u16, u16); 4],
95    pub large_window: bool,
96    /// avoid searching for the best ndirect vs npostfix parameters for distance
97    pub avoid_distance_prefix_search: bool,
98    /// construct brotli in such a way that it may be concatenated with another brotli file using appropriate bit ops
99    pub catable: bool,
100    /// can use the dictionary (default yes unless catable is set)
101    pub use_dictionary: bool,
102    /// construct brotli in such a way that another concatable brotli file may be appended
103    pub appendable: bool,
104    /// include a magic number and version number and size_hint at the beginning
105    pub magic_number: bool,
106    /// prefer to compute the map of previously seen strings
107    /// just once for all the threads at the beginning, since they overlap significantly
108    pub favor_cpu_efficiency: bool,
109}
110
111impl Default for BrotliEncoderParams {
112    fn default() -> BrotliEncoderParams {
113        super::encode::BrotliEncoderInitParams()
114    }
115}
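// A minimal usage sketch (not part of the upstream sources): the fields documented above
// can be adjusted after taking the defaults; the quality/lgwin values below are arbitrary
// illustrative choices.
#[cfg(test)]
mod encoder_params_example {
    #[test]
    fn tweak_default_params() {
        let mut params = super::BrotliEncoderParams::default();
        params.quality = 9; // 0..=11; higher means smaller output but slower encoding
        params.lgwin = 22; // log2 of the ring buffer (window) size
        assert!(params.quality <= 11 && params.lgwin >= 10);
    }
}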
116
117#[derive(Clone, Copy, Default, PartialEq)]
118pub struct H9Opts {
119    pub literal_byte_score: u32,
120}
121pub enum HowPrepared {
122    ALREADY_PREPARED,
123    NEWLY_PREPARED,
124}
125#[derive(Clone, PartialEq)]
126pub struct Struct1 {
127    pub params: BrotliHasherParams,
128    pub is_prepared_: i32,
129    pub dict_num_lookups: usize,
130    pub dict_num_matches: usize,
131}
132
133fn LiteralSpreeLengthForSparseSearch(params: &BrotliEncoderParams) -> usize {
134    (if params.quality < 9 { 64i32 } else { 512i32 }) as usize
135}
136
137fn brotli_min_size_t(a: usize, b: usize) -> usize {
138    if a < b {
139        a
140    } else {
141        b
142    }
143}
144
145pub struct HasherSearchResult {
146    pub len: usize,
147    pub len_x_code: usize,
148    pub distance: usize,
149    pub score: u64,
150}
151
152pub trait CloneWithAlloc<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> {
153    fn clone_with_alloc(&self, m: &mut Alloc) -> Self;
154}
155
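/// Common interface for the backward-reference hashers in this module: `Prepare` once per
/// stream, use the `Store*` methods to admit positions into the table as input is consumed,
/// then `FindLongestMatch` to query match candidates for the current position.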
156pub trait AnyHasher {
157    fn Opts(&self) -> H9Opts;
158    fn GetHasherCommon(&mut self) -> &mut Struct1;
159    fn HashBytes(&self, data: &[u8]) -> usize;
160    fn HashTypeLength(&self) -> usize;
161    fn StoreLookahead(&self) -> usize;
162    fn PrepareDistanceCache(&self, distance_cache: &mut [i32]);
163    fn FindLongestMatch(
164        &mut self,
165        dictionary: Option<&BrotliDictionary>,
166        dictionary_hash: &[u16],
167        data: &[u8],
168        ring_buffer_mask: usize,
169        distance_cache: &[i32],
170        cur_ix: usize,
171        max_length: usize,
172        max_backward: usize,
173        gap: usize,
174        max_distance: usize,
175        out: &mut HasherSearchResult,
176    ) -> bool;
177    fn Store(&mut self, data: &[u8], mask: usize, ix: usize);
178    fn Store4Vec4(&mut self, data: &[u8], mask: usize, ix: usize) {
179        for i in 0..4 {
180            self.Store(data, mask, ix + i * 4);
181        }
182    }
183    fn StoreEvenVec4(&mut self, data: &[u8], mask: usize, ix: usize) {
184        for i in 0..4 {
185            self.Store(data, mask, ix + i * 2);
186        }
187    }
188    fn StoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize);
189    fn BulkStoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize);
190    fn Prepare(&mut self, one_shot: bool, input_size: usize, data: &[u8]) -> HowPrepared;
191    fn StitchToPreviousBlock(
192        &mut self,
193        num_bytes: usize,
194        position: usize,
195        ringbuffer: &[u8],
196        ringbuffer_mask: usize,
197    );
198}
199
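/// Stores the last three positions of the previous block in the hash table so that
/// matches found near the start of the next block can reach back across the boundary.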
200pub fn StitchToPreviousBlockInternal<T: AnyHasher>(
201    handle: &mut T,
202    num_bytes: usize,
203    position: usize,
204    ringbuffer: &[u8],
205    ringbuffer_mask: usize,
206) {
207    if num_bytes >= handle.HashTypeLength().wrapping_sub(1) && (position >= 3) {
208        handle.Store(ringbuffer, ringbuffer_mask, position.wrapping_sub(3));
209        handle.Store(ringbuffer, ringbuffer_mask, position.wrapping_sub(2));
210        handle.Store(ringbuffer, ringbuffer_mask, position.wrapping_sub(1));
211    }
212}
213
214pub fn StoreLookaheadThenStore<T: AnyHasher>(hasher: &mut T, size: usize, dict: &[u8]) {
215    let overlap = hasher.StoreLookahead().wrapping_sub(1);
216    if size > overlap {
217        hasher.BulkStoreRange(dict, !(0), 0, size - overlap);
218    }
219}
220
221pub trait BasicHashComputer {
222    fn HashBytes(&self, data: &[u8]) -> u32;
223    fn BUCKET_BITS(&self) -> i32;
224    fn USE_DICTIONARY(&self) -> i32;
225    fn BUCKET_SWEEP(&self) -> i32;
226}
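/// Simple hashers (H2..H4, H54): a multiplicative hash over the first few bytes selects a
/// bucket, and `BUCKET_SWEEP()` consecutive slots per bucket are overwritten round-robin
/// based on the position (see `Store`).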
227pub struct BasicHasher<Buckets: SliceWrapperMut<u32> + SliceWrapper<u32> + BasicHashComputer> {
228    pub GetHasherCommon: Struct1,
229    pub buckets_: Buckets,
230    pub h9_opts: H9Opts,
231}
232
233impl<A: SliceWrapperMut<u32> + SliceWrapper<u32> + BasicHashComputer> PartialEq<BasicHasher<A>>
234    for BasicHasher<A>
235{
236    fn eq(&self, other: &BasicHasher<A>) -> bool {
237        self.GetHasherCommon == other.GetHasherCommon
238            && self.h9_opts == other.h9_opts
239            && self.buckets_.slice() == other.buckets_.slice()
240    }
241}
242
243impl<T: SliceWrapperMut<u32> + SliceWrapper<u32> + BasicHashComputer> BasicHasher<T> {
244    fn StoreRangeOptBasic(
245        &mut self,
246        data: &[u8],
247        mask: usize,
248        ix_start: usize,
249        ix_end: usize,
250    ) -> usize {
251        let lookahead = 8;
252        if ix_end >= ix_start + lookahead * 2 {
253            let chunk_count = (ix_end - ix_start) / 4;
254            for chunk_id in 0..chunk_count {
255                let i = (ix_start + chunk_id * 4) & mask;
256                let word11 = data.split_at(i).1.split_at(11).0;
257                let mixed0 = self.HashBytes(word11);
258                let mixed1 = self.HashBytes(word11.split_at(1).1);
259                let mixed2 = self.HashBytes(word11.split_at(2).1);
260                let mixed3 = self.HashBytes(word11.split_at(3).1);
261                let off: u32 = (i >> 3).wrapping_rem(self.buckets_.BUCKET_SWEEP() as usize) as u32;
262                let offset0: usize = mixed0 + off as usize;
263                let offset1: usize = mixed1 + off as usize;
264                let offset2: usize = mixed2 + off as usize;
265                let offset3: usize = mixed3 + off as usize;
266                self.buckets_.slice_mut()[offset0] = i as u32;
267                self.buckets_.slice_mut()[offset1] = i as u32 + 1;
268                self.buckets_.slice_mut()[offset2] = i as u32 + 2;
269                self.buckets_.slice_mut()[offset3] = i as u32 + 3;
270            }
271            return ix_start + chunk_count * 4;
272        }
273        ix_start
274    }
275}
276pub struct H2Sub<AllocU32: alloc::Allocator<u32>> {
277    pub buckets_: AllocU32::AllocatedMemory, // 65537
278}
279impl<T: SliceWrapperMut<u32> + SliceWrapper<u32> + BasicHashComputer> AnyHasher for BasicHasher<T> {
280    #[inline(always)]
281    fn Opts(&self) -> H9Opts {
282        self.h9_opts
283    }
284    #[allow(unused_variables)]
285    fn PrepareDistanceCache(&self, distance_cache: &mut [i32]) {}
286    #[inline(always)]
287    fn HashTypeLength(&self) -> usize {
288        8
289    }
290    #[inline(always)]
291    fn StoreLookahead(&self) -> usize {
292        8
293    }
294    fn StitchToPreviousBlock(
295        &mut self,
296        num_bytes: usize,
297        position: usize,
298        ringbuffer: &[u8],
299        ringbuffer_mask: usize,
300    ) {
301        StitchToPreviousBlockInternal(self, num_bytes, position, ringbuffer, ringbuffer_mask);
302    }
303    #[inline(always)]
304    fn GetHasherCommon(&mut self) -> &mut Struct1 {
305        &mut self.GetHasherCommon
306    }
307    #[inline(always)]
308    fn HashBytes(&self, data: &[u8]) -> usize {
309        self.buckets_.HashBytes(data) as usize
310    }
311    fn Store(&mut self, data: &[u8], mask: usize, ix: usize) {
312        let (_, data_window) = data.split_at((ix & mask));
313        let key: u32 = self.HashBytes(data_window) as u32;
314        let off: u32 = (ix >> 3).wrapping_rem(self.buckets_.BUCKET_SWEEP() as usize) as u32;
315        self.buckets_.slice_mut()[key.wrapping_add(off) as usize] = ix as u32;
316    }
317    fn StoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
318        for i in self.StoreRangeOptBasic(data, mask, ix_start, ix_end)..ix_end {
319            self.Store(data, mask, i);
320        }
321    }
322    fn BulkStoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
323        self.StoreRange(data, mask, ix_start, ix_end);
324    }
325    fn Prepare(&mut self, one_shot: bool, input_size: usize, data: &[u8]) -> HowPrepared {
326        if self.GetHasherCommon.is_prepared_ != 0 {
327            return HowPrepared::ALREADY_PREPARED;
328        }
329        let partial_prepare_threshold = (4 << self.buckets_.BUCKET_BITS()) >> 7;
330        if one_shot && input_size <= partial_prepare_threshold {
331            for i in 0..input_size {
332                let key = self.HashBytes(&data[i..]);
333                let bs = self.buckets_.BUCKET_SWEEP() as usize;
334                for item in self.buckets_.slice_mut()[key..(key + bs)].iter_mut() {
335                    *item = 0;
336                }
337            }
338        } else {
339            for item in self.buckets_.slice_mut().iter_mut() {
340                *item = 0;
341            }
342        }
343        self.GetHasherCommon.is_prepared_ = 1;
344        HowPrepared::NEWLY_PREPARED
345    }
346
347    fn FindLongestMatch(
348        &mut self,
349        dictionary: Option<&BrotliDictionary>,
350        dictionary_hash: &[u16],
351        data: &[u8],
352        ring_buffer_mask: usize,
353        distance_cache: &[i32],
354        cur_ix: usize,
355        max_length: usize,
356        max_backward: usize,
357        gap: usize,
358        max_distance: usize,
359        out: &mut HasherSearchResult,
360    ) -> bool {
361        let opts = self.Opts();
362        let best_len_in: usize = out.len;
363        let cur_ix_masked: usize = cur_ix & ring_buffer_mask;
364        let key: u32 = self.HashBytes(&data[cur_ix_masked..]) as u32;
365        let mut compare_char: i32 = data[cur_ix_masked.wrapping_add(best_len_in)] as i32;
366        let mut best_score: u64 = out.score;
367        let mut best_len: usize = best_len_in;
368        let cached_backward: usize = distance_cache[0] as usize;
369        let mut prev_ix: usize = cur_ix.wrapping_sub(cached_backward);
370        let mut is_match_found = false;
371        out.len_x_code = 0usize;
372        if prev_ix < cur_ix {
373            prev_ix &= ring_buffer_mask as u32 as usize;
374            if compare_char == data[prev_ix.wrapping_add(best_len)] as i32 {
375                let len: usize = FindMatchLengthWithLimitMin4(
376                    &data[prev_ix..],
377                    &data[cur_ix_masked..],
378                    max_length,
379                );
380                if len != 0 {
381                    best_score = BackwardReferenceScoreUsingLastDistance(len, opts);
382                    best_len = len;
383                    out.len = len;
384                    out.distance = cached_backward;
385                    out.score = best_score;
386                    compare_char = data[cur_ix_masked.wrapping_add(best_len)] as i32;
387                    if self.buckets_.BUCKET_SWEEP() == 1i32 {
388                        self.buckets_.slice_mut()[key as usize] = cur_ix as u32;
389                        return true;
390                    } else {
391                        is_match_found = true;
392                    }
393                }
394            }
395        }
396        let bucket_sweep = self.buckets_.BUCKET_SWEEP();
397        if bucket_sweep == 1i32 {
398            prev_ix = self.buckets_.slice()[key as usize] as usize;
399            self.buckets_.slice_mut()[key as usize] = cur_ix as u32;
400            let backward: usize = cur_ix.wrapping_sub(prev_ix);
401            prev_ix &= ring_buffer_mask as u32 as usize;
402            if compare_char != data[prev_ix.wrapping_add(best_len_in)] as i32 {
403                return false;
404            }
405            if backward == 0usize || backward > max_backward {
406                return false;
407            }
408            let len: usize =
409                FindMatchLengthWithLimitMin4(&data[prev_ix..], &data[cur_ix_masked..], max_length);
410            if len != 0 {
411                out.len = len;
412                out.distance = backward;
413                out.score = BackwardReferenceScore(len, backward, opts);
414                return true;
415            }
416        } else {
417            for prev_ix_ref in
418                self.buckets_.slice().split_at(key as usize).1[..bucket_sweep as usize].iter()
419            {
420                let mut prev_ix = *prev_ix_ref as usize;
421                let backward: usize = cur_ix.wrapping_sub(prev_ix);
422                prev_ix &= ring_buffer_mask as u32 as usize;
423                if compare_char != data[prev_ix.wrapping_add(best_len)] as i32 {
424                    continue;
425                }
426                if backward == 0usize || backward > max_backward {
427                    continue;
428                }
429                let len = FindMatchLengthWithLimitMin4(
430                    &data[prev_ix..],
431                    &data[cur_ix_masked..],
432                    max_length,
433                );
434                if len != 0 {
435                    let score: u64 = BackwardReferenceScore(len, backward, opts);
436                    if best_score < score {
437                        best_score = score;
438                        best_len = len;
439                        out.len = best_len;
440                        out.distance = backward;
441                        out.score = score;
442                        compare_char = data[cur_ix_masked.wrapping_add(best_len)] as i32;
443                        is_match_found = true;
444                    }
445                }
446            }
447        }
448        if dictionary.is_some() && self.buckets_.USE_DICTIONARY() != 0 && !is_match_found {
449            is_match_found = SearchInStaticDictionary(
450                dictionary.unwrap(),
451                dictionary_hash,
452                self,
453                &data[cur_ix_masked..],
454                max_length,
455                max_backward.wrapping_add(gap),
456                max_distance,
457                out,
458                true,
459            );
460        }
461        self.buckets_.slice_mut()
462            [(key as usize).wrapping_add((cur_ix >> 3).wrapping_rem(bucket_sweep as usize))] =
463            cur_ix as u32;
464        is_match_found
465    }
466}
467impl<AllocU32: alloc::Allocator<u32>> BasicHashComputer for H2Sub<AllocU32> {
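    // The 64-bit load is shifted left by 64 - 8*5, so only the first 5 bytes of `data`
    // influence the hash; the top BUCKET_BITS() = 16 bits of the product form the bucket index.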
468    fn HashBytes(&self, data: &[u8]) -> u32 {
469        let h: u64 =
470            (BROTLI_UNALIGNED_LOAD64(data) << (64i32 - 8i32 * 5i32)).wrapping_mul(kHashMul64);
471        (h >> (64i32 - 16i32)) as u32
472    }
473    fn BUCKET_BITS(&self) -> i32 {
474        16
475    }
476    fn BUCKET_SWEEP(&self) -> i32 {
477        1
478    }
479    fn USE_DICTIONARY(&self) -> i32 {
480        1
481    }
482}
483impl<AllocU32: alloc::Allocator<u32>> SliceWrapperMut<u32> for H2Sub<AllocU32> {
484    fn slice_mut(&mut self) -> &mut [u32] {
485        return self.buckets_.slice_mut();
486    }
487}
488impl<AllocU32: alloc::Allocator<u32>> SliceWrapper<u32> for H2Sub<AllocU32> {
489    fn slice(&self) -> &[u32] {
490        return self.buckets_.slice();
491    }
492}
493pub struct H3Sub<AllocU32: alloc::Allocator<u32>> {
494    pub buckets_: AllocU32::AllocatedMemory, // 65538
495}
496impl<AllocU32: alloc::Allocator<u32>> SliceWrapperMut<u32> for H3Sub<AllocU32> {
497    fn slice_mut(&mut self) -> &mut [u32] {
498        return self.buckets_.slice_mut();
499    }
500}
501impl<AllocU32: alloc::Allocator<u32>> SliceWrapper<u32> for H3Sub<AllocU32> {
502    fn slice(&self) -> &[u32] {
503        return self.buckets_.slice();
504    }
505}
506impl<AllocU32: alloc::Allocator<u32>> BasicHashComputer for H3Sub<AllocU32> {
507    fn BUCKET_BITS(&self) -> i32 {
508        16
509    }
510    fn BUCKET_SWEEP(&self) -> i32 {
511        2
512    }
513    fn USE_DICTIONARY(&self) -> i32 {
514        0
515    }
516    fn HashBytes(&self, data: &[u8]) -> u32 {
517        let h: u64 =
518            (BROTLI_UNALIGNED_LOAD64(data) << (64i32 - 8i32 * 5i32)).wrapping_mul(kHashMul64);
519        (h >> (64i32 - 16i32)) as u32
520    }
521}
522pub struct H4Sub<AllocU32: alloc::Allocator<u32>> {
523    pub buckets_: AllocU32::AllocatedMemory, // 131076
524}
525impl<AllocU32: alloc::Allocator<u32>> BasicHashComputer for H4Sub<AllocU32> {
526    fn BUCKET_BITS(&self) -> i32 {
527        17
528    }
529    fn BUCKET_SWEEP(&self) -> i32 {
530        4
531    }
532    fn USE_DICTIONARY(&self) -> i32 {
533        1
534    }
535    fn HashBytes(&self, data: &[u8]) -> u32 {
536        let h: u64 =
537            (BROTLI_UNALIGNED_LOAD64(data) << (64i32 - 8i32 * 5i32)).wrapping_mul(kHashMul64);
538        (h >> (64i32 - 17i32)) as u32
539    }
540}
541impl<AllocU32: alloc::Allocator<u32>> SliceWrapperMut<u32> for H4Sub<AllocU32> {
542    fn slice_mut(&mut self) -> &mut [u32] {
543        return self.buckets_.slice_mut();
544    }
545}
546impl<AllocU32: alloc::Allocator<u32>> SliceWrapper<u32> for H4Sub<AllocU32> {
547    fn slice(&self) -> &[u32] {
548        return self.buckets_.slice();
549    }
550}
551pub struct H54Sub<AllocU32: alloc::Allocator<u32>> {
552    pub buckets_: AllocU32::AllocatedMemory,
553}
554impl<AllocU32: alloc::Allocator<u32>> BasicHashComputer for H54Sub<AllocU32> {
555    fn BUCKET_BITS(&self) -> i32 {
556        20
557    }
558    fn BUCKET_SWEEP(&self) -> i32 {
559        4
560    }
561    fn USE_DICTIONARY(&self) -> i32 {
562        0
563    }
564    fn HashBytes(&self, data: &[u8]) -> u32 {
565        let h: u64 =
566            (BROTLI_UNALIGNED_LOAD64(data) << (64i32 - 8i32 * 7i32)).wrapping_mul(kHashMul64);
567        (h >> (64i32 - 20i32)) as u32
568    }
569}
570
571impl<AllocU32: alloc::Allocator<u32>> SliceWrapperMut<u32> for H54Sub<AllocU32> {
572    fn slice_mut(&mut self) -> &mut [u32] {
573        return self.buckets_.slice_mut();
574    }
575}
576impl<AllocU32: alloc::Allocator<u32>> SliceWrapper<u32> for H54Sub<AllocU32> {
577    fn slice(&self) -> &[u32] {
578        return self.buckets_.slice();
579    }
580}
581pub const H9_BUCKET_BITS: usize = 15;
582pub const H9_BLOCK_BITS: usize = 8;
583pub const H9_NUM_LAST_DISTANCES_TO_CHECK: usize = 16;
584pub const H9_BLOCK_SIZE: usize = 1 << H9_BLOCK_BITS;
585const H9_BLOCK_MASK: usize = (1 << H9_BLOCK_BITS) - 1;
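// With these constants the H9 tables hold (1 << 15) = 32768 chains (`num_`) of
// (1 << 8) = 256 stored positions each (`buckets_`), i.e. 1 << 23 slots in total.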
586
587impl H9Opts {
588    pub fn new(params: &BrotliHasherParams) -> H9Opts {
589        H9Opts {
590            literal_byte_score: if params.literal_byte_score != 0 {
591                params.literal_byte_score as u32
592            } else {
593                540
594            },
595        }
596    }
597}
598
599pub struct H9<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> {
600    pub num_: <Alloc as Allocator<u16>>::AllocatedMemory, //[u16;1 << H9_BUCKET_BITS],
601    pub buckets_: <Alloc as Allocator<u32>>::AllocatedMemory, //[u32; H9_BLOCK_SIZE << H9_BUCKET_BITS],
602    pub dict_search_stats_: Struct1,
603    pub h9_opts: H9Opts,
604}
605
606impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> PartialEq<H9<Alloc>> for H9<Alloc> {
607    fn eq(&self, other: &H9<Alloc>) -> bool {
608        self.dict_search_stats_ == other.dict_search_stats_
609            && self.num_.slice() == other.num_.slice()
610            && self.buckets_.slice() == other.buckets_.slice()
611            && self.h9_opts == other.h9_opts
612    }
613}
614
615fn adv_prepare_distance_cache(distance_cache: &mut [i32], num_distances: i32) {
616    if num_distances > 4i32 {
617        let last_distance: i32 = distance_cache[0];
618        distance_cache[4] = last_distance - 1i32;
619        distance_cache[5] = last_distance + 1i32;
620        distance_cache[6] = last_distance - 2i32;
621        distance_cache[7] = last_distance + 2i32;
622        distance_cache[8] = last_distance - 3i32;
623        distance_cache[9] = last_distance + 3i32;
624        if num_distances > 10i32 {
625            let next_last_distance: i32 = distance_cache[1];
626            distance_cache[10] = next_last_distance - 1i32;
627            distance_cache[11] = next_last_distance + 1i32;
628            distance_cache[12] = next_last_distance - 2i32;
629            distance_cache[13] = next_last_distance + 2i32;
630            distance_cache[14] = next_last_distance - 3i32;
631            distance_cache[15] = next_last_distance + 3i32;
632        }
633    }
634}
635
636pub const kDistanceCacheIndex: [u8; 16] = [0, 1, 2, 3, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1];
637
638pub const kDistanceCacheOffset: [i8; 16] = [0, 0, 0, 0, -1, 1, -2, 2, -3, 3, -1, 1, -2, 2, -3, 3];
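// Candidate i probes distance_cache[kDistanceCacheIndex[i]] + kDistanceCacheOffset[i]:
// codes 0..3 are the four cached distances verbatim, 4..9 are last_distance +/- 1..3, and
// 10..15 are second_last_distance +/- 1..3, matching `adv_prepare_distance_cache` above.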
639
640//const BROTLI_LITERAL_BYTE_SCORE: u64 = 540;
641const BROTLI_DISTANCE_BIT_PENALTY: u32 = 120;
642
643// Score must be positive after applying maximal penalty.
644const BROTLI_SCORE_BASE: u32 = (BROTLI_DISTANCE_BIT_PENALTY * 8 * 8/* sizeof usize*/);
645const kDistanceShortCodeCost: [u32; 16] = [
646    /* Repeat last */
647    BROTLI_SCORE_BASE + 60,
648    /* 2nd, 3rd, 4th last */
649    BROTLI_SCORE_BASE - 95,
650    BROTLI_SCORE_BASE - 117,
651    BROTLI_SCORE_BASE - 127,
652    /* Last with offset */
653    BROTLI_SCORE_BASE - 93,
654    BROTLI_SCORE_BASE - 93,
655    BROTLI_SCORE_BASE - 96,
656    BROTLI_SCORE_BASE - 96,
657    BROTLI_SCORE_BASE - 99,
658    BROTLI_SCORE_BASE - 99,
659    /* 2nd last with offset */
660    BROTLI_SCORE_BASE - 105,
661    BROTLI_SCORE_BASE - 105,
662    BROTLI_SCORE_BASE - 115,
663    BROTLI_SCORE_BASE - 115,
664    BROTLI_SCORE_BASE - 125,
665    BROTLI_SCORE_BASE - 125,
666];
667
668fn BackwardReferenceScoreH9(
669    copy_length: usize,
670    backward_reference_offset: usize,
671    h9_opts: H9Opts,
672) -> u64 {
673    (u64::from(BROTLI_SCORE_BASE)
674        .wrapping_add((h9_opts.literal_byte_score as u64).wrapping_mul(copy_length as u64))
675        .wrapping_sub(
676            (BROTLI_DISTANCE_BIT_PENALTY as u64)
677                .wrapping_mul(Log2FloorNonZero(backward_reference_offset as u64) as u64),
678        ))
679        >> 2
680}
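// Worked example: with the default literal_byte_score of 540, a 6-byte copy at distance 64
// scores (7680 + 540*6 - 120*Log2FloorNonZero(64)) >> 2 = (7680 + 3240 - 720) >> 2 = 2550.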
681
682fn BackwardReferenceScoreUsingLastDistanceH9(
683    copy_length: usize,
684    distance_short_code: usize,
685    h9_opts: H9Opts,
686) -> u64 {
687    ((h9_opts.literal_byte_score as u64)
688        .wrapping_mul(copy_length as u64)
689        .wrapping_add(u64::from(kDistanceShortCodeCost[distance_short_code])))
690        >> 2
691}
692
693impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> AnyHasher for H9<Alloc> {
694    #[inline(always)]
695    fn Opts(&self) -> H9Opts {
696        self.h9_opts
697    }
698    #[inline(always)]
699    fn GetHasherCommon(&mut self) -> &mut Struct1 {
700        &mut self.dict_search_stats_
701    }
702    #[inline(always)]
703    fn HashBytes(&self, data: &[u8]) -> usize {
704        let h: u32 = BROTLI_UNALIGNED_LOAD32(data).wrapping_mul(kHashMul32);
705        let thirty_two: usize = 32;
706        (h >> (thirty_two.wrapping_sub(H9_BUCKET_BITS))) as usize
707    }
708    #[inline(always)]
709    fn HashTypeLength(&self) -> usize {
710        4
711    }
712    #[inline(always)]
713    fn StoreLookahead(&self) -> usize {
714        4
715    }
716    fn PrepareDistanceCache(&self, distance_cache: &mut [i32]) {
717        let num_distances = H9_NUM_LAST_DISTANCES_TO_CHECK as i32;
718        adv_prepare_distance_cache(distance_cache, num_distances);
719    }
720    fn FindLongestMatch(
721        &mut self,
722        dictionary: Option<&BrotliDictionary>,
723        dictionary_hash: &[u16],
724        data: &[u8],
725        ring_buffer_mask: usize,
726        distance_cache: &[i32],
727        cur_ix: usize,
728        max_length: usize,
729        max_backward: usize,
730        gap: usize,
731        max_distance: usize,
732        out: &mut HasherSearchResult,
733    ) -> bool {
734        let best_len_in: usize = out.len;
735        let cur_ix_masked: usize = cur_ix & ring_buffer_mask;
736        let mut best_score: u64 = out.score;
737        let mut best_len: usize = best_len_in;
738        let mut is_match_found = false;
739        out.len_x_code = 0usize;
740        for i in 0..H9_NUM_LAST_DISTANCES_TO_CHECK {
741            let idx = kDistanceCacheIndex[i] as usize;
742            let backward =
743                (distance_cache[idx] as usize).wrapping_add(kDistanceCacheOffset[i] as usize);
744            let mut prev_ix = cur_ix.wrapping_sub(backward);
745            if prev_ix >= cur_ix {
746                continue;
747            }
748            if backward > max_backward {
749                continue;
750            }
751            prev_ix &= ring_buffer_mask;
752            if cur_ix_masked.wrapping_add(best_len) > ring_buffer_mask
753                || prev_ix.wrapping_add(best_len) > ring_buffer_mask
754                || data[cur_ix_masked.wrapping_add(best_len)]
755                    != data[prev_ix.wrapping_add(best_len)]
756            {
757                continue;
758            }
759            {
760                let len: usize =
761                    FindMatchLengthWithLimit(&data[prev_ix..], &data[cur_ix_masked..], max_length);
762                if len >= 3 || (len == 2 && i < 2) {
763                    let score = BackwardReferenceScoreUsingLastDistanceH9(len, i, self.h9_opts);
764                    if best_score < score {
765                        best_score = score;
766                        best_len = len;
767                        out.len = best_len;
768                        out.distance = backward;
769                        out.score = best_score;
770                        is_match_found = true;
771                    }
772                }
773            }
774        }
775        if max_length >= 4 && cur_ix_masked.wrapping_add(best_len) <= ring_buffer_mask {
776            let key = self.HashBytes(data.split_at(cur_ix_masked).1);
777            let bucket = &mut self
778                .buckets_
779                .slice_mut()
780                .split_at_mut(key << H9_BLOCK_BITS)
781                .1
782                .split_at_mut(H9_BLOCK_SIZE)
783                .0;
784            assert!(bucket.len() > H9_BLOCK_MASK);
785            assert_eq!(bucket.len(), H9_BLOCK_MASK + 1);
786            let self_num_key = &mut self.num_.slice_mut()[key];
787            let down = if *self_num_key > H9_BLOCK_SIZE as u16 {
788                (*self_num_key as usize) - H9_BLOCK_SIZE
789            } else {
790                0usize
791            };
792            let mut i: usize = *self_num_key as usize;
793            let mut prev_best_val = data[cur_ix_masked.wrapping_add(best_len)];
794            while i > down {
795                i -= 1;
796                let mut prev_ix = bucket[i & H9_BLOCK_MASK] as usize;
797                let backward = cur_ix.wrapping_sub(prev_ix);
798                if (backward > max_backward) {
799                    break;
800                }
801                prev_ix &= ring_buffer_mask;
802                if (prev_ix.wrapping_add(best_len) > ring_buffer_mask
803                    || prev_best_val != data[prev_ix.wrapping_add(best_len)])
804                {
805                    continue;
806                }
807                {
808                    let len = FindMatchLengthWithLimit(
809                        data.split_at(prev_ix).1,
810                        data.split_at(cur_ix_masked).1,
811                        max_length,
812                    );
813                    if (len >= 4) {
814                        /* Comparing for >= 3 does not change the semantics, but just saves
815                        a few unnecessary binary logarithms in the backward reference
816                        score, since we are not interested in such short matches. */
817                        let score = BackwardReferenceScoreH9(len, backward, self.h9_opts);
818                        if (best_score < score) {
819                            best_score = score;
820                            best_len = len;
821                            out.len = best_len;
822                            out.distance = backward;
823                            out.score = best_score;
824                            is_match_found = true;
825                            if cur_ix_masked.wrapping_add(best_len) > ring_buffer_mask {
826                                break;
827                            }
828                            prev_best_val = data[cur_ix_masked.wrapping_add(best_len)];
829                        }
830                    }
831                }
832            }
833            bucket[*self_num_key as usize & H9_BLOCK_MASK] = cur_ix as u32;
834            *self_num_key = self_num_key.wrapping_add(1);
835        }
836        if !is_match_found && dictionary.is_some() {
837            let (_, cur_data) = data.split_at(cur_ix_masked);
838            is_match_found = SearchInStaticDictionary(
839                dictionary.unwrap(),
840                dictionary_hash,
841                self,
842                cur_data,
843                max_length,
844                max_backward.wrapping_add(gap),
845                max_distance,
846                out,
847                false,
848            );
849        }
850        is_match_found
851    }
852
853    fn Store(&mut self, data: &[u8], mask: usize, ix: usize) {
854        let (_, data_window) = data.split_at((ix & mask));
855        let key: u32 = self.HashBytes(data_window) as u32;
856        let self_num_key = &mut self.num_.slice_mut()[key as usize];
857        let minor_ix: usize = (*self_num_key as usize & H9_BLOCK_MASK);
858        self.buckets_.slice_mut()[minor_ix.wrapping_add((key as usize) << H9_BLOCK_BITS)] =
859            ix as u32;
860        *self_num_key = self_num_key.wrapping_add(1);
861    }
862    fn StoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
863        for i in ix_start..ix_end {
864            self.Store(data, mask, i);
865        }
866    }
867    fn BulkStoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
868        for i in ix_start..ix_end {
869            self.Store(data, mask, i);
870        }
871    }
872    fn Prepare(&mut self, _one_shot: bool, _input_size: usize, _data: &[u8]) -> HowPrepared {
873        if self.GetHasherCommon().is_prepared_ != 0 {
874            return HowPrepared::ALREADY_PREPARED;
875        }
876        for item in self.num_.slice_mut().iter_mut() {
877            *item = 0;
878        }
879        self.GetHasherCommon().is_prepared_ = 1;
880        HowPrepared::NEWLY_PREPARED
881    }
882    fn StitchToPreviousBlock(
883        &mut self,
884        num_bytes: usize,
885        position: usize,
886        ringbuffer: &[u8],
887        ringbuffer_mask: usize,
888    ) {
889        StitchToPreviousBlockInternal(self, num_bytes, position, ringbuffer, ringbuffer_mask)
890    }
891}
892
893pub trait AdvHashSpecialization: PartialEq<Self> {
894    fn get_hash_mask(&self) -> u64;
895    fn set_hash_mask(&mut self, params_hash_len: i32);
896    fn get_k_hash_mul(&self) -> u64;
897    fn HashTypeLength(&self) -> usize;
898    fn StoreLookahead(&self) -> usize;
899    fn load_and_mix_word(&self, data: &[u8]) -> u64;
900    fn hash_shift(&self) -> i32;
901    fn bucket_size(&self) -> u32;
902    fn block_mask(&self) -> u32;
903    fn block_size(&self) -> u32;
904    fn block_bits(&self) -> i32;
905}
906pub struct AdvHasher<
907    Specialization: AdvHashSpecialization + Sized + Clone,
908    Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>,
909> {
910    pub GetHasherCommon: Struct1,
911    pub specialization: Specialization, // contains hash_mask_
912    pub num: <Alloc as Allocator<u16>>::AllocatedMemory,
913    pub buckets: <Alloc as Allocator<u32>>::AllocatedMemory,
914    pub h9_opts: H9Opts,
915}
916
917impl<
918        Specialization: AdvHashSpecialization + Sized + Clone,
919        Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>,
920    > PartialEq<AdvHasher<Specialization, Alloc>> for AdvHasher<Specialization, Alloc>
921{
922    fn eq(&self, other: &Self) -> bool {
923        self.GetHasherCommon == other.GetHasherCommon
924            && self.specialization == other.specialization
925            && self.num.slice() == other.num.slice()
926            && self.buckets.slice() == other.buckets.slice()
927            && self.h9_opts == other.h9_opts
928    }
929}
930
931#[derive(Clone, PartialEq)]
932pub struct HQ5Sub {}
933impl AdvHashSpecialization for HQ5Sub {
934    #[inline(always)]
935    fn hash_shift(&self) -> i32 {
936        32i32 - 14 // 32 - bucket_bits
937    }
938    #[inline(always)]
939    fn bucket_size(&self) -> u32 {
940        1 << 14
941    }
942    #[inline(always)]
943    fn block_bits(&self) -> i32 {
944        4
945    }
946    #[inline(always)]
947    fn block_size(&self) -> u32 {
948        1 << 4
949    }
950    #[inline(always)]
951    fn block_mask(&self) -> u32 {
952        (1 << 4) - 1
953    }
954    #[inline(always)]
955    fn get_hash_mask(&self) -> u64 {
956        //return 0xffffffffffffffffu64;
957        0xffffffffu64 // make it 32 bit
958    }
959    #[inline(always)]
960    fn get_k_hash_mul(&self) -> u64 {
961        kHashMul32 as u64
962    }
963    #[inline(always)]
964    fn load_and_mix_word(&self, data: &[u8]) -> u64 {
965        (BROTLI_UNALIGNED_LOAD32(data) as u64 * self.get_k_hash_mul()) & self.get_hash_mask()
966    }
967    #[inline(always)]
968    fn set_hash_mask(&mut self, _params_hash_len: i32) {}
969    fn HashTypeLength(&self) -> usize {
970        4
971    }
972    #[inline(always)]
973    fn StoreLookahead(&self) -> usize {
974        4
975    }
976}
977
978#[derive(Clone, PartialEq)]
979pub struct HQ7Sub {}
980impl AdvHashSpecialization for HQ7Sub {
981    #[inline(always)]
982    fn hash_shift(&self) -> i32 {
983        32i32 - 15 // 32 - bucket_bits
984    }
985    #[inline(always)]
986    fn bucket_size(&self) -> u32 {
987        1 << 15
988    }
989    #[inline(always)]
990    fn block_bits(&self) -> i32 {
991        6
992    }
993    #[inline(always)]
994    fn block_size(&self) -> u32 {
995        1 << 6
996    }
997    #[inline(always)]
998    fn block_mask(&self) -> u32 {
999        (1 << 6) - 1
1000    }
1001    #[inline(always)]
1002    fn get_hash_mask(&self) -> u64 {
1003        //return 0xffffffffffffffffu64;
1004        0xffffffffu64 // make it 32 bit
1005    }
1006    #[inline(always)]
1007    fn get_k_hash_mul(&self) -> u64 {
1008        kHashMul32 as u64
1009    }
1010    #[inline(always)]
1011    fn load_and_mix_word(&self, data: &[u8]) -> u64 {
1012        (BROTLI_UNALIGNED_LOAD32(data) as u64 * self.get_k_hash_mul()) & self.get_hash_mask()
1013    }
1014    #[inline(always)]
1015    fn set_hash_mask(&mut self, _params_hash_len: i32) {}
1016    fn HashTypeLength(&self) -> usize {
1017        4
1018    }
1019    #[inline(always)]
1020    fn StoreLookahead(&self) -> usize {
1021        4
1022    }
1023}
1024
1025#[derive(Clone, PartialEq)]
1026pub struct H5Sub {
1027    pub hash_shift_: i32,
1028    pub bucket_size_: u32,
1029    pub block_mask_: u32,
1030    pub block_bits_: i32,
1031}
1032
1033impl AdvHashSpecialization for H5Sub {
1034    #[inline(always)]
1035    fn hash_shift(&self) -> i32 {
1036        self.hash_shift_
1037    }
1038    fn bucket_size(&self) -> u32 {
1039        self.bucket_size_
1040    }
1041    fn block_bits(&self) -> i32 {
1042        self.block_bits_
1043    }
1044    fn block_size(&self) -> u32 {
1045        1 << self.block_bits_
1046    }
1047    fn block_mask(&self) -> u32 {
1048        self.block_mask_
1049    }
1050    fn get_hash_mask(&self) -> u64 {
1051        //return 0xffffffffffffffffu64;
1052        0xffffffffu64 // make it 32 bit
1053    }
1054    fn get_k_hash_mul(&self) -> u64 {
1055        kHashMul32 as u64
1056    }
1057    fn load_and_mix_word(&self, data: &[u8]) -> u64 {
1058        (BROTLI_UNALIGNED_LOAD32(data) as u64 * self.get_k_hash_mul()) & self.get_hash_mask()
1059    }
1060    #[allow(unused_variables)]
1061    fn set_hash_mask(&mut self, params_hash_len: i32) {}
1062    fn HashTypeLength(&self) -> usize {
1063        4
1064    }
1065    fn StoreLookahead(&self) -> usize {
1066        4
1067    }
1068}
1069
1070#[derive(Clone, PartialEq)]
1071pub struct H6Sub {
1072    pub hash_mask: u64,
1073    pub hash_shift_: i32,
1074    pub bucket_size_: u32,
1075    pub block_mask_: u32,
1076    pub block_bits_: i32,
1077}
1078
1079impl AdvHashSpecialization for H6Sub {
1080    #[inline(always)]
1081    fn hash_shift(&self) -> i32 {
1082        self.hash_shift_
1083    }
1084    #[inline(always)]
1085    fn bucket_size(&self) -> u32 {
1086        self.bucket_size_
1087    }
1088    fn block_bits(&self) -> i32 {
1089        self.block_bits_
1090    }
1091    fn block_size(&self) -> u32 {
1092        1 << self.block_bits_
1093    }
1094    #[inline(always)]
1095    fn block_mask(&self) -> u32 {
1096        self.block_mask_
1097    }
1098    #[inline(always)]
1099    fn get_hash_mask(&self) -> u64 {
1100        self.hash_mask
1101    }
1102    #[inline(always)]
1103    fn set_hash_mask(&mut self, params_hash_len: i32) {
1104        self.hash_mask = !(0u32 as (u64)) >> (64i32 - 8i32 * params_hash_len);
1105    }
1106    #[inline(always)]
1107    fn get_k_hash_mul(&self) -> u64 {
1108        kHashMul64Long
1109    }
1110    #[inline(always)]
1111    fn load_and_mix_word(&self, data: &[u8]) -> u64 {
1112        (BROTLI_UNALIGNED_LOAD64(data) & self.get_hash_mask()).wrapping_mul(self.get_k_hash_mul())
1113    }
1114    #[inline(always)]
1115    fn HashTypeLength(&self) -> usize {
1116        8
1117    }
1118    #[inline(always)]
1119    fn StoreLookahead(&self) -> usize {
1120        8
1121    }
1122}
1123
1124fn BackwardReferencePenaltyUsingLastDistance(distance_short_code: usize) -> u64 {
1125    (39u64).wrapping_add((0x1ca10u64 >> (distance_short_code & 0xeusize) & 0xeu64))
1126}
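// The constant 0x1CA10 packs a tiny penalty table: each pair of short codes (code & 0xE)
// selects a small even increment that is added to the base penalty of 39.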
1127
1128impl<
1129        Specialization: AdvHashSpecialization + Clone,
1130        Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>,
1131    > AdvHasher<Specialization, Alloc>
1132{
1133    // 7 opt
1134    // returns a new ix_start
1135    fn StoreRangeOptBatch(
1136        &mut self,
1137        data: &[u8],
1138        mask: usize,
1139        ix_start: usize,
1140        ix_end: usize,
1141    ) -> usize {
1142        let lookahead = self.specialization.StoreLookahead();
1143        if ix_end >= ix_start + lookahead * 2 && lookahead == 4 {
1144            let num = self.num.slice_mut();
1145            let buckets = self.buckets.slice_mut();
1146            assert_eq!(num.len(), self.specialization.bucket_size() as usize);
1147            assert_eq!(
1148                buckets.len(),
1149                self.specialization.bucket_size() as usize
1150                    * self.specialization.block_size() as usize
1151            );
1152            let shift = self.specialization.hash_shift();
1153            let chunk_count = (ix_end - ix_start) / 4;
1154            for chunk_id in 0..chunk_count {
1155                let i = (ix_start + chunk_id * 4) & mask;
1156                let ffffffff = 0xffffffff;
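                // Assemble 7 bytes little-endian so the four overlapping 32-bit windows
                // starting at i, i+1, i+2 and i+3 can all be hashed from this one value.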
1157                let word = u64::from(data[i])
1158                    | (u64::from(data[i + 1]) << 8)
1159                    | (u64::from(data[i + 2]) << 16)
1160                    | (u64::from(data[i + 3]) << 24)
1161                    | (u64::from(data[i + 4]) << 32)
1162                    | (u64::from(data[i + 5]) << 40)
1163                    | (u64::from(data[i + 6]) << 48);
1164                let mixed0 = ((((word & ffffffff) * self.specialization.get_k_hash_mul())
1165                    & self.specialization.get_hash_mask())
1166                    >> shift) as usize;
1167                let mixed1 = (((((word >> 8) & ffffffff) * self.specialization.get_k_hash_mul())
1168                    & self.specialization.get_hash_mask())
1169                    >> shift) as usize;
1170                let mixed2 = (((((word >> 16) & ffffffff) * self.specialization.get_k_hash_mul())
1171                    & self.specialization.get_hash_mask())
1172                    >> shift) as usize;
1173                let mixed3 = (((((word >> 24) & ffffffff) * self.specialization.get_k_hash_mul())
1174                    & self.specialization.get_hash_mask())
1175                    >> shift) as usize;
1176                let mut num_ref0 = u32::from(num[mixed0]);
1177                num[mixed0] = num_ref0.wrapping_add(1) as u16;
1178                num_ref0 &= self.specialization.block_mask();
1179                let mut num_ref1 = u32::from(num[mixed1]);
1180                num[mixed1] = num_ref1.wrapping_add(1) as u16;
1181                num_ref1 &= self.specialization.block_mask();
1182                let mut num_ref2 = u32::from(num[mixed2]);
1183                num[mixed2] = num_ref2.wrapping_add(1) as u16;
1184                num_ref2 &= self.specialization.block_mask();
1185                let mut num_ref3 = u32::from(num[mixed3]);
1186                num[mixed3] = num_ref3.wrapping_add(1) as u16;
1187                num_ref3 &= self.specialization.block_mask();
1188                let offset0: usize =
1189                    (mixed0 << self.specialization.block_bits()) + num_ref0 as usize;
1190                let offset1: usize =
1191                    (mixed1 << self.specialization.block_bits()) + num_ref1 as usize;
1192                let offset2: usize =
1193                    (mixed2 << self.specialization.block_bits()) + num_ref2 as usize;
1194                let offset3: usize =
1195                    (mixed3 << self.specialization.block_bits()) + num_ref3 as usize;
1196                buckets[offset0] = (i) as u32;
1197                buckets[offset1] = (i + 1) as u32;
1198                buckets[offset2] = (i + 2) as u32;
1199                buckets[offset3] = (i + 3) as u32;
1200            }
1201            return ix_start + chunk_count * 4;
1202        }
1203        ix_start
1204    }
1205
1206    fn BulkStoreRangeOptMemFetch(
1207        &mut self,
1208        data: &[u8],
1209        mask: usize,
1210        ix_start: usize,
1211        ix_end: usize,
1212    ) -> usize {
1213        const REG_SIZE: usize = 32usize;
1214        let lookahead = self.specialization.StoreLookahead();
1215        if mask == !0 && ix_end > ix_start + REG_SIZE && lookahead == 4 {
1216            const lookahead4: usize = 4;
1217            assert_eq!(lookahead4, lookahead);
1218            let mut data64 = [0u8; REG_SIZE + lookahead4 - 1];
1219            let del = (ix_end - ix_start) / REG_SIZE;
1220            let num = self.num.slice_mut();
1221            let buckets = self.buckets.slice_mut();
1222            assert_eq!(num.len(), self.specialization.bucket_size() as usize);
1223            assert_eq!(
1224                buckets.len(),
1225                self.specialization.bucket_size() as usize
1226                    * self.specialization.block_size() as usize
1227            );
1228            let shift = self.specialization.hash_shift();
1229            for chunk_id in 0..del {
1230                let ix_offset = ix_start + chunk_id * REG_SIZE;
1231                data64[..REG_SIZE + lookahead4 - 1].clone_from_slice(
1232                    data.split_at(ix_offset)
1233                        .1
1234                        .split_at(REG_SIZE + lookahead4 - 1)
1235                        .0,
1236                );
1237                for quad_index in 0..(REG_SIZE >> 2) {
1238                    let i = quad_index << 2;
1239                    let ffffffff = 0xffffffff;
1240                    let word = u64::from(data64[i])
1241                        | (u64::from(data64[i + 1]) << 8)
1242                        | (u64::from(data64[i + 2]) << 16)
1243                        | (u64::from(data64[i + 3]) << 24)
1244                        | (u64::from(data64[i + 4]) << 32)
1245                        | (u64::from(data64[i + 5]) << 40)
1246                        | (u64::from(data64[i + 6]) << 48);
1247                    let mixed0 = ((((word & ffffffff) * self.specialization.get_k_hash_mul())
1248                        & self.specialization.get_hash_mask())
1249                        >> shift) as usize;
1250                    let mixed1 = (((((word >> 8) & ffffffff)
1251                        * self.specialization.get_k_hash_mul())
1252                        & self.specialization.get_hash_mask())
1253                        >> shift) as usize;
1254                    let mixed2 = (((((word >> 16) & ffffffff)
1255                        * self.specialization.get_k_hash_mul())
1256                        & self.specialization.get_hash_mask())
1257                        >> shift) as usize;
1258                    let mixed3 = (((((word >> 24) & ffffffff)
1259                        * self.specialization.get_k_hash_mul())
1260                        & self.specialization.get_hash_mask())
1261                        >> shift) as usize;
1262                    let mut num_ref0 = u32::from(num[mixed0]);
1263                    num[mixed0] = num_ref0.wrapping_add(1) as u16;
1264                    num_ref0 &= self.specialization.block_mask();
1265                    let mut num_ref1 = u32::from(num[mixed1]);
1266                    num[mixed1] = num_ref1.wrapping_add(1) as u16;
1267                    num_ref1 &= self.specialization.block_mask();
1268                    let mut num_ref2 = u32::from(num[mixed2]);
1269                    num[mixed2] = num_ref2.wrapping_add(1) as u16;
1270                    num_ref2 &= self.specialization.block_mask();
1271                    let mut num_ref3 = u32::from(num[mixed3]);
1272                    num[mixed3] = num_ref3.wrapping_add(1) as u16;
1273                    num_ref3 &= self.specialization.block_mask();
1274                    let offset0: usize =
1275                        (mixed0 << self.specialization.block_bits()) + num_ref0 as usize;
1276                    let offset1: usize =
1277                        (mixed1 << self.specialization.block_bits()) + num_ref1 as usize;
1278                    let offset2: usize =
1279                        (mixed2 << self.specialization.block_bits()) + num_ref2 as usize;
1280                    let offset3: usize =
1281                        (mixed3 << self.specialization.block_bits()) + num_ref3 as usize;
1282                    buckets[offset0] = (ix_offset + i) as u32;
1283                    buckets[offset1] = (ix_offset + i + 1) as u32;
1284                    buckets[offset2] = (ix_offset + i + 2) as u32;
1285                    buckets[offset3] = (ix_offset + i + 3) as u32;
1286                }
1287            }
1288            return ix_start + del * REG_SIZE;
1289        }
1290        ix_start
1291    }
1292    fn BulkStoreRangeOptMemFetchLazyDupeUpdate(
1293        &mut self,
1294        data: &[u8],
1295        mask: usize,
1296        ix_start: usize,
1297        ix_end: usize,
1298    ) -> usize {
1299        const REG_SIZE: usize = 32usize;
1300        let lookahead = self.specialization.StoreLookahead();
1301        if mask == !0 && ix_end > ix_start + REG_SIZE && lookahead == 4 {
1302            const lookahead4: usize = 4;
1303            assert_eq!(lookahead4, lookahead);
1304            let mut data64 = [0u8; REG_SIZE + lookahead4];
1305            let del = (ix_end - ix_start) / REG_SIZE;
1306            let num = self.num.slice_mut();
1307            let buckets = self.buckets.slice_mut();
1308            assert_eq!(num.len(), self.specialization.bucket_size() as usize);
1309            assert_eq!(
1310                buckets.len(),
1311                self.specialization.bucket_size() as usize
1312                    * self.specialization.block_size() as usize
1313            );
1314            let shift = self.specialization.hash_shift();
1315            for chunk_id in 0..del {
1316                let ix_offset = ix_start + chunk_id * REG_SIZE;
1317                data64[..REG_SIZE + lookahead4]
1318                    .clone_from_slice(data.split_at(ix_offset).1.split_at(REG_SIZE + lookahead4).0);
1319                for quad_index in 0..(REG_SIZE >> 2) {
1320                    let i = quad_index << 2;
1321                    let ffffffff = 0xffffffff;
1322                    let word = u64::from(data64[i])
1323                        | (u64::from(data64[i + 1]) << 8)
1324                        | (u64::from(data64[i + 2]) << 16)
1325                        | (u64::from(data64[i + 3]) << 24)
1326                        | (u64::from(data64[i + 4]) << 32)
1327                        | (u64::from(data64[i + 5]) << 40)
1328                        | (u64::from(data64[i + 6]) << 48);
1329                    let mixed0 = ((((word & ffffffff) * self.specialization.get_k_hash_mul())
1330                        & self.specialization.get_hash_mask())
1331                        >> shift) as usize;
1332                    let mixed1 = (((((word >> 8) & ffffffff)
1333                        * self.specialization.get_k_hash_mul())
1334                        & self.specialization.get_hash_mask())
1335                        >> shift) as usize;
1336                    let mixed2 = (((((word >> 16) & ffffffff)
1337                        * self.specialization.get_k_hash_mul())
1338                        & self.specialization.get_hash_mask())
1339                        >> shift) as usize;
1340                    let mixed3 = (((((word >> 24) & ffffffff)
1341                        * self.specialization.get_k_hash_mul())
1342                        & self.specialization.get_hash_mask())
1343                        >> shift) as usize;
1344                    let mut num_ref0 = u32::from(num[mixed0]);
1345                    let mut num_ref1 = u32::from(num[mixed1]);
1346                    let mut num_ref2 = u32::from(num[mixed2]);
1347                    let mut num_ref3 = u32::from(num[mixed3]);
1348                    num[mixed0] = num_ref0.wrapping_add(1) as u16;
1349                    num[mixed1] = num_ref1.wrapping_add(1) as u16;
1350                    num[mixed2] = num_ref2.wrapping_add(1) as u16;
1351                    num[mixed3] = num_ref3.wrapping_add(1) as u16;
1352                    num_ref0 &= self.specialization.block_mask();
1353                    num_ref1 &= self.specialization.block_mask();
1354                    num_ref2 &= self.specialization.block_mask();
1355                    num_ref3 &= self.specialization.block_mask();
1356                    let offset0: usize =
1357                        (mixed0 << self.specialization.block_bits()) + num_ref0 as usize;
1358                    let offset1: usize =
1359                        (mixed1 << self.specialization.block_bits()) + num_ref1 as usize;
1360                    let offset2: usize =
1361                        (mixed2 << self.specialization.block_bits()) + num_ref2 as usize;
1362                    let offset3: usize =
1363                        (mixed3 << self.specialization.block_bits()) + num_ref3 as usize;
1364                    buckets[offset0] = (ix_offset + i) as u32;
1365                    buckets[offset1] = (ix_offset + i + 1) as u32;
1366                    buckets[offset2] = (ix_offset + i + 2) as u32;
1367                    buckets[offset3] = (ix_offset + i + 3) as u32;
1368                }
1369            }
1370            return ix_start + del * REG_SIZE;
1371        }
1372        ix_start
1373    }
1374    fn BulkStoreRangeOptRandomDupeUpdate(
1375        &mut self,
1376        data: &[u8],
1377        mask: usize,
1378        ix_start: usize,
1379        ix_end: usize,
1380    ) -> usize {
1381        const REG_SIZE: usize = 32usize;
1382        let lookahead = self.specialization.StoreLookahead();
1383        if mask == !0 && ix_end > ix_start + REG_SIZE && lookahead == 4 {
1384            const lookahead4: usize = 4;
1385            assert_eq!(lookahead4, lookahead);
1386            let mut data64 = [0u8; REG_SIZE + lookahead4];
1387            let del = (ix_end - ix_start) / REG_SIZE;
1388            let num = self.num.slice_mut();
1389            let buckets = self.buckets.slice_mut();
1390            assert_eq!(num.len(), self.specialization.bucket_size() as usize);
1391            assert_eq!(
1392                buckets.len(),
1393                self.specialization.bucket_size() as usize
1394                    * self.specialization.block_size() as usize
1395            );
1396            let shift = self.specialization.hash_shift();
1397            for chunk_id in 0..del {
1398                let ix_offset = ix_start + chunk_id * REG_SIZE;
1399                data64[..REG_SIZE + lookahead4]
1400                    .clone_from_slice(data.split_at(ix_offset).1.split_at(REG_SIZE + lookahead4).0);
1401                for i in 0..REG_SIZE {
1402                    let mixed_word = ((u32::from(data64[i])
1403                        | (u32::from(data64[i + 1]) << 8)
1404                        | (u32::from(data64[i + 2]) << 16)
1405                        | (u32::from(data64[i + 3]) << 24))
1406                        as u64
1407                        * self.specialization.get_k_hash_mul())
1408                        & self.specialization.get_hash_mask();
1409                    let key = mixed_word >> shift;
1410                    let minor_ix: usize = chunk_id & self.specialization.block_mask() as usize; // HACK: round-robin on chunk_id instead of the usual (*num_ref & block_mask) slot; num[] is rebuilt below
1411                    let offset: usize =
1412                        minor_ix + (key << self.specialization.block_bits()) as usize;
1413                    buckets[offset] = (ix_offset + i) as u32;
1414                }
1415            }
1416            for (bucket_index, num_ref) in num.iter_mut().enumerate() {
1417                let region = buckets
1418                    .split_at_mut(bucket_index << self.specialization.block_bits())
1419                    .1
1420                    .split_at_mut(self.specialization.block_size() as usize)
1421                    .0;
1422                let mut lnum = 0usize;
1423                for block_index in 0..self.specialization.block_size() as usize {
1424                    if region[block_index] != 0 {
1425                        let byte_addr = region[block_index];
1426                        region[lnum] = byte_addr;
1427                        lnum += 1;
1428                    }
1429                }
1430                *num_ref = lnum as u16;
1431            }
1432            return ix_start + del * REG_SIZE;
1433        }
1434        ix_start
1435    }
1436}
1437
1438impl<
1439        Specialization: AdvHashSpecialization + Clone,
1440        Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>,
1441    > AnyHasher for AdvHasher<Specialization, Alloc>
1442{
1443    fn Opts(&self) -> H9Opts {
1444        self.h9_opts
1445    }
1446    fn PrepareDistanceCache(&self, distance_cache: &mut [i32]) {
1447        let num_distances = self.GetHasherCommon.params.num_last_distances_to_check;
1448        adv_prepare_distance_cache(distance_cache, num_distances);
1449    }
1450    fn StitchToPreviousBlock(
1451        &mut self,
1452        num_bytes: usize,
1453        position: usize,
1454        ringbuffer: &[u8],
1455        ringbuffer_mask: usize,
1456    ) {
1457        StitchToPreviousBlockInternal(self, num_bytes, position, ringbuffer, ringbuffer_mask);
1458    }
1459    fn Prepare(&mut self, one_shot: bool, input_size: usize, data: &[u8]) -> HowPrepared {
1460        if self.GetHasherCommon.is_prepared_ != 0 {
1461            return HowPrepared::ALREADY_PREPARED;
1462        }
1463        let partial_prepare_threshold = self.specialization.bucket_size() as usize >> 6;
1464        if one_shot && input_size <= partial_prepare_threshold {
1465            for i in 0..input_size {
1466                let key = self.HashBytes(&data[i..]);
1467                self.num.slice_mut()[key] = 0;
1468            }
1469        } else {
1470            for item in
1471                self.num.slice_mut()[..(self.specialization.bucket_size() as usize)].iter_mut()
1472            {
1473                *item = 0;
1474            }
1475        }
1476        self.GetHasherCommon.is_prepared_ = 1;
1477        HowPrepared::NEWLY_PREPARED
1478    }
1479
1480    fn GetHasherCommon(&mut self) -> &mut Struct1 {
1481        &mut self.GetHasherCommon
1482    }
1483    fn HashTypeLength(&self) -> usize {
1484        self.specialization.HashTypeLength()
1485    }
1486    fn StoreLookahead(&self) -> usize {
1487        self.specialization.StoreLookahead()
1488    }
1489    fn HashBytes(&self, data: &[u8]) -> usize {
1490        let shift = self.specialization.hash_shift();
1491        let h: u64 = self.specialization.load_and_mix_word(data);
1492        (h >> shift) as u32 as usize
1493    }
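    // Vectorized store for positions ix, ix+2, ix+4 and ix+6: an 8-byte word plus a
    // 2-byte word assembled from the input cover the four overlapping 4-byte
    // windows, which are hashed and written in one pass. Falls back to scalar
    // Store when the store lookahead is not 4.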
1494    fn StoreEvenVec4(&mut self, data: &[u8], mask: usize, ix: usize) {
1495        if self.specialization.StoreLookahead() != 4 {
1496            for i in 0..4 {
1497                self.Store(data, mask, ix + i * 2);
1498            }
1499            return;
1500        }
1501        let shift = self.specialization.hash_shift();
1502        let num = self.num.slice_mut();
1503        let buckets = self.buckets.slice_mut();
1504        let li = ix & mask;
1505        let lword = u64::from(data[li])
1506            | (u64::from(data[li + 1]) << 8)
1507            | (u64::from(data[li + 2]) << 16)
1508            | (u64::from(data[li + 3]) << 24)
1509            | (u64::from(data[li + 4]) << 32)
1510            | (u64::from(data[li + 5]) << 40)
1511            | (u64::from(data[li + 6]) << 48)
1512            | (u64::from(data[li + 7]) << 56);
1513        let hi = (ix + 8) & mask;
1514        let hword = u64::from(data[hi]) | (u64::from(data[hi + 1]) << 8);
1515        let mixed0 = ((((lword & 0xffffffff) * self.specialization.get_k_hash_mul())
1516            & self.specialization.get_hash_mask())
1517            >> shift) as usize;
1518        let mixed1 = (((((lword >> 16) & 0xffffffff) * self.specialization.get_k_hash_mul())
1519            & self.specialization.get_hash_mask())
1520            >> shift) as usize;
1521        let mixed2 = (((((lword >> 32) & 0xffffffff) * self.specialization.get_k_hash_mul())
1522            & self.specialization.get_hash_mask())
1523            >> shift) as usize;
1524        let mixed3 = ((((((hword & 0xffff) << 16) | ((lword >> 48) & 0xffff))
1525            * self.specialization.get_k_hash_mul())
1526            & self.specialization.get_hash_mask())
1527            >> shift) as usize;
1528        let mut num_ref0 = u32::from(num[mixed0]);
1529        num[mixed0] = num_ref0.wrapping_add(1) as u16;
1530        num_ref0 &= self.specialization.block_mask();
1531        let mut num_ref1 = u32::from(num[mixed1]);
1532        num[mixed1] = num_ref1.wrapping_add(1) as u16;
1533        num_ref1 &= self.specialization.block_mask();
1534        let mut num_ref2 = u32::from(num[mixed2]);
1535        num[mixed2] = num_ref2.wrapping_add(1) as u16;
1536        num_ref2 &= self.specialization.block_mask();
1537        let mut num_ref3 = u32::from(num[mixed3]);
1538        num[mixed3] = num_ref3.wrapping_add(1) as u16;
1539        num_ref3 &= self.specialization.block_mask();
1540        let offset0: usize = (mixed0 << self.specialization.block_bits()) + num_ref0 as usize;
1541        let offset1: usize = (mixed1 << self.specialization.block_bits()) + num_ref1 as usize;
1542        let offset2: usize = (mixed2 << self.specialization.block_bits()) + num_ref2 as usize;
1543        let offset3: usize = (mixed3 << self.specialization.block_bits()) + num_ref3 as usize;
1544        buckets[offset0] = ix as u32;
1545        buckets[offset1] = (ix + 2) as u32;
1546        buckets[offset2] = (ix + 4) as u32;
1547        buckets[offset3] = (ix + 6) as u32;
1548    }
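    // Vectorized store for positions ix, ix+4, ix+8 and ix+12: four non-overlapping
    // 4-byte windows are assembled as 32-bit words, hashed, and written in one
    // pass. Falls back to scalar Store when the store lookahead is not 4.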
1549    fn Store4Vec4(&mut self, data: &[u8], mask: usize, ix: usize) {
1550        if self.specialization.StoreLookahead() != 4 {
1551            for i in 0..4 {
1552                self.Store(data, mask, ix + i * 4);
1553            }
1554            return;
1555        }
1556        let shift = self.specialization.hash_shift();
1557        let num = self.num.slice_mut();
1558        let buckets = self.buckets.slice_mut();
1559        let li = ix & mask;
1560        let llword = u32::from(data[li])
1561            | (u32::from(data[li + 1]) << 8)
1562            | (u32::from(data[li + 2]) << 16)
1563            | (u32::from(data[li + 3]) << 24);
1564        let luword = u32::from(data[li + 4])
1565            | (u32::from(data[li + 5]) << 8)
1566            | (u32::from(data[li + 6]) << 16)
1567            | (u32::from(data[li + 7]) << 24);
1568        let ui = (ix + 8) & mask;
1569        let ulword = u32::from(data[ui])
1570            | (u32::from(data[ui + 1]) << 8)
1571            | (u32::from(data[ui + 2]) << 16)
1572            | (u32::from(data[ui + 3]) << 24);
1573
1574        let uuword = u32::from(data[ui + 4])
1575            | (u32::from(data[ui + 5]) << 8)
1576            | (u32::from(data[ui + 6]) << 16)
1577            | (u32::from(data[ui + 7]) << 24);
1578
1579        let mixed0 = (((u64::from(llword) * self.specialization.get_k_hash_mul())
1580            & self.specialization.get_hash_mask())
1581            >> shift) as usize;
1582        let mixed1 = (((u64::from(luword) * self.specialization.get_k_hash_mul())
1583            & self.specialization.get_hash_mask())
1584            >> shift) as usize;
1585        let mixed2 = (((u64::from(ulword) * self.specialization.get_k_hash_mul())
1586            & self.specialization.get_hash_mask())
1587            >> shift) as usize;
1588        let mixed3 = (((u64::from(uuword) * self.specialization.get_k_hash_mul())
1589            & self.specialization.get_hash_mask())
1590            >> shift) as usize;
1591        let mut num_ref0 = u32::from(num[mixed0]);
1592        num[mixed0] = num_ref0.wrapping_add(1) as u16;
1593        num_ref0 &= self.specialization.block_mask();
1594        let mut num_ref1 = u32::from(num[mixed1]);
1595        num[mixed1] = num_ref1.wrapping_add(1) as u16;
1596        num_ref1 &= self.specialization.block_mask();
1597        let mut num_ref2 = u32::from(num[mixed2]);
1598        num[mixed2] = num_ref2.wrapping_add(1) as u16;
1599        num_ref2 &= self.specialization.block_mask();
1600        let mut num_ref3 = u32::from(num[mixed3]);
1601        num[mixed3] = num_ref3.wrapping_add(1) as u16;
1602        num_ref3 &= self.specialization.block_mask();
1603        let offset0: usize = (mixed0 << self.specialization.block_bits()) + num_ref0 as usize;
1604        let offset1: usize = (mixed1 << self.specialization.block_bits()) + num_ref1 as usize;
1605        let offset2: usize = (mixed2 << self.specialization.block_bits()) + num_ref2 as usize;
1606        let offset3: usize = (mixed3 << self.specialization.block_bits()) + num_ref3 as usize;
1607        buckets[offset0] = ix as u32;
1608        buckets[offset1] = (ix + 4) as u32;
1609        buckets[offset2] = (ix + 8) as u32;
1610        buckets[offset3] = (ix + 12) as u32;
1611    }
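    // Scalar store: hash the window at data[ix & mask], write position ix into slot
    // (num[key] & block_mask) of the key's bucket, then bump num[key]. Each bucket
    // thus acts as a ring holding the most recent block_size positions for its key.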
1612    fn Store(&mut self, data: &[u8], mask: usize, ix: usize) {
1613        let (_, data_window) = data.split_at(ix & mask);
1614        let key: u32 = self.HashBytes(data_window) as u32;
1615        let minor_ix: usize =
1616            (self.num.slice()[(key as usize)] as u32 & self.specialization.block_mask()) as usize;
1617        let offset: usize =
1618            minor_ix.wrapping_add((key << self.specialization.block_bits()) as usize);
1619        self.buckets.slice_mut()[offset] = ix as u32;
1620        {
1621            let _lhs = &mut self.num.slice_mut()[(key as usize)];
1622            *_lhs = (*_lhs as i32 + 1) as u16;
1623        }
1624    }
1625    fn StoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
1626        for i in self.StoreRangeOptBatch(data, mask, ix_start, ix_end)..ix_end {
1627            self.Store(data, mask, i);
1628        }
1629    }
1630    fn BulkStoreRange(&mut self, data: &[u8], mask: usize, mut ix_start: usize, ix_end: usize) {
1631        /*
1632        if ix_start + 4096 < ix_end {
1633          for vec_offset in 0..(ix_end - ix_start - 4096) / 16 {
1634            self.Store4Vec4(data, mask, ix_start + vec_offset * 16);
1635          }
1636          ix_start += 16 * ((ix_end - ix_start - 4096) / 16);
1637        }
1638        if ix_start + 512 < ix_end {
1639          for vec_offset in 0..(ix_end - ix_start - 512) / 8 {
1640            self.StoreEvenVec4(data, mask, ix_start + vec_offset * 8);
1641            //self.StoreRange(data, mask, ix_start + vec_offset * 8, ix_start + (1+ vec_offset) * 8);
1642          }
1643          ix_start += 8 * ((ix_end - ix_start - 512) / 8);
1644        }
1645         */
1646        ix_start = self.BulkStoreRangeOptMemFetch(data, mask, ix_start, ix_end);
1647        for i in ix_start..ix_end {
1648            self.Store(data, mask, i);
1649        }
1650    }
1651
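    // Match search proceeds in stages: (1) probe the distances recorded in
    // distance_cache, with a small penalty for older entries; (2) walk the current
    // key's bucket from newest to oldest (at most block_size entries), scoring each
    // candidate by length and distance. The current position is then inserted into
    // its bucket, and (3) if nothing matched, the static dictionary is searched.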
1652    fn FindLongestMatch(
1653        &mut self,
1654        dictionary: Option<&BrotliDictionary>,
1655        dictionary_hash: &[u16],
1656        data: &[u8],
1657        ring_buffer_mask: usize,
1658        distance_cache: &[i32],
1659        cur_ix: usize,
1660        max_length: usize,
1661        max_backward: usize,
1662        gap: usize,
1663        max_distance: usize,
1664        out: &mut HasherSearchResult,
1665    ) -> bool {
1666        let opts = self.Opts();
1667        let cur_ix_masked: usize = cur_ix & ring_buffer_mask;
1668        let mut is_match_found = false;
1669        let mut best_score: u64 = out.score;
1670        let mut best_len: usize = out.len;
1671        let mut i: usize;
1672        out.len = 0usize;
1673        out.len_x_code = 0usize;
1674        i = 0usize;
1675        let cur_data = data.split_at(cur_ix_masked).1;
1676        while i < self.GetHasherCommon.params.num_last_distances_to_check as usize {
1677            'continue45: loop {
1678                {
1679                    let backward: usize = distance_cache[i] as usize;
1680                    let mut prev_ix: usize = cur_ix.wrapping_sub(backward);
1681                    if prev_ix >= cur_ix {
1682                        break 'continue45;
1683                    }
1684                    if backward > max_backward {
1685                        break 'continue45;
1686                    }
1687                    prev_ix &= ring_buffer_mask;
1688                    if (cur_ix_masked.wrapping_add(best_len) > ring_buffer_mask
1689                        || prev_ix.wrapping_add(best_len) > ring_buffer_mask
1690                        || cur_data[best_len] != data[prev_ix.wrapping_add(best_len)])
1691                    {
1692                        break 'continue45;
1693                    }
1694                    let prev_data = data.split_at(prev_ix).1;
1695
1696                    let len: usize = FindMatchLengthWithLimit(prev_data, cur_data, max_length);
1697                    if len >= 3usize || (len == 2usize && i < 2usize) {
1698                        let mut score: u64 = BackwardReferenceScoreUsingLastDistance(len, opts);
1699                        if best_score < score {
1700                            if i != 0usize {
1701                                score = score
1702                                    .wrapping_sub(BackwardReferencePenaltyUsingLastDistance(i));
1703                            }
1704                            if best_score < score {
1705                                best_score = score;
1706                                best_len = len;
1707                                out.len = best_len;
1708                                out.distance = backward;
1709                                out.score = best_score;
1710                                is_match_found = true;
1711                            }
1712                        }
1713                    }
1714                }
1715                break;
1716            }
1717            i = i.wrapping_add(1);
1718        }
1719        {
1720            let key: u32 = self.HashBytes(cur_data) as u32;
1721            let common_block_bits = self.specialization.block_bits();
1722            let num_ref_mut = &mut self.num.slice_mut()[key as usize];
1723            let num_copy = *num_ref_mut;
1724            let bucket: &mut [u32] = self
1725                .buckets
1726                .slice_mut()
1727                .split_at_mut((key << common_block_bits) as usize)
1728                .1
1729                .split_at_mut(self.specialization.block_size() as usize)
1730                .0;
1731            assert!(bucket.len() > self.specialization.block_mask() as usize);
1732            if num_copy != 0 {
1733                let down: usize = core::cmp::max(
1734                    i32::from(num_copy) - self.specialization.block_size() as i32,
1735                    0,
1736                ) as usize;
1737                i = num_copy as usize;
1738                while i > down {
1739                    i -= 1;
1740                    let mut prev_ix =
1741                        bucket[i & self.specialization.block_mask() as usize] as usize;
1742                    let backward = cur_ix.wrapping_sub(prev_ix);
1743                    prev_ix &= ring_buffer_mask;
1744                    if (cur_ix_masked.wrapping_add(best_len) > ring_buffer_mask
1745                        || prev_ix.wrapping_add(best_len) > ring_buffer_mask
1746                        || cur_data[best_len] != data[prev_ix.wrapping_add(best_len)])
1747                    {
1748                        if backward > max_backward {
1749                            break;
1750                        }
1751                        continue;
1752                    }
1753                    if backward > max_backward {
1754                        break;
1755                    }
1756                    let prev_data = data.split_at(prev_ix).1;
1757                    let len = FindMatchLengthWithLimitMin4(prev_data, cur_data, max_length);
1758                    if len != 0 {
1759                        let score: u64 = BackwardReferenceScore(len, backward, opts);
1760                        if best_score < score {
1761                            best_score = score;
1762                            best_len = len;
1763                            out.len = best_len;
1764                            out.distance = backward;
1765                            out.score = best_score;
1766                            is_match_found = true;
1767                        }
1768                    }
1769                }
1770            }
1771            bucket[(num_copy as u32 & self.specialization.block_mask()) as usize] =
1772                cur_ix as u32;
1773            *num_ref_mut = num_ref_mut.wrapping_add(1);
1774        }
1775        if !is_match_found && dictionary.is_some() {
1776            let (_, cur_data) = data.split_at(cur_ix_masked);
1777            is_match_found = SearchInStaticDictionary(
1778                dictionary.unwrap(),
1779                dictionary_hash,
1780                self,
1781                cur_data,
1782                max_length,
1783                max_backward.wrapping_add(gap),
1784                max_distance,
1785                out,
1786                false,
1787            );
1788        }
1789        is_match_found
1790    }
1791}
1792
1793pub struct BankH40 {
1794    pub slots: [SlotH40; 65536],
1795}
1796
1797pub struct BankH41 {
1798    pub slots: [SlotH41; 65536],
1799}
1800
1801pub struct BankH42 {
1802    pub slots: [SlotH42; 512],
1803}
1804
1805pub struct SlotH40 {
1806    pub delta: u16,
1807    pub next: u16,
1808}
1809pub struct SlotH41 {
1810    pub delta: u16,
1811    pub next: u16,
1812}
1813
1814pub struct SlotH42 {
1815    pub delta: u16,
1816    pub next: u16,
1817}
1818
1819// UNSUPPORTED, for now: the chain-based hashers H40, H41, and H42 below are declared but not wired into UnionHasher.
1820pub struct H40 {
1821    pub common: Struct1,
1822    pub addr: [u32; 32768],
1823    pub head: [u16; 32768],
1824    pub tiny_hash: [u8; 65536],
1825    pub banks: [BankH40; 1],
1826    pub free_slot_idx: [u16; 1],
1827    pub max_hops: usize,
1828}
1829
1830pub struct H41 {
1831    pub common: Struct1,
1832    pub addr: [u32; 32768],
1833    pub head: [u16; 32768],
1834    pub tiny_hash: [u8; 65536],
1835    pub banks: [BankH41; 1],
1836    pub free_slot_idx: [u16; 1],
1837    pub max_hops: usize,
1838}
1839
1840pub struct H42 {
1841    pub common: Struct1,
1842    pub addr: [u32; 32768],
1843    pub head: [u16; 32768],
1844    pub tiny_hash: [u8; 65536],
1845    pub banks: [BankH42; 512],
1846    free_slot_idx: [u16; 512],
1847    pub max_hops: usize,
1848}
1849
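// Counts trailing zero bits by repeated shifting. Assumes val != 0; an all-zero
// input would never terminate. For nonzero inputs it matches val.trailing_zeros().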
1850fn unopt_ctzll(mut val: usize) -> u8 {
1851    let mut cnt: u8 = 0u8;
1852    while val & 1 == 0usize {
1853        val >>= 1i32;
1854        cnt = (cnt as i32 + 1) as u8;
1855    }
1856    cnt
1857}
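
// A minimal sanity check for the helper above (an illustrative sketch; it assumes
// unopt_ctzll is only ever called with a nonzero argument, since an all-zero input
// would loop forever).
#[cfg(test)]
mod unopt_ctzll_example {
    use super::unopt_ctzll;

    #[test]
    fn matches_trailing_zeros_for_nonzero_inputs() {
        for &val in &[1usize, 2, 6, 8, 40, 1 << 20] {
            // The slow loop should agree with the hardware-backed trailing_zeros().
            assert_eq!(u32::from(unopt_ctzll(val)), val.trailing_zeros());
        }
    }
}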
1858
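// Score for a match found through the distance cache: a per-byte reward of
// (literal_byte_score >> 2) per copied byte, plus a fixed base of
// 30 * 8 * size_of::<u64>() = 1920, plus a bonus of 15 for reusing a recent distance.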
1859fn BackwardReferenceScoreUsingLastDistance(copy_length: usize, h9_opts: H9Opts) -> u64 {
1860    ((h9_opts.literal_byte_score as u64) >> 2)
1861        .wrapping_mul(copy_length as u64)
1862        .wrapping_add((30u64 * 8u64).wrapping_mul(::core::mem::size_of::<u64>() as u64))
1863        .wrapping_add(15)
1864}
1865
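// Score for a fresh match: the same base and per-byte reward as above, minus
// 30 * floor(log2(distance)), so nearer matches win between equal-length
// candidates. For example, with a literal_byte_score of, say, 540 a 4-byte copy at
// distance 1024 scores 1920 + 135 * 4 - 30 * 10 = 2160.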
1866fn BackwardReferenceScore(
1867    copy_length: usize,
1868    backward_reference_offset: usize,
1869    h9_opts: H9Opts,
1870) -> u64 {
1871    (30u64 * 8u64)
1872        .wrapping_mul(::core::mem::size_of::<u64>() as u64)
1873        .wrapping_add(((h9_opts.literal_byte_score as usize) >> 2).wrapping_mul(copy_length) as u64)
1874        .wrapping_sub(
1875            (30u64).wrapping_mul(Log2FloorNonZero(backward_reference_offset as u64) as u64),
1876        )
1877}
1878
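// Multiplicative hash of the first four bytes, keeping the top 14 bits; used
// (shifted left by one) to index the static-dictionary hash table.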
1879fn Hash14(data: &[u8]) -> u32 {
1880    let h: u32 = BROTLI_UNALIGNED_LOAD32(data).wrapping_mul(kHashMul32);
1881    h >> (32i32 - 14i32)
1882}
1883
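// Checks one packed static-dictionary entry: the low 5 bits of `item` encode the
// word length, the remaining bits its index within that length class. A partial
// match is allowed when a cutoff transform covers the missing suffix; the
// resulting backward distance is synthesized beyond max_backward so it can be
// recognized as a dictionary reference. Returns 1 and fills `out` only if the
// score is at least the current best.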
1884fn TestStaticDictionaryItem(
1885    dictionary: &BrotliDictionary,
1886    item: usize,
1887    data: &[u8],
1888    max_length: usize,
1889    max_backward: usize,
1890    max_distance: usize,
1891    h9_opts: H9Opts,
1892    out: &mut HasherSearchResult,
1893) -> i32 {
1894    let backward: usize;
1895
1896    let len: usize = item & 0x1fusize;
1897    let dist: usize = item >> 5;
1898    let offset: usize =
1899        (dictionary.offsets_by_length[len] as usize).wrapping_add(len.wrapping_mul(dist));
1900    if len > max_length {
1901        return 0i32;
1902    }
1903    let matchlen: usize = FindMatchLengthWithLimit(data, &dictionary.data[offset..], len);
1904    if matchlen.wrapping_add(kCutoffTransformsCount as usize) <= len || matchlen == 0usize {
1905        return 0i32;
1906    }
1907    {
1908        let cut: u64 = len.wrapping_sub(matchlen) as u64;
1909        let transform_id: usize =
1910            (cut << 2).wrapping_add(kCutoffTransforms >> cut.wrapping_mul(6) & 0x3f) as usize;
1911        backward = max_backward
1912            .wrapping_add(dist)
1913            .wrapping_add(1)
1914            .wrapping_add(transform_id << dictionary.size_bits_by_length[len] as i32);
1915    }
1916    if backward > max_distance {
1917        return 0i32;
1918    }
1919    let score: u64 = BackwardReferenceScore(matchlen, backward, h9_opts);
1920    if score < out.score {
1921        return 0i32;
1922    }
1923    out.len = matchlen;
1924    out.len_x_code = len ^ matchlen;
1925    out.distance = backward;
1926    out.score = score;
1927    1i32
1928}
1929
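// Static-dictionary fallback: skipped once the observed hit rate drops below
// roughly 1/128 of lookups (dict_num_matches < dict_num_lookups >> 7). Probes one
// hash slot (or two adjacent slots when `shallow` is false) derived from Hash14.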
1930fn SearchInStaticDictionary<HasherType: AnyHasher>(
1931    dictionary: &BrotliDictionary,
1932    dictionary_hash: &[u16],
1933    handle: &mut HasherType,
1934    data: &[u8],
1935    max_length: usize,
1936    max_backward: usize,
1937    max_distance: usize,
1938    out: &mut HasherSearchResult,
1939    shallow: bool,
1940) -> bool {
1941    let mut key: usize;
1942    let mut i: usize;
1943    let mut is_match_found = false;
1944    let opts = handle.Opts();
1945    let xself: &mut Struct1 = handle.GetHasherCommon();
1946    if xself.dict_num_matches < xself.dict_num_lookups >> 7 {
1947        return false;
1948    }
1949    key = (Hash14(data) << 1) as usize; // FIXME: is a fixed 14-bit hash appropriate for every hasher type?
1950    i = 0usize;
1951    while i < if shallow { 1 } else { 2 } {
1952        {
1953            let item: usize = dictionary_hash[key] as usize;
1954            xself.dict_num_lookups = xself.dict_num_lookups.wrapping_add(1);
1955            if item != 0usize {
1956                let item_matches: i32 = TestStaticDictionaryItem(
1957                    dictionary,
1958                    item,
1959                    data,
1960                    max_length,
1961                    max_backward,
1962                    max_distance,
1963                    opts,
1964                    out,
1965                );
1966                if item_matches != 0 {
1967                    xself.dict_num_matches = xself.dict_num_matches.wrapping_add(1);
1968                    is_match_found = true;
1969                }
1970            }
1971        }
1972        i = i.wrapping_add(1);
1973        key = key.wrapping_add(1);
1974    }
1975    is_match_found
1976}
1977
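// The CloneWithAlloc impls below deep-copy a hasher's tables through the caller's
// allocator: fresh cells are allocated for the bucket (and, where present, count)
// arrays and the existing contents are copied into them, so the clone owns its
// own memory.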
1978impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> CloneWithAlloc<Alloc>
1979    for BasicHasher<H2Sub<Alloc>>
1980{
1981    fn clone_with_alloc(&self, m: &mut Alloc) -> Self {
1982        let mut ret = BasicHasher::<H2Sub<Alloc>> {
1983            GetHasherCommon: self.GetHasherCommon.clone(),
1984            buckets_: H2Sub::<Alloc> {
1985                buckets_: <Alloc as Allocator<u32>>::alloc_cell(m, self.buckets_.buckets_.len()),
1986            },
1987            h9_opts: self.h9_opts,
1988        };
1989        ret.buckets_
1990            .buckets_
1991            .slice_mut()
1992            .clone_from_slice(self.buckets_.buckets_.slice());
1993        ret
1994    }
1995}
1996impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> CloneWithAlloc<Alloc>
1997    for BasicHasher<H3Sub<Alloc>>
1998{
1999    fn clone_with_alloc(&self, m: &mut Alloc) -> Self {
2000        let mut ret = BasicHasher::<H3Sub<Alloc>> {
2001            GetHasherCommon: self.GetHasherCommon.clone(),
2002            buckets_: H3Sub::<Alloc> {
2003                buckets_: <Alloc as Allocator<u32>>::alloc_cell(m, self.buckets_.buckets_.len()),
2004            },
2005            h9_opts: self.h9_opts,
2006        };
2007        ret.buckets_
2008            .buckets_
2009            .slice_mut()
2010            .clone_from_slice(self.buckets_.buckets_.slice());
2011        ret
2012    }
2013}
2014impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> CloneWithAlloc<Alloc>
2015    for BasicHasher<H4Sub<Alloc>>
2016{
2017    fn clone_with_alloc(&self, m: &mut Alloc) -> Self {
2018        let mut ret = BasicHasher::<H4Sub<Alloc>> {
2019            GetHasherCommon: self.GetHasherCommon.clone(),
2020            buckets_: H4Sub::<Alloc> {
2021                buckets_: <Alloc as Allocator<u32>>::alloc_cell(m, self.buckets_.buckets_.len()),
2022            },
2023            h9_opts: self.h9_opts,
2024        };
2025        ret.buckets_
2026            .buckets_
2027            .slice_mut()
2028            .clone_from_slice(self.buckets_.buckets_.slice());
2029        ret
2030    }
2031}
2032impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> CloneWithAlloc<Alloc>
2033    for BasicHasher<H54Sub<Alloc>>
2034{
2035    fn clone_with_alloc(&self, m: &mut Alloc) -> Self {
2036        let mut ret = BasicHasher::<H54Sub<Alloc>> {
2037            GetHasherCommon: self.GetHasherCommon.clone(),
2038            buckets_: H54Sub::<Alloc> {
2039                buckets_: <Alloc as Allocator<u32>>::alloc_cell(m, self.buckets_.len()),
2040            },
2041            h9_opts: self.h9_opts,
2042        };
2043        ret.buckets_
2044            .buckets_
2045            .slice_mut()
2046            .clone_from_slice(self.buckets_.buckets_.slice());
2047        ret
2048    }
2049}
2050impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> CloneWithAlloc<Alloc> for H9<Alloc> {
2051    fn clone_with_alloc(&self, m: &mut Alloc) -> Self {
2052        let mut num = <Alloc as Allocator<u16>>::alloc_cell(m, self.num_.len());
2053        num.slice_mut().clone_from_slice(self.num_.slice());
2054        let mut buckets = <Alloc as Allocator<u32>>::alloc_cell(m, self.buckets_.len());
2055        buckets.slice_mut().clone_from_slice(self.buckets_.slice());
2056        H9::<Alloc> {
2057            num_: num,
2058            buckets_: buckets,
2059            dict_search_stats_: self.dict_search_stats_.clone(),
2060            h9_opts: self.h9_opts,
2061        }
2062    }
2063}
2064impl<
2065        Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>,
2066        Special: AdvHashSpecialization + Sized + Clone,
2067    > CloneWithAlloc<Alloc> for AdvHasher<Special, Alloc>
2068{
2069    fn clone_with_alloc(&self, m: &mut Alloc) -> Self {
2070        let mut num = <Alloc as Allocator<u16>>::alloc_cell(m, self.num.len());
2071        num.slice_mut().clone_from_slice(self.num.slice());
2072        let mut buckets = <Alloc as Allocator<u32>>::alloc_cell(m, self.buckets.len());
2073        buckets.slice_mut().clone_from_slice(self.buckets.slice());
2074        AdvHasher::<Special, Alloc> {
2075            GetHasherCommon: self.GetHasherCommon.clone(),
2076            specialization: self.specialization.clone(),
2077            num,
2078            buckets,
2079            h9_opts: self.h9_opts,
2080        }
2081    }
2082}
2083
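// UnionHasher is a tagged union over every supported hasher type. The
// match_all_hashers!/match_all_hashers_mut! macros below dispatch each AnyHasher
// method to whichever variant is active, panicking on Uninit.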
2084pub enum UnionHasher<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> {
2085    Uninit,
2086    H2(BasicHasher<H2Sub<Alloc>>),
2087    H3(BasicHasher<H3Sub<Alloc>>),
2088    H4(BasicHasher<H4Sub<Alloc>>),
2089    H54(BasicHasher<H54Sub<Alloc>>),
2090    H5(AdvHasher<H5Sub, Alloc>),
2091    H5q7(AdvHasher<HQ7Sub, Alloc>),
2092    H5q5(AdvHasher<HQ5Sub, Alloc>),
2093    H6(AdvHasher<H6Sub, Alloc>),
2094    H9(H9<Alloc>),
2095    H10(H10<Alloc, H10Buckets<Alloc>, H10DefaultParams>),
2096}
2097impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> PartialEq<UnionHasher<Alloc>>
2098    for UnionHasher<Alloc>
2099{
2100    fn eq(&self, other: &UnionHasher<Alloc>) -> bool {
2101        match *self {
2102            UnionHasher::H2(ref hasher) => match *other {
2103                UnionHasher::H2(ref otherh) => *hasher == *otherh,
2104                _ => false,
2105            },
2106            UnionHasher::H3(ref hasher) => match *other {
2107                UnionHasher::H3(ref otherh) => *hasher == *otherh,
2108                _ => false,
2109            },
2110            UnionHasher::H4(ref hasher) => match *other {
2111                UnionHasher::H4(ref otherh) => *hasher == *otherh,
2112                _ => false,
2113            },
2114            UnionHasher::H54(ref hasher) => match *other {
2115                UnionHasher::H54(ref otherh) => *hasher == *otherh,
2116                _ => false,
2117            },
2118            UnionHasher::H5(ref hasher) => match *other {
2119                UnionHasher::H5(ref otherh) => *hasher == *otherh,
2120                _ => false,
2121            },
2122            UnionHasher::H5q7(ref hasher) => match *other {
2123                UnionHasher::H5q7(ref otherh) => *hasher == *otherh,
2124                _ => false,
2125            },
2126            UnionHasher::H5q5(ref hasher) => match *other {
2127                UnionHasher::H5q5(ref otherh) => *hasher == *otherh,
2128                _ => false,
2129            },
2130            UnionHasher::H6(ref hasher) => match *other {
2131                UnionHasher::H6(ref otherh) => *hasher == *otherh,
2132                _ => false,
2133            },
2134            UnionHasher::H9(ref hasher) => match *other {
2135                UnionHasher::H9(ref otherh) => *hasher == *otherh,
2136                _ => false,
2137            },
2138            UnionHasher::H10(ref hasher) => match *other {
2139                UnionHasher::H10(ref otherh) => *hasher == *otherh,
2140                _ => false,
2141            },
2142            UnionHasher::Uninit => match *other {
2143                UnionHasher::Uninit => true,
2144                _ => false,
2145            },
2146        }
2147    }
2148}
2149impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> CloneWithAlloc<Alloc>
2150    for UnionHasher<Alloc>
2151{
2152    fn clone_with_alloc(&self, m: &mut Alloc) -> Self {
2153        match *self {
2154            UnionHasher::H2(ref hasher) => UnionHasher::H2(hasher.clone_with_alloc(m)),
2155            UnionHasher::H3(ref hasher) => UnionHasher::H3(hasher.clone_with_alloc(m)),
2156            UnionHasher::H4(ref hasher) => UnionHasher::H4(hasher.clone_with_alloc(m)),
2157            UnionHasher::H5(ref hasher) => UnionHasher::H5(hasher.clone_with_alloc(m)),
2158            UnionHasher::H5q7(ref hasher) => UnionHasher::H5q7(hasher.clone_with_alloc(m)),
2159            UnionHasher::H5q5(ref hasher) => UnionHasher::H5q5(hasher.clone_with_alloc(m)),
2160            UnionHasher::H6(ref hasher) => UnionHasher::H6(hasher.clone_with_alloc(m)),
2161            UnionHasher::H54(ref hasher) => UnionHasher::H54(hasher.clone_with_alloc(m)),
2162            UnionHasher::H9(ref hasher) => UnionHasher::H9(hasher.clone_with_alloc(m)),
2163            UnionHasher::H10(ref hasher) => UnionHasher::H10(hasher.clone_with_alloc(m)),
2164            UnionHasher::Uninit => UnionHasher::Uninit,
2165        }
2166    }
2167}
2168macro_rules! match_all_hashers_mut {
2169    ($xself : expr, $func_call : ident, $( $args:expr),*) => {
2170        match $xself {
2171     &mut UnionHasher::H2(ref mut hasher) => hasher.$func_call($($args),*),
2172     &mut UnionHasher::H3(ref mut hasher) => hasher.$func_call($($args),*),
2173     &mut UnionHasher::H4(ref mut hasher) => hasher.$func_call($($args),*),
2174     &mut UnionHasher::H5(ref mut hasher) => hasher.$func_call($($args),*),
2175     &mut UnionHasher::H5q7(ref mut hasher) => hasher.$func_call($($args),*),
2176     &mut UnionHasher::H5q5(ref mut hasher) => hasher.$func_call($($args),*),
2177     &mut UnionHasher::H6(ref mut hasher) => hasher.$func_call($($args),*),
2178     &mut UnionHasher::H54(ref mut hasher) => hasher.$func_call($($args),*),
2179     &mut UnionHasher::H9(ref mut hasher) => hasher.$func_call($($args),*),
2180     &mut UnionHasher::H10(ref mut hasher) => hasher.$func_call($($args),*),
2181     &mut UnionHasher::Uninit => panic!("UNINITIALIZED"),
2182        }
2183    };
2184}
2185macro_rules! match_all_hashers {
2186    ($xself : expr, $func_call : ident, $( $args:expr),*) => {
2187        match $xself {
2188     &UnionHasher::H2(ref hasher) => hasher.$func_call($($args),*),
2189     &UnionHasher::H3(ref hasher) => hasher.$func_call($($args),*),
2190     &UnionHasher::H4(ref hasher) => hasher.$func_call($($args),*),
2191     &UnionHasher::H5(ref hasher) => hasher.$func_call($($args),*),
2192     &UnionHasher::H5q7(ref hasher) => hasher.$func_call($($args),*),
2193     &UnionHasher::H5q5(ref hasher) => hasher.$func_call($($args),*),
2194     &UnionHasher::H6(ref hasher) => hasher.$func_call($($args),*),
2195     &UnionHasher::H54(ref hasher) => hasher.$func_call($($args),*),
2196     &UnionHasher::H9(ref hasher) => hasher.$func_call($($args),*),
2197     &UnionHasher::H10(ref hasher) => hasher.$func_call($($args),*),
2198     &UnionHasher::Uninit => panic!("UNINITIALIZED"),
2199        }
2200    };
2201}
2202impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> AnyHasher for UnionHasher<Alloc> {
2203    fn Opts(&self) -> H9Opts {
2204        match_all_hashers!(self, Opts,)
2205    }
2206    fn GetHasherCommon(&mut self) -> &mut Struct1 {
2207        match_all_hashers_mut!(self, GetHasherCommon,)
2208    } /*
2209      fn GetH10Tree(&mut self) -> Option<&mut H10<AllocU32, H10Buckets, H10DefaultParams>> {
2210        return match_all_hashers_mut!(self, GetH10Tree,);
2211      }*/
2212    fn Prepare(&mut self, one_shot: bool, input_size: usize, data: &[u8]) -> HowPrepared {
2213        match_all_hashers_mut!(self, Prepare, one_shot, input_size, data)
2214    }
2215    fn HashBytes(&self, data: &[u8]) -> usize {
2216        match_all_hashers!(self, HashBytes, data)
2217    }
2218    fn HashTypeLength(&self) -> usize {
2219        match_all_hashers!(self, HashTypeLength,)
2220    }
2221    fn StoreLookahead(&self) -> usize {
2222        match_all_hashers!(self, StoreLookahead,)
2223    }
2224    fn PrepareDistanceCache(&self, distance_cache: &mut [i32]) {
2225        match_all_hashers!(self, PrepareDistanceCache, distance_cache)
2226    }
2227    fn StitchToPreviousBlock(
2228        &mut self,
2229        num_bytes: usize,
2230        position: usize,
2231        ringbuffer: &[u8],
2232        ringbuffer_mask: usize,
2233    ) {
2234        match_all_hashers_mut!(
2235            self,
2236            StitchToPreviousBlock,
2237            num_bytes,
2238            position,
2239            ringbuffer,
2240            ringbuffer_mask
2241        )
2242    }
2243    fn FindLongestMatch(
2244        &mut self,
2245        dictionary: Option<&BrotliDictionary>,
2246        dictionary_hash: &[u16],
2247        data: &[u8],
2248        ring_buffer_mask: usize,
2249        distance_cache: &[i32],
2250        cur_ix: usize,
2251        max_length: usize,
2252        max_backward: usize,
2253        gap: usize,
2254        max_distance: usize,
2255        out: &mut HasherSearchResult,
2256    ) -> bool {
2257        match_all_hashers_mut!(
2258            self,
2259            FindLongestMatch,
2260            dictionary,
2261            dictionary_hash,
2262            data,
2263            ring_buffer_mask,
2264            distance_cache,
2265            cur_ix,
2266            max_length,
2267            max_backward,
2268            gap,
2269            max_distance,
2270            out
2271        )
2272    }
2273    fn Store(&mut self, data: &[u8], mask: usize, ix: usize) {
2274        match_all_hashers_mut!(self, Store, data, mask, ix)
2275    }
2276    fn StoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
2277        match_all_hashers_mut!(self, StoreRange, data, mask, ix_start, ix_end)
2278    }
2279    fn BulkStoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
2280        match_all_hashers_mut!(self, BulkStoreRange, data, mask, ix_start, ix_end)
2281    }
2282}
2283
2284impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> UnionHasher<Alloc> {
2285    pub fn free(&mut self, alloc: &mut Alloc) {
2286        match self {
2287            &mut UnionHasher::H2(ref mut hasher) => {
2288                <Alloc as Allocator<u32>>::free_cell(
2289                    alloc,
2290                    core::mem::take(&mut hasher.buckets_.buckets_),
2291                );
2292            }
2293            &mut UnionHasher::H3(ref mut hasher) => {
2294                <Alloc as Allocator<u32>>::free_cell(
2295                    alloc,
2296                    core::mem::take(&mut hasher.buckets_.buckets_),
2297                );
2298            }
2299            &mut UnionHasher::H4(ref mut hasher) => {
2300                <Alloc as Allocator<u32>>::free_cell(
2301                    alloc,
2302                    core::mem::take(&mut hasher.buckets_.buckets_),
2303                );
2304            }
2305            &mut UnionHasher::H54(ref mut hasher) => {
2306                <Alloc as Allocator<u32>>::free_cell(
2307                    alloc,
2308                    core::mem::take(&mut hasher.buckets_.buckets_),
2309                );
2310            }
2311            &mut UnionHasher::H5q7(ref mut hasher) => {
2312                <Alloc as Allocator<u16>>::free_cell(alloc, core::mem::take(&mut hasher.num));
2313                <Alloc as Allocator<u32>>::free_cell(alloc, core::mem::take(&mut hasher.buckets));
2314            }
2315            &mut UnionHasher::H5q5(ref mut hasher) => {
2316                <Alloc as Allocator<u16>>::free_cell(alloc, core::mem::take(&mut hasher.num));
2317                <Alloc as Allocator<u32>>::free_cell(alloc, core::mem::take(&mut hasher.buckets));
2318            }
2319            &mut UnionHasher::H5(ref mut hasher) => {
2320                <Alloc as Allocator<u16>>::free_cell(alloc, core::mem::take(&mut hasher.num));
2321                <Alloc as Allocator<u32>>::free_cell(alloc, core::mem::take(&mut hasher.buckets));
2322            }
2323            &mut UnionHasher::H6(ref mut hasher) => {
2324                <Alloc as Allocator<u16>>::free_cell(alloc, core::mem::take(&mut hasher.num));
2325                <Alloc as Allocator<u32>>::free_cell(alloc, core::mem::take(&mut hasher.buckets));
2326            }
2327            &mut UnionHasher::H9(ref mut hasher) => {
2328                <Alloc as Allocator<u16>>::free_cell(alloc, core::mem::take(&mut hasher.num_));
2329                <Alloc as Allocator<u32>>::free_cell(alloc, core::mem::take(&mut hasher.buckets_));
2330            }
2331            &mut UnionHasher::H10(ref mut hasher) => {
2332                hasher.free(alloc);
2333            }
2334            &mut UnionHasher::Uninit => {}
2335        }
2336        *self = UnionHasher::<Alloc>::default();
2337    }
2338}
2339
2340impl<Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>> Default for UnionHasher<Alloc> {
2341    fn default() -> Self {
2342        UnionHasher::Uninit
2343    }
2344}
2345
2346/*UnionHasher::H2(BasicHasher {
2347GetHasherCommon:Struct1{params:BrotliHasherParams{
2348 type_:2,
2349 block_bits: 8,
2350 bucket_bits:16,
2351 hash_len: 4,
2352 num_last_distances_to_check:0},
2353is_prepared_:0,
2354dict_num_lookups:0,
2355dict_num_matches:0,
2356},
2357buckets_:H2Sub{
2358buckets_:[0;65537],
2359},
2360})
2361*/
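// Greedy backward-reference search shared by all non-Zopfli hashers: at each
// position it looks for the longest match, then peeks one byte ahead and defers
// to the later match if it scores at least cost_diff_lazy (175) higher; deferral
// is limited to a few consecutive steps (delayed_backward_references_in_row < 4).
// Matches that introduce a new distance rotate the distance cache, and each match
// is emitted as a Command. Long stretches without matches trigger the random
// heuristics, which skip ahead 8 or 16 bytes at a time while still feeding the
// hasher.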
2362fn CreateBackwardReferences<AH: AnyHasher>(
2363    dictionary: Option<&BrotliDictionary>,
2364    dictionary_hash: &[u16],
2365    num_bytes: usize,
2366    mut position: usize,
2367    ringbuffer: &[u8],
2368    ringbuffer_mask: usize,
2369    params: &BrotliEncoderParams,
2370    hasher: &mut AH,
2371    dist_cache: &mut [i32],
2372    last_insert_len: &mut usize,
2373    mut commands: &mut [Command],
2374    num_commands: &mut usize,
2375    num_literals: &mut usize,
2376) {
2377    let gap = 0usize;
2378    let max_backward_limit: usize = (1usize << params.lgwin).wrapping_sub(16);
2379    let mut new_commands_count: usize = 0;
2380    let mut insert_length: usize = *last_insert_len;
2381    let pos_end: usize = position.wrapping_add(num_bytes);
2382    let store_end: usize = if num_bytes >= hasher.StoreLookahead() {
2383        position
2384            .wrapping_add(num_bytes)
2385            .wrapping_sub(hasher.StoreLookahead())
2386            .wrapping_add(1)
2387    } else {
2388        position
2389    };
2390    let random_heuristics_window_size: usize = LiteralSpreeLengthForSparseSearch(params);
2391    let mut apply_random_heuristics: usize = position.wrapping_add(random_heuristics_window_size);
2392    let kMinScore: u64 = (30u64 * 8)
2393        .wrapping_mul(::core::mem::size_of::<u64>() as u64)
2394        .wrapping_add(100);
2395    hasher.PrepareDistanceCache(dist_cache);
2396    while position.wrapping_add(hasher.HashTypeLength()) < pos_end {
2397        let mut max_length: usize = pos_end.wrapping_sub(position);
2398        let mut max_distance: usize = brotli_min_size_t(position, max_backward_limit);
2399        let mut sr = HasherSearchResult {
2400            len: 0,
2401            len_x_code: 0,
2402            distance: 0,
2403            score: kMinScore,
2404        };
2409        if hasher.FindLongestMatch(
2410            dictionary,
2411            dictionary_hash,
2412            ringbuffer,
2413            ringbuffer_mask,
2414            dist_cache,
2415            position,
2416            max_length,
2417            max_distance,
2418            gap,
2419            params.dist.max_distance,
2420            &mut sr,
2421        ) {
2422            let mut delayed_backward_references_in_row: i32 = 0i32;
2423            max_length = max_length.wrapping_sub(1);
2424            'break6: loop {
2425                'continue7: loop {
2426                    let cost_diff_lazy: u64 = 175;
2427
2428                    let mut sr2 = HasherSearchResult {
2429                        len: 0,
2430                        len_x_code: 0,
2431                        distance: 0,
2432                        score: 0,
2433                    };
2434                    sr2.len = if params.quality < 5 {
2435                        brotli_min_size_t(sr.len.wrapping_sub(1), max_length)
2436                    } else {
2437                        0usize
2438                    };
2439                    sr2.len_x_code = 0usize;
2440                    sr2.distance = 0usize;
2441                    sr2.score = kMinScore;
2442                    max_distance = brotli_min_size_t(position.wrapping_add(1), max_backward_limit);
2443                    let is_match_found: bool = hasher.FindLongestMatch(
2444                        dictionary,
2445                        dictionary_hash,
2446                        ringbuffer,
2447                        ringbuffer_mask,
2448                        dist_cache,
2449                        position.wrapping_add(1),
2450                        max_length,
2451                        max_distance,
2452                        gap,
2453                        params.dist.max_distance,
2454                        &mut sr2,
2455                    );
2456                    if is_match_found && (sr2.score >= sr.score.wrapping_add(cost_diff_lazy)) {
2457                        position = position.wrapping_add(1);
2458                        insert_length = insert_length.wrapping_add(1);
2459                        sr = sr2;
2460                        delayed_backward_references_in_row += 1;
2461                        if delayed_backward_references_in_row < 4i32
2462                            && (position.wrapping_add(hasher.HashTypeLength()) < pos_end)
2463                        {
2464                            break 'continue7;
2465                        }
2470                    }
2471                    break 'break6;
2472                }
2473                max_length = max_length.wrapping_sub(1);
2474            }
2475            apply_random_heuristics = position
2476                .wrapping_add((2usize).wrapping_mul(sr.len))
2477                .wrapping_add(random_heuristics_window_size);
2478            max_distance = brotli_min_size_t(position, max_backward_limit);
2479            {
2480                let distance_code: usize =
2481                    ComputeDistanceCode(sr.distance, max_distance, dist_cache);
2482                if sr.distance <= max_distance && (distance_code > 0usize) {
2483                    dist_cache[3] = dist_cache[2];
2484                    dist_cache[2] = dist_cache[1];
2485                    dist_cache[1] = dist_cache[0];
2486                    dist_cache[0] = sr.distance as i32;
2487                    hasher.PrepareDistanceCache(dist_cache);
2488                }
2489                new_commands_count += 1;
2490                InitCommand(
2491                    {
2492                        let (mut _old, new_commands) =
2493                            core::mem::take(&mut commands).split_at_mut(1);
2494                        commands = new_commands;
2495                        &mut _old[0]
2496                    },
2497                    &params.dist,
2498                    insert_length,
2499                    sr.len,
2500                    sr.len ^ sr.len_x_code,
2501                    distance_code,
2502                );
2503            }
2504            *num_literals = num_literals.wrapping_add(insert_length);
2505            insert_length = 0usize;
2506            hasher.StoreRange(
2507                ringbuffer,
2508                ringbuffer_mask,
2509                position.wrapping_add(2),
2510                brotli_min_size_t(position.wrapping_add(sr.len), store_end),
2511            );
2512            position = position.wrapping_add(sr.len);
2513        } else {
2514            insert_length = insert_length.wrapping_add(1);
2515            position = position.wrapping_add(1);
2516
2517            if position > apply_random_heuristics {
2518                let kMargin: usize =
2519                    brotli_max_size_t(hasher.StoreLookahead().wrapping_sub(1), 4usize);
2520                if position.wrapping_add(16) >= pos_end.wrapping_sub(kMargin) {
2521                    insert_length = insert_length.wrapping_add(pos_end - position);
2522                    position = pos_end;
2523                } else if position
2524                    > apply_random_heuristics
2525                        .wrapping_add((4usize).wrapping_mul(random_heuristics_window_size))
2526                {
2527                    hasher.Store4Vec4(ringbuffer, ringbuffer_mask, position);
2528                    insert_length = insert_length.wrapping_add(16);
2529                    position = position.wrapping_add(16);
2530                } else {
2531                    hasher.StoreEvenVec4(ringbuffer, ringbuffer_mask, position);
2532                    insert_length = insert_length.wrapping_add(8);
2533                    position = position.wrapping_add(8);
2534                }
2535            }
2536        }
2537    }
2538    insert_length = insert_length.wrapping_add(pos_end.wrapping_sub(position));
2539    *last_insert_len = insert_length;
2540    *num_commands = num_commands.wrapping_add(new_commands_count);
2541}
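// Public entry point: dispatches on the active UnionHasher variant. H10 routes to
// the Zopfli-based searches (the HQ variant at quality >= 11), every other variant
// to the greedy CreateBackwardReferences above with the static dictionary hash.
// The dictionary is passed through only when params.use_dictionary is set.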
2542pub fn BrotliCreateBackwardReferences<
2543    Alloc: alloc::Allocator<u16>
2544        + alloc::Allocator<u32>
2545        + alloc::Allocator<u64>
2546        + alloc::Allocator<floatX>
2547        + alloc::Allocator<ZopfliNode>,
2548>(
2549    alloc: &mut Alloc,
2550    dictionary: &BrotliDictionary,
2551    num_bytes: usize,
2552    position: usize,
2553    ringbuffer: &[u8],
2554    ringbuffer_mask: usize,
2555    params: &BrotliEncoderParams,
2556    hasher_union: &mut UnionHasher<Alloc>,
2557    dist_cache: &mut [i32],
2558    last_insert_len: &mut usize,
2559    commands: &mut [Command],
2560    num_commands: &mut usize,
2561    num_literals: &mut usize,
2562) {
2563    match hasher_union {
2564        &mut UnionHasher::Uninit => panic!("working with uninitialized hash map"),
2565        &mut UnionHasher::H10(ref mut hasher) => {
2566            if params.quality >= 11 {
2567                super::backward_references_hq::BrotliCreateHqZopfliBackwardReferences(
2568                    alloc,
2569                    if params.use_dictionary {
2570                        Some(dictionary)
2571                    } else {
2572                        None
2573                    },
2574                    num_bytes,
2575                    position,
2576                    ringbuffer,
2577                    ringbuffer_mask,
2578                    params,
2579                    hasher,
2580                    dist_cache,
2581                    last_insert_len,
2582                    commands,
2583                    num_commands,
2584                    num_literals,
2585                )
2586            } else {
2587                super::backward_references_hq::BrotliCreateZopfliBackwardReferences(
2588                    alloc,
2589                    if params.use_dictionary {
2590                        Some(dictionary)
2591                    } else {
2592                        None
2593                    },
2594                    num_bytes,
2595                    position,
2596                    ringbuffer,
2597                    ringbuffer_mask,
2598                    params,
2599                    hasher,
2600                    dist_cache,
2601                    last_insert_len,
2602                    commands,
2603                    num_commands,
2604                    num_literals,
2605                )
2606            }
2607        }
2608        &mut UnionHasher::H2(ref mut hasher) => CreateBackwardReferences(
2609            if params.use_dictionary {
2610                Some(dictionary)
2611            } else {
2612                None
2613            },
2614            &kStaticDictionaryHash[..],
2615            num_bytes,
2616            position,
2617            ringbuffer,
2618            ringbuffer_mask,
2619            params,
2620            hasher,
2621            dist_cache,
2622            last_insert_len,
2623            commands,
2624            num_commands,
2625            num_literals,
2626        ),
2627        &mut UnionHasher::H3(ref mut hasher) => CreateBackwardReferences(
2628            if params.use_dictionary {
2629                Some(dictionary)
2630            } else {
2631                None
2632            },
2633            &kStaticDictionaryHash[..],
2634            num_bytes,
2635            position,
2636            ringbuffer,
2637            ringbuffer_mask,
2638            params,
2639            hasher,
2640            dist_cache,
2641            last_insert_len,
2642            commands,
2643            num_commands,
2644            num_literals,
2645        ),
2646        &mut UnionHasher::H4(ref mut hasher) => CreateBackwardReferences(
2647            if params.use_dictionary {
2648                Some(dictionary)
2649            } else {
2650                None
2651            },
2652            &kStaticDictionaryHash[..],
2653            num_bytes,
2654            position,
2655            ringbuffer,
2656            ringbuffer_mask,
2657            params,
2658            hasher,
2659            dist_cache,
2660            last_insert_len,
2661            commands,
2662            num_commands,
2663            num_literals,
2664        ),
2665        &mut UnionHasher::H5(ref mut hasher) => CreateBackwardReferences(
2666            if params.use_dictionary {
2667                Some(dictionary)
2668            } else {
2669                None
2670            },
2671            &kStaticDictionaryHash[..],
2672            num_bytes,
2673            position,
2674            ringbuffer,
2675            ringbuffer_mask,
2676            params,
2677            hasher,
2678            dist_cache,
2679            last_insert_len,
2680            commands,
2681            num_commands,
2682            num_literals,
2683        ),
2684        &mut UnionHasher::H5q7(ref mut hasher) => CreateBackwardReferences(
2685            if params.use_dictionary {
2686                Some(dictionary)
2687            } else {
2688                None
2689            },
2690            &kStaticDictionaryHash[..],
2691            num_bytes,
2692            position,
2693            ringbuffer,
2694            ringbuffer_mask,
2695            params,
2696            hasher,
2697            dist_cache,
2698            last_insert_len,
2699            commands,
2700            num_commands,
2701            num_literals,
2702        ),
2703        &mut UnionHasher::H5q5(ref mut hasher) => CreateBackwardReferences(
2704            if params.use_dictionary {
2705                Some(dictionary)
2706            } else {
2707                None
2708            },
2709            &kStaticDictionaryHash[..],
2710            num_bytes,
2711            position,
2712            ringbuffer,
2713            ringbuffer_mask,
2714            params,
2715            hasher,
2716            dist_cache,
2717            last_insert_len,
2718            commands,
2719            num_commands,
2720            num_literals,
2721        ),
2722        &mut UnionHasher::H6(ref mut hasher) => CreateBackwardReferences(
2723            if params.use_dictionary {
2724                Some(dictionary)
2725            } else {
2726                None
2727            },
2728            &kStaticDictionaryHash[..],
2729            num_bytes,
2730            position,
2731            ringbuffer,
2732            ringbuffer_mask,
2733            params,
2734            hasher,
2735            dist_cache,
2736            last_insert_len,
2737            commands,
2738            num_commands,
2739            num_literals,
2740        ),
2741        &mut UnionHasher::H9(ref mut hasher) => CreateBackwardReferences(
2742            if params.use_dictionary {
2743                Some(dictionary)
2744            } else {
2745                None
2746            },
2747            &kStaticDictionaryHash[..],
2748            num_bytes,
2749            position,
2750            ringbuffer,
2751            ringbuffer_mask,
2752            params,
2753            hasher,
2754            dist_cache,
2755            last_insert_len,
2756            commands,
2757            num_commands,
2758            num_literals,
2759        ),
2760        &mut UnionHasher::H54(ref mut hasher) => CreateBackwardReferences(
2761            if params.use_dictionary {
2762                Some(dictionary)
2763            } else {
2764                None
2765            },
2766            &kStaticDictionaryHash[..],
2767            num_bytes,
2768            position,
2769            ringbuffer,
2770            ringbuffer_mask,
2771            params,
2772            hasher,
2773            dist_cache,
2774            last_insert_len,
2775            commands,
2776            num_commands,
2777            num_literals,
2778        ),
2779    }
2780}