#![allow(dead_code, unused_imports)]
use super::{
    kDistanceCacheIndex, kDistanceCacheOffset, kHashMul32, kHashMul64, kHashMul64Long,
    kInvalidMatch, AnyHasher, BrotliEncoderParams, BrotliHasherParams, CloneWithAlloc, H9Opts,
    HasherSearchResult, HowPrepared, Struct1,
};
use alloc;
use alloc::{Allocator, SliceWrapper, SliceWrapperMut};
use core;
use enc::command::{
    CombineLengthCodes, Command, CommandCopyLen, ComputeDistanceCode, GetCopyLengthCode,
    GetInsertLengthCode, InitCommand, PrefixEncodeCopyDistance,
};
use enc::constants::{kCopyExtra, kInsExtra};
use enc::dictionary_hash::kStaticDictionaryHash;
use enc::literal_cost::BrotliEstimateBitCostsForLiterals;
use enc::static_dict::{
    kBrotliEncDictionary, BrotliDictionary, BrotliFindAllStaticDictionaryMatches,
};
use enc::static_dict::{
    FindMatchLengthWithLimit, BROTLI_UNALIGNED_LOAD32, BROTLI_UNALIGNED_LOAD64,
};
use enc::util::{brotli_max_size_t, floatX, FastLog2, Log2FloorNonZero};

pub const kInfinity: floatX = 1.7e38 as floatX;

// Stand-in for the C union inside ZopfliNode: `cost` is the smallest cost found
// so far to reach this byte; once the shortest path has been traced back, the
// slot is reused for `next` (offset to the next node on the chosen path) or
// `shortcut` (node position feeding the distance-cache update).
#[derive(Clone, Copy, Debug)]
pub enum Union1 {
    cost(floatX),
    next(u32),
    shortcut(u32),
}

#[derive(Clone, Copy, Debug)]
pub struct ZopfliNode {
    // best length to get up to this byte (not including this byte itself);
    // the highest 7 bits are used to reconstruct the length code
    pub length: u32,
    // distance associated with the length
    pub distance: u32,
    // number of literal inserts before this copy; the highest 5 bits hold the
    // distance short code plus one (or zero if no short code applies)
    pub dcode_insert_length: u32,
    pub u: Union1,
}
impl Default for ZopfliNode {
    fn default() -> Self {
        ZopfliNode {
            length: 1,
            distance: 0,
            dcode_insert_length: 0,
            u: Union1::cost(kInfinity),
        }
    }
}
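
// A minimal test-only sketch of how a freshly initialized node is read: the
// default represents "no match chosen yet", i.e. a single literal with an
// infinite cost. The module name is ours, not part of the original code.
#[cfg(test)]
mod zopfli_node_default_sketch {
    use super::*;

    #[test]
    fn default_node_is_a_literal_with_infinite_cost() {
        let node = ZopfliNode::default();
        assert_eq!(node.length, 1);
        assert_eq!(node.distance, 0);
        assert_eq!(node.dcode_insert_length, 0);
        match node.u {
            Union1::cost(c) => assert!(c >= kInfinity),
            _ => panic!("a default node should carry a cost, not a path link"),
        }
    }
}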

// Allocation helper used for the fixed-size bucket table below.
pub trait Allocable<T: Copy, AllocT: Allocator<T>> {
    fn new(m: &mut AllocT, init: T) -> Self;
    fn new_uninit(m: &mut AllocT) -> Self;
    fn free(&mut self, m: &mut AllocT);
}

// Compile-time limits for the binary-tree match finder: how deep a tree walk
// may go and over how many bytes two suffixes are compared at most.
pub trait H10Params {
    fn max_tree_search_depth() -> u32;
    fn max_tree_comp_length() -> u32;
}

pub struct H10DefaultParams {}
impl H10Params for H10DefaultParams {
    #[inline(always)]
    fn max_tree_search_depth() -> u32 {
        64
    }
    #[inline(always)]
    fn max_tree_comp_length() -> u32 {
        128
    }
}
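
// Hedged illustration (test-only, not used by the crate): because H10Params is
// a compile-time knob, a different search-effort trade-off is just another
// zero-sized type. `H10ShallowParams` is a hypothetical name of ours.
#[cfg(test)]
pub struct H10ShallowParams {}
#[cfg(test)]
impl H10Params for H10ShallowParams {
    #[inline(always)]
    fn max_tree_search_depth() -> u32 {
        32
    }
    #[inline(always)]
    fn max_tree_comp_length() -> u32 {
        64
    }
}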

const BUCKET_BITS: usize = 17;

// Bucket table for the tree hasher: one u32 slot per hash value, holding the
// most recent position whose 4-byte prefix hashed to that bucket (and thus the
// root of that bucket's binary tree in `forest`).
pub struct H10Buckets<AllocU32: Allocator<u32>>(AllocU32::AllocatedMemory);

impl<AllocU32: Allocator<u32>> Allocable<u32, AllocU32> for H10Buckets<AllocU32> {
    fn new(m: &mut AllocU32, initializer: u32) -> H10Buckets<AllocU32> {
        let mut ret = m.alloc_cell(1 << BUCKET_BITS);
        for item in ret.slice_mut().iter_mut() {
            *item = initializer;
        }
        H10Buckets::<AllocU32>(ret)
    }
    fn new_uninit(m: &mut AllocU32) -> H10Buckets<AllocU32> {
        H10Buckets::<AllocU32>(m.alloc_cell(1 << BUCKET_BITS))
    }
    fn free(&mut self, m: &mut AllocU32) {
        m.free_cell(core::mem::take(&mut self.0));
    }
}

impl<AllocU32: Allocator<u32>> PartialEq<H10Buckets<AllocU32>> for H10Buckets<AllocU32> {
    fn eq(&self, other: &H10Buckets<AllocU32>) -> bool {
        self.0.slice() == other.0.slice()
    }
}

impl<AllocU32: Allocator<u32>> SliceWrapper<u32> for H10Buckets<AllocU32> {
    #[inline(always)]
    fn slice(&self) -> &[u32] {
        self.0.slice()
    }
}
impl<AllocU32: Allocator<u32>> SliceWrapperMut<u32> for H10Buckets<AllocU32> {
    #[inline(always)]
    fn slice_mut(&mut self) -> &mut [u32] {
        self.0.slice_mut()
    }
}

// The "H10" hasher: a binary-tree match finder used by the highest-quality
// (zopfli) backward-reference search. `buckets_` maps a hash of the next four
// bytes to that bucket's tree root, and `forest` stores two u32 child links per
// window position.
pub struct H10<
    AllocU32: Allocator<u32>,
    Buckets: Allocable<u32, AllocU32> + SliceWrapperMut<u32> + SliceWrapper<u32>,
    Params: H10Params,
> where
    Buckets: PartialEq<Buckets>,
{
    pub window_mask_: usize,
    pub common: Struct1,
    pub buckets_: Buckets,
    pub invalid_pos_: u32,
    pub forest: AllocU32::AllocatedMemory,
    pub _params: core::marker::PhantomData<Params>,
}

impl<
        AllocU32: Allocator<u32>,
        Buckets: Allocable<u32, AllocU32> + SliceWrapperMut<u32> + SliceWrapper<u32>,
        Params: H10Params,
    > PartialEq<H10<AllocU32, Buckets, Params>> for H10<AllocU32, Buckets, Params>
where
    Buckets: PartialEq<Buckets>,
{
    fn eq(&self, other: &H10<AllocU32, Buckets, Params>) -> bool {
        self.window_mask_ == other.window_mask_
            && self.common == other.common
            && self.buckets_ == other.buckets_
            && self.invalid_pos_ == other.invalid_pos_
            && self.forest.slice() == other.forest.slice()
            && self._params == other._params
    }
}

pub fn InitializeH10<AllocU32: Allocator<u32>>(
    m32: &mut AllocU32,
    one_shot: bool,
    params: &BrotliEncoderParams,
    input_size: usize,
) -> H10<AllocU32, H10Buckets<AllocU32>, H10DefaultParams> {
    initialize_h10::<AllocU32, H10Buckets<AllocU32>>(m32, one_shot, params, input_size)
}
fn initialize_h10<
    AllocU32: Allocator<u32>,
    Buckets: SliceWrapperMut<u32> + SliceWrapper<u32> + Allocable<u32, AllocU32>,
>(
    m32: &mut AllocU32,
    one_shot: bool,
    params: &BrotliEncoderParams,
    input_size: usize,
) -> H10<AllocU32, Buckets, H10DefaultParams>
where
    Buckets: PartialEq<Buckets>,
{
    let mut num_nodes = 1 << params.lgwin;
    if one_shot && input_size < num_nodes {
        num_nodes = input_size;
    }
    let window_mask = (1 << params.lgwin) - 1;
    // The sentinel is chosen so that an untouched bucket always looks farther
    // away than the window allows, terminating any tree walk immediately.
    let invalid_pos = 0u32.wrapping_sub(window_mask);
    let buckets = <Buckets as Allocable<u32, AllocU32>>::new(m32, invalid_pos);
    H10::<AllocU32, Buckets, H10DefaultParams> {
        common: Struct1 {
            params: params.hasher,
            is_prepared_: 1,
            dict_num_lookups: 0,
            dict_num_matches: 0,
        },
        _params: core::marker::PhantomData::<H10DefaultParams>,
        window_mask_: window_mask as usize,
        invalid_pos_: invalid_pos,
        buckets_: buckets,
        forest: m32.alloc_cell(num_nodes * 2), // two u32 child links per node
    }
}
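
// Test-only sketch of the sentinel set up above (module name is ours): buckets
// start out holding `0u32.wrapping_sub(window_mask)`, so the computed backward
// distance for an untouched bucket falls outside the usable window.
#[cfg(test)]
mod initialize_h10_sketch {
    #[test]
    fn untouched_bucket_is_outside_the_window() {
        let lgwin = 22u32;
        let window_mask = (1u32 << lgwin) - 1;
        let invalid_pos = 0u32.wrapping_sub(window_mask);
        let cur_ix = 12345u32;
        let backward = cur_ix.wrapping_sub(invalid_pos);
        // Larger than any usable backward distance within the window.
        assert!(backward > window_mask);
    }
}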

impl<
        AllocU32: Allocator<u32>,
        Buckets: Allocable<u32, AllocU32> + SliceWrapperMut<u32> + SliceWrapper<u32>,
        Params: H10Params,
    > H10<AllocU32, Buckets, Params>
where
    Buckets: PartialEq<Buckets>,
{
    pub fn free(&mut self, m32: &mut AllocU32) {
        m32.free_cell(core::mem::take(&mut self.forest));
        self.buckets_.free(m32);
    }
}
impl<
        Alloc: alloc::Allocator<u16> + alloc::Allocator<u32>,
        Buckets: Allocable<u32, Alloc> + SliceWrapperMut<u32> + SliceWrapper<u32>,
        Params: H10Params,
    > CloneWithAlloc<Alloc> for H10<Alloc, Buckets, Params>
where
    Buckets: PartialEq<Buckets>,
{
    fn clone_with_alloc(&self, m: &mut Alloc) -> Self {
        let mut ret = H10::<Alloc, Buckets, Params> {
            window_mask_: self.window_mask_,
            common: self.common.clone(),
            buckets_: Buckets::new_uninit(m),
            invalid_pos_: self.invalid_pos_,
            forest: <Alloc as Allocator<u32>>::alloc_cell(m, self.forest.len()),
            _params: core::marker::PhantomData::<Params>,
        };
        ret.buckets_
            .slice_mut()
            .clone_from_slice(self.buckets_.slice());
        ret.forest.slice_mut().clone_from_slice(self.forest.slice());
        ret
    }
}

impl<
        AllocU32: Allocator<u32>,
        Buckets: Allocable<u32, AllocU32> + SliceWrapperMut<u32> + SliceWrapper<u32>,
        Params: H10Params,
    > AnyHasher for H10<AllocU32, Buckets, Params>
where
    Buckets: PartialEq<Buckets>,
{
    #[inline(always)]
    fn Opts(&self) -> H9Opts {
        H9Opts {
            literal_byte_score: 340,
        }
    }
    #[inline(always)]
    fn PrepareDistanceCache(&self, _distance_cache: &mut [i32]) {}
    #[inline(always)]
    fn HashTypeLength(&self) -> usize {
        4
    }
    #[inline(always)]
    fn StoreLookahead(&self) -> usize {
        Params::max_tree_comp_length() as usize
    }
    fn StitchToPreviousBlock(
        &mut self,
        num_bytes: usize,
        position: usize,
        ringbuffer: &[u8],
        ringbuffer_mask: usize,
    ) {
        super::hq::StitchToPreviousBlockH10(self, num_bytes, position, ringbuffer, ringbuffer_mask)
    }
    #[inline(always)]
    fn GetHasherCommon(&mut self) -> &mut Struct1 {
        &mut self.common
    }
    // Hash the next four bytes and keep the top BUCKET_BITS bits as the bucket index.
    #[inline(always)]
    fn HashBytes(&self, data: &[u8]) -> usize {
        let h = BROTLI_UNALIGNED_LOAD32(data).wrapping_mul(kHashMul32);
        (h >> (32i32 - BUCKET_BITS as i32)) as usize
    }
    // Insert position `ix` into its bucket's tree without collecting matches
    // (the empty `matches` slice makes StoreAndFindMatchesH10 update-only).
    #[inline(always)]
    fn Store(&mut self, data: &[u8], mask: usize, ix: usize) {
        let max_backward: usize = self.window_mask_.wrapping_sub(16).wrapping_add(1);
        StoreAndFindMatchesH10(
            self,
            data,
            ix,
            mask,
            Params::max_tree_comp_length() as usize,
            max_backward,
            &mut 0,
            &mut [],
        );
    }
    fn StoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
        let mut i: usize = ix_start;
        let mut j: usize = ix_start;
        // For long ranges only the last 63 positions are stored individually;
        // if the skipped prefix is at least 512 bytes long it is sampled every
        // 8 positions instead.
        if ix_start.wrapping_add(63) <= ix_end {
            i = ix_end.wrapping_sub(63);
        }
        if ix_start.wrapping_add(512) <= i {
            while j < i {
                self.Store(data, mask, j);
                j = j.wrapping_add(8);
            }
        }
        while i < ix_end {
            self.Store(data, mask, i);
            i = i.wrapping_add(1);
        }
    }
    fn BulkStoreRange(&mut self, data: &[u8], mask: usize, ix_start: usize, ix_end: usize) {
        for i in ix_start..ix_end {
            self.Store(data, mask, i);
        }
    }
    fn Prepare(&mut self, _one_shot: bool, _input_size: usize, _data: &[u8]) -> HowPrepared {
        if self.common.is_prepared_ != 0 {
            return HowPrepared::ALREADY_PREPARED;
        }
        let invalid_pos = self.invalid_pos_;
        for bucket in self.buckets_.slice_mut().iter_mut() {
            *bucket = invalid_pos;
        }
        self.common.is_prepared_ = 1;
        HowPrepared::NEWLY_PREPARED
    }

    // The zopfli path drives this hasher through StoreAndFindMatchesH10
    // directly, so the generic single-match entry point is intentionally left
    // unimplemented.
    fn FindLongestMatch(
        &mut self,
        _dictionary: Option<&BrotliDictionary>,
        _dictionary_hash: &[u16],
        _data: &[u8],
        _ring_buffer_mask: usize,
        _distance_cache: &[i32],
        _cur_ix: usize,
        _max_length: usize,
        _max_backward: usize,
        _gap: usize,
        _max_distance: usize,
        _out: &mut HasherSearchResult,
    ) -> bool {
        unimplemented!();
    }
}
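
// Test-only sketch of the bucket hash used by HashBytes (module name is ours):
// the first four bytes are read as a little-endian u32, multiplied by
// kHashMul32, and the top BUCKET_BITS bits select the bucket, so the result
// always indexes within the 1 << BUCKET_BITS table.
#[cfg(test)]
mod hash_bytes_sketch {
    use super::*;

    #[test]
    fn hash_fits_in_bucket_table() {
        let word = u32::from_le_bytes(*b"abcd");
        let h = word.wrapping_mul(kHashMul32);
        let bucket = (h >> (32i32 - BUCKET_BITS as i32)) as usize;
        assert!(bucket < (1usize << BUCKET_BITS));
    }
}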

// A match packed into 64 bits: the backward distance occupies the low 32 bits
// and (length << 5) | length_code the high 32 bits (a zero length code means
// the code equals the copy length).
pub struct BackwardMatch(pub u64);

impl BackwardMatch {
    #[inline(always)]
    pub fn distance(&self) -> u32 {
        self.0 as u32
    }
    #[inline(always)]
    pub fn length_and_code(&self) -> u32 {
        (self.0 >> 32) as u32
    }
}

// Mutable view over the same packing, used to write matches in place inside a
// caller-provided &mut [u64] buffer.
pub struct BackwardMatchMut<'a>(pub &'a mut u64);

impl<'a> BackwardMatchMut<'a> {
    #[inline(always)]
    pub fn distance(&self) -> u32 {
        *self.0 as u32
    }
    #[inline(always)]
    pub fn length_and_code(&self) -> u32 {
        (*self.0 >> 32) as u32
    }
    #[inline(always)]
    pub fn set_distance(&mut self, data: u32) {
        *self.0 &= 0xffffffff00000000;
        *self.0 |= u64::from(data)
    }
    #[inline(always)]
    pub fn set_length_and_code(&mut self, data: u32) {
        *self.0 = u64::from((*self.0) as u32) | (u64::from(data) << 32);
    }
}

#[inline(always)]
pub fn InitBackwardMatch(xself: &mut BackwardMatchMut, dist: usize, len: usize) {
    xself.set_distance(dist as u32);
    xself.set_length_and_code((len << 5) as u32);
}
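
// Test-only round trip of the packing described above (module name is ours).
#[cfg(test)]
mod backward_match_sketch {
    use super::*;

    #[test]
    fn init_backward_match_packs_distance_and_length() {
        let mut raw = 0u64;
        InitBackwardMatch(&mut BackwardMatchMut(&mut raw), 1234, 7);
        let m = BackwardMatch(raw);
        assert_eq!(m.distance(), 1234);
        assert_eq!(m.length_and_code(), 7u32 << 5);
    }
}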

macro_rules! LeftChildIndexH10 {
    ($xself: expr, $pos: expr) => {
        (2usize).wrapping_mul($pos & (*$xself).window_mask_)
    };
}
macro_rules! RightChildIndexH10 {
    ($xself: expr, $pos: expr) => {
        (2usize)
            .wrapping_mul($pos & (*$xself).window_mask_)
            .wrapping_add(1)
    };
}
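
// Test-only sketch of the arithmetic behind the two macros above (module name
// is ours): every window position owns two adjacent u32 slots in `forest`, the
// left child link at 2 * (pos & window_mask) and the right child link directly
// after it, which is why the forest holds two entries per node.
#[cfg(test)]
mod child_index_sketch {
    #[test]
    fn child_slots_are_adjacent_and_in_bounds() {
        let window_mask = (1usize << 10) - 1;
        let pos = 5000usize; // wraps around the window
        let left = 2usize.wrapping_mul(pos & window_mask);
        let right = 2usize.wrapping_mul(pos & window_mask).wrapping_add(1);
        assert_eq!(right, left + 1);
        assert!(right < 2 * (window_mask + 1));
    }
}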

// Insert `cur_ix` into the binary tree for its hash bucket and, while walking
// the existing tree, record every match that improves on `*best_len` into
// `matches` (packed as BackwardMatch u64 values). Returns the number of matches
// written; Store() calls this with an empty slice to update the tree only.
pub fn StoreAndFindMatchesH10<
    AllocU32: Allocator<u32>,
    Buckets: Allocable<u32, AllocU32> + SliceWrapperMut<u32> + SliceWrapper<u32>,
    Params: H10Params,
>(
    xself: &mut H10<AllocU32, Buckets, Params>,
    data: &[u8],
    cur_ix: usize,
    ring_buffer_mask: usize,
    max_length: usize,
    max_backward: usize,
    best_len: &mut usize,
    matches: &mut [u64],
) -> usize
where
    Buckets: PartialEq<Buckets>,
{
    let mut matches_offset = 0usize;
    let cur_ix_masked: usize = cur_ix & ring_buffer_mask;
    let max_comp_len: usize = core::cmp::min(max_length, 128usize);
    let should_reroot_tree = max_length >= 128;
    let key = xself.HashBytes(&data[cur_ix_masked..]);
    let forest: &mut [u32] = xself.forest.slice_mut();
    let mut prev_ix: usize = xself.buckets_.slice()[key] as usize;
    let mut node_left: usize = LeftChildIndexH10!(xself, cur_ix);
    let mut node_right: usize = RightChildIndexH10!(xself, cur_ix);
    let mut best_len_left: usize = 0usize;
    let mut best_len_right: usize = 0usize;
    let mut depth_remaining: usize;
    if should_reroot_tree {
        xself.buckets_.slice_mut()[key] = cur_ix as u32;
    }
    // Walk at most 64 tree levels before giving up.
    depth_remaining = 64usize;
    'break16: loop {
        {
            let backward: usize = cur_ix.wrapping_sub(prev_ix);
            let prev_ix_masked: usize = prev_ix & ring_buffer_mask;
            if backward == 0usize || backward > max_backward || depth_remaining == 0usize {
                if should_reroot_tree {
                    forest[node_left] = xself.invalid_pos_;
                    forest[node_right] = xself.invalid_pos_;
                }
                break 'break16;
            }
            {
                let cur_len: usize = core::cmp::min(best_len_left, best_len_right);

                let len: usize = cur_len.wrapping_add(FindMatchLengthWithLimit(
                    &data[cur_ix_masked.wrapping_add(cur_len)..],
                    &data[prev_ix_masked.wrapping_add(cur_len)..],
                    max_length.wrapping_sub(cur_len),
                ));
                if matches_offset != matches.len() && (len > *best_len) {
                    *best_len = len;
                    InitBackwardMatch(
                        &mut BackwardMatchMut(&mut matches[matches_offset]),
                        backward,
                        len,
                    );
                    matches_offset += 1;
                }
                if len >= max_comp_len {
                    if should_reroot_tree {
                        forest[node_left] = forest[LeftChildIndexH10!(xself, prev_ix)];
                        forest[node_right] = forest[RightChildIndexH10!(xself, prev_ix)];
                    }
                    break 'break16;
                }
                // Compare the first differing byte to decide whether to descend
                // into the right or the left subtree of the visited node.
                if data[cur_ix_masked.wrapping_add(len)] as i32
                    > data[prev_ix_masked.wrapping_add(len)] as i32
                {
                    best_len_left = len;
                    if should_reroot_tree {
                        forest[node_left] = prev_ix as u32;
                    }
                    node_left = RightChildIndexH10!(xself, prev_ix);
                    prev_ix = forest[node_left] as usize;
                } else {
                    best_len_right = len;
                    if should_reroot_tree {
                        forest[node_right] = prev_ix as u32;
                    }
                    node_right = LeftChildIndexH10!(xself, prev_ix);
                    prev_ix = forest[node_right] as usize;
                }
            }
        }
        depth_remaining = depth_remaining.wrapping_sub(1);
    }
    matches_offset
}
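
// Call-site note (ours, summarizing the code above): Store() passes an empty
// `matches` slice so only the tree is updated, while the zopfli match-finding
// path passes a real &mut [u64] buffer and uses the returned count to know how
// many packed BackwardMatch entries were produced.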