#![allow(dead_code)]
use super::backward_references::kHashMul32;
use super::brotli_bit_stream::{BrotliBuildAndStoreHuffmanTreeFast, BrotliStoreHuffmanTree};
use super::super::alloc;
use super::compress_fragment_two_pass::{memcpy, BrotliStoreMetaBlockHeader, BrotliWriteBits};
use super::entropy_encode::{
    BrotliConvertBitDepthsToSymbols, BrotliCreateHuffmanTree, HuffmanTree, NewHuffmanTree,
};
use super::static_dict::{
    FindMatchLengthWithLimit, BROTLI_UNALIGNED_LOAD32, BROTLI_UNALIGNED_LOAD64,
};
use super::util::{brotli_min_size_t, brotli_min_uint32_t, FastLog2, Log2FloorNonZero};

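// Seed for the command histogram: most command/distance symbols start at 1 so
// that every symbol the emitters below can produce keeps a nonzero count and
// therefore receives a depth in the Huffman code built from the histogram.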
static kCmdHistoSeed: [u32; 128] = [
    0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
    1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
    1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0,
];

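// Multiplicative hash of the 5 bytes at `p` (the `<< 24` drops the top 3
// bytes of the 64-bit load); `shift` is 64 - table_bits, so the result
// indexes a table of `1 << table_bits` entries.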
fn Hash(p: &[u8], shift: usize) -> u32 {
    let h: u64 = (BROTLI_UNALIGNED_LOAD64(p) << 24).wrapping_mul(kHashMul32 as u64);
    (h >> shift) as u32
}

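// Returns true if the first 5 bytes of `p1` and `p2` are equal.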
fn IsMatch(p1: &[u8], p2: &[u8]) -> bool {
    BROTLI_UNALIGNED_LOAD32(p1) == BROTLI_UNALIGNED_LOAD32(p2) && p1[4] == p2[4]
}

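// Builds a literal histogram of `input` (sampled when the input is 32KiB or
// larger), builds and stores the literal prefix code, and returns the
// "literal ratio": the average literal cost in bits, scaled by 125.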
fn BuildAndStoreLiteralPrefixCode<AllocHT: alloc::Allocator<HuffmanTree>>(
    mht: &mut AllocHT,
    input: &[u8],
    input_size: usize,
    depths: &mut [u8],
    bits: &mut [u16],
    storage_ix: &mut usize,
    storage: &mut [u8],
) -> usize {
    let mut histogram: [u32; 256] = [0; 256];
    let mut histogram_total: usize;
    if input_size < (1usize << 15) {
        // Small input: histogram every byte.
        for i in 0..input_size {
            histogram[input[i] as usize] = histogram[input[i] as usize].wrapping_add(1);
        }
        histogram_total = input_size;
        for i in 0..256 {
            // Smooth the histogram slightly so rare symbols keep short codes.
            let adjust: u32 = 2u32.wrapping_mul(brotli_min_uint32_t(histogram[i], 11u32));
            histogram[i] = histogram[i].wrapping_add(adjust);
            histogram_total = histogram_total.wrapping_add(adjust as usize);
        }
    } else {
        // Large input: sample every 29th byte to keep this cheap.
        static kSampleRate: usize = 29;
        let mut i = 0usize;
        while i < input_size {
            histogram[input[i] as usize] = histogram[input[i] as usize].wrapping_add(1);
            i = i.wrapping_add(kSampleRate);
        }
        histogram_total = input_size
            .wrapping_add(kSampleRate)
            .wrapping_sub(1)
            .wrapping_div(kSampleRate);
        for i in 0..256 {
            // Smooth the histogram more aggressively, since it is only a sample.
            let adjust: u32 =
                1u32.wrapping_add(2u32.wrapping_mul(brotli_min_uint32_t(histogram[i], 11u32)));
            histogram[i] = histogram[i].wrapping_add(adjust);
            histogram_total = histogram_total.wrapping_add(adjust as usize);
        }
    }
    BrotliBuildAndStoreHuffmanTreeFast(
        mht,
        &mut histogram[..],
        histogram_total,
        8usize,
        depths,
        bits,
        storage_ix,
        storage,
    );
    // Estimate the average bit cost per literal, scaled by 125.
    let mut literal_ratio: usize = 0;
    for i in 0..256 {
        if histogram[i] != 0 {
            literal_ratio =
                literal_ratio.wrapping_add(histogram[i].wrapping_mul(depths[i] as u32) as usize);
        }
    }
    literal_ratio
        .wrapping_mul(125)
        .wrapping_div(histogram_total)
}
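// States of the block-emission state machine in
// BrotliCompressFragmentFastImpl below.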
#[derive(PartialEq, Eq, Copy, Clone)]
pub enum CodeBlockState {
    EMIT_REMAINDER,
    EMIT_COMMANDS,
    NEXT_BLOCK,
}

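// Emits the insert-length command code and its extra bits for insert lengths
// below 6210, and updates the command histogram.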
fn EmitInsertLen(
    insertlen: usize,
    depth: &[u8],
    bits: &[u16],
    histo: &mut [u32],
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    if insertlen < 6 {
        let code: usize = insertlen.wrapping_add(40);
        BrotliWriteBits(depth[code] as usize, bits[code] as u64, storage_ix, storage);
        histo[code] = histo[code].wrapping_add(1);
    } else if insertlen < 130 {
        let tail: usize = insertlen.wrapping_sub(2);
        let nbits: u32 = Log2FloorNonZero(tail as u64).wrapping_sub(1);
        let prefix: usize = tail >> nbits;
        let inscode: usize = ((nbits << 1) as usize)
            .wrapping_add(prefix)
            .wrapping_add(42);
        BrotliWriteBits(
            depth[inscode] as usize,
            bits[inscode] as u64,
            storage_ix,
            storage,
        );
        BrotliWriteBits(
            nbits as usize,
            (tail as u64).wrapping_sub((prefix as u64) << nbits),
            storage_ix,
            storage,
        );
        histo[inscode] = histo[inscode].wrapping_add(1);
    } else if insertlen < 2114 {
        let tail: usize = insertlen.wrapping_sub(66);
        let nbits: u32 = Log2FloorNonZero(tail as u64);
        let code: usize = nbits.wrapping_add(50) as usize;
        BrotliWriteBits(depth[code] as usize, bits[code] as u64, storage_ix, storage);
        BrotliWriteBits(
            nbits as usize,
            (tail as u64).wrapping_sub(1 << nbits),
            storage_ix,
            storage,
        );
        histo[code] = histo[code].wrapping_add(1);
    } else {
        BrotliWriteBits(depth[61] as usize, bits[61] as u64, storage_ix, storage);
        BrotliWriteBits(
            12usize,
            (insertlen as u64).wrapping_sub(2114),
            storage_ix,
            storage,
        );
        histo[61] = histo[61].wrapping_add(1);
    }
}

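// Heuristic for falling back to an uncompressed meta-block: the pending
// insert run must dominate the bytes emitted since the meta-block started
// (`delta`), and literals must be expensive to code (ratio above 980, i.e.
// an average cost of more than 7.84 bits per literal).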
fn ShouldUseUncompressedMode(delta: isize, insertlen: usize, literal_ratio: usize) -> bool {
    let compressed = delta as usize;
    compressed.wrapping_mul(50) <= insertlen && literal_ratio > 980
}
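// Rewinds the bit writer to `new_storage_ix`, clearing any partially written
// bits in the byte at the new position.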
fn RewindBitPosition(new_storage_ix: usize, storage_ix: &mut usize, storage: &mut [u8]) {
    let bitpos: usize = new_storage_ix & 7;
    let mask: usize = (1u32 << bitpos).wrapping_sub(1) as usize;
    storage[new_storage_ix >> 3] &= mask as u8;
    *storage_ix = new_storage_ix;
}

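// Replaces everything written since `storage_ix_start` with an uncompressed
// meta-block containing the `len` bytes at `begin`.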
fn EmitUncompressedMetaBlock(
    begin: &[u8],
    len: usize,
    storage_ix_start: usize,
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    RewindBitPosition(storage_ix_start, storage_ix, storage);
    BrotliStoreMetaBlockHeader(len, 1i32, storage_ix, storage);
    *storage_ix = storage_ix.wrapping_add(7u32 as usize) & !7u32 as usize;
    memcpy(storage, *storage_ix >> 3, begin, 0, len);
    *storage_ix = storage_ix.wrapping_add(len << 3);
    storage[*storage_ix >> 3] = 0u8;
}

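// Companion to EmitInsertLen for insert lengths of 6210 and above (command
// codes 62 and 63).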
fn EmitLongInsertLen(
    insertlen: usize,
    depth: &[u8],
    bits: &[u16],
    histo: &mut [u32],
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    if insertlen < 22594 {
        BrotliWriteBits(depth[62] as usize, bits[62] as u64, storage_ix, storage);
        BrotliWriteBits(
            14usize,
            (insertlen as u64).wrapping_sub(6210),
            storage_ix,
            storage,
        );
        histo[62] = histo[62].wrapping_add(1);
    } else {
        BrotliWriteBits(depth[63] as usize, bits[63] as u64, storage_ix, storage);
        BrotliWriteBits(
            24usize,
            (insertlen as u64).wrapping_sub(22594),
            storage_ix,
            storage,
        );
        histo[63] = histo[63].wrapping_add(1);
    }
}

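// Writes `len` literals from `input` using the given literal prefix code.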
fn EmitLiterals(
    input: &[u8],
    len: usize,
    depth: &[u8],
    bits: &[u16],
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    for j in 0..len {
        let lit: u8 = input[j];
        BrotliWriteBits(
            depth[lit as usize] as usize,
            bits[lit as usize] as u64,
            storage_ix,
            storage,
        );
    }
}

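// Emits the distance code and its extra bits for `distance`, and updates the
// command histogram.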
fn EmitDistance(
    distance: usize,
    depth: &[u8],
    bits: &[u16],
    histo: &mut [u32],
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    let d: u64 = distance.wrapping_add(3) as u64;
    let nbits: u32 = Log2FloorNonZero(d).wrapping_sub(1);
    let prefix: u64 = (d >> nbits) & 1;
    let offset: u64 = (2u64).wrapping_add(prefix) << nbits;
    let distcode: u64 = ((2u32).wrapping_mul(nbits.wrapping_sub(1)) as u64)
        .wrapping_add(prefix)
        .wrapping_add(80);
    BrotliWriteBits(
        depth[distcode as usize] as usize,
        bits[distcode as usize] as u64,
        storage_ix,
        storage,
    );
    BrotliWriteBits(nbits as usize, d.wrapping_sub(offset), storage_ix, storage);
    histo[distcode as usize] = histo[distcode as usize].wrapping_add(1);
}

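// Emits the copy-length code for a copy that reuses the last distance. The
// two shortest ranges use command codes that imply the last distance; the
// longer ranges additionally emit symbol 64, the explicit last-distance code.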
fn EmitCopyLenLastDistance(
    copylen: usize,
    depth: &[u8],
    bits: &[u16],
    histo: &mut [u32],
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    if copylen < 12 {
        BrotliWriteBits(
            depth[copylen.wrapping_sub(4)] as usize,
            bits[copylen.wrapping_sub(4)] as u64,
            storage_ix,
            storage,
        );
        histo[copylen.wrapping_sub(4)] = histo[copylen.wrapping_sub(4)].wrapping_add(1);
    } else if copylen < 72 {
        let tail: usize = copylen.wrapping_sub(8);
        let nbits: u32 = Log2FloorNonZero(tail as u64).wrapping_sub(1);
        let prefix: usize = tail >> nbits;
        let code: usize = ((nbits << 1) as usize).wrapping_add(prefix).wrapping_add(4);
        BrotliWriteBits(depth[code] as usize, bits[code] as u64, storage_ix, storage);
        BrotliWriteBits(
            nbits as usize,
            tail.wrapping_sub(prefix << nbits) as u64,
            storage_ix,
            storage,
        );
        histo[code] = histo[code].wrapping_add(1);
    } else if copylen < 136 {
        let tail: usize = copylen.wrapping_sub(8);
        let code: usize = (tail >> 5).wrapping_add(30);
        BrotliWriteBits(depth[code] as usize, bits[code] as u64, storage_ix, storage);
        BrotliWriteBits(5usize, tail as u64 & 31, storage_ix, storage);
        BrotliWriteBits(depth[64] as usize, bits[64] as u64, storage_ix, storage);
        histo[code] = histo[code].wrapping_add(1);
        histo[64] = histo[64].wrapping_add(1);
    } else if copylen < 2120 {
        let tail: usize = copylen.wrapping_sub(72);
        let nbits: u32 = Log2FloorNonZero(tail as u64);
        let code: usize = nbits.wrapping_add(28) as usize;
        BrotliWriteBits(depth[code] as usize, bits[code] as u64, storage_ix, storage);
        BrotliWriteBits(
            nbits as usize,
            (tail as u64).wrapping_sub(1u64 << nbits),
            storage_ix,
            storage,
        );
        BrotliWriteBits(depth[64] as usize, bits[64] as u64, storage_ix, storage);
        histo[code] = histo[code].wrapping_add(1);
        histo[64] = histo[64].wrapping_add(1);
    } else {
        BrotliWriteBits(depth[39] as usize, bits[39] as u64, storage_ix, storage);
        BrotliWriteBits(
            24usize,
            copylen.wrapping_sub(2120) as u64,
            storage_ix,
            storage,
        );
        BrotliWriteBits(depth[64] as usize, bits[64] as u64, storage_ix, storage);
        histo[39] = histo[39].wrapping_add(1);
        histo[64] = histo[64].wrapping_add(1);
    }
}

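// Like Hash, but hashes the 5 bytes of `v` starting at byte `offset`
// (0 <= offset <= 3).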
fn HashBytesAtOffset(v: u64, offset: i32, shift: usize) -> u32 {
    let h: u64 = ((v >> (8i32 * offset)) << 24).wrapping_mul(kHashMul32 as u64);
    (h >> shift) as u32
}

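// Emits the copy-length code and its extra bits for a copy with an explicit
// distance, and updates the command histogram.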
fn EmitCopyLen(
    copylen: usize,
    depth: &[u8],
    bits: &[u16],
    histo: &mut [u32],
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    if copylen < 10 {
        BrotliWriteBits(
            depth[copylen.wrapping_add(14)] as usize,
            bits[copylen.wrapping_add(14)] as u64,
            storage_ix,
            storage,
        );
        histo[copylen.wrapping_add(14)] = histo[copylen.wrapping_add(14)].wrapping_add(1);
    } else if copylen < 134 {
        let tail: usize = copylen.wrapping_sub(6);
        let nbits: u32 = Log2FloorNonZero(tail as u64).wrapping_sub(1);
        let prefix: usize = tail >> nbits;
        let code: usize = ((nbits << 1) as usize)
            .wrapping_add(prefix)
            .wrapping_add(20);
        BrotliWriteBits(depth[code] as usize, bits[code] as u64, storage_ix, storage);
        BrotliWriteBits(
            nbits as usize,
            (tail as u64).wrapping_sub((prefix as u64) << nbits),
            storage_ix,
            storage,
        );
        histo[code] = histo[code].wrapping_add(1);
    } else if copylen < 2118 {
        let tail: usize = copylen.wrapping_sub(70);
        let nbits: u32 = Log2FloorNonZero(tail as u64);
        let code: usize = nbits.wrapping_add(28) as usize;
        BrotliWriteBits(depth[code] as usize, bits[code] as u64, storage_ix, storage);
        BrotliWriteBits(
            nbits as usize,
            (tail as u64).wrapping_sub(1 << nbits),
            storage_ix,
            storage,
        );
        histo[code] = histo[code].wrapping_add(1);
    } else {
        BrotliWriteBits(depth[39] as usize, bits[39] as u64, storage_ix, storage);
        BrotliWriteBits(
            24usize,
            (copylen as u64).wrapping_sub(2118),
            storage_ix,
            storage,
        );
        histo[39] = histo[39].wrapping_add(1);
    }
}

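// Estimates, from a sampled histogram of `data`, whether coding the next
// block with the literal depths already stored would cost no more than
// roughly the sample's entropy plus a fixed margin; if so, the caller merges
// the block into the current meta-block.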
fn ShouldMergeBlock(data: &[u8], len: usize, depths: &[u8]) -> bool {
    let mut histo: [usize; 256] = [0; 256];
    static kSampleRate: usize = 43;
    let mut i = 0usize;
    while i < len {
        histo[data[i] as usize] = histo[data[i] as usize].wrapping_add(1);
        i = i.wrapping_add(kSampleRate);
    }
    let total: usize = len
        .wrapping_add(kSampleRate)
        .wrapping_sub(1)
        .wrapping_div(kSampleRate);
    let mut r: super::util::floatX = (FastLog2(total as u64) + 0.5 as super::util::floatX)
        * total as super::util::floatX
        + 200i32 as super::util::floatX;
    for i in 0..256 {
        r -= histo[i] as super::util::floatX
            * (depths[i] as super::util::floatX + FastLog2(histo[i] as u64));
    }
    r >= 0.0
}

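// Overwrites `n_bits` bits in `array` starting at bit position `pos` with the
// low bits of `bits`; used to patch the meta-block length in place.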
fn UpdateBits(mut n_bits: usize, mut bits: u32, mut pos: usize, array: &mut [u8]) {
    while n_bits > 0 {
        let byte_pos: usize = pos >> 3;
        let n_unchanged_bits: usize = pos & 7;
        let n_changed_bits: usize =
            brotli_min_size_t(n_bits, (8usize).wrapping_sub(n_unchanged_bits));
        let total_bits: usize = n_unchanged_bits.wrapping_add(n_changed_bits);
        let mask: u32 =
            !(1u32 << total_bits).wrapping_sub(1) | (1u32 << n_unchanged_bits).wrapping_sub(1);
        let unchanged_bits: u32 = array[byte_pos] as u32 & mask;
        let changed_bits: u32 = bits & (1u32 << n_changed_bits).wrapping_sub(1);
        array[byte_pos] = (changed_bits << n_unchanged_bits | unchanged_bits) as u8;
        n_bits = n_bits.wrapping_sub(n_changed_bits);
        bits >>= n_changed_bits;
        pos = pos.wrapping_add(n_changed_bits);
    }
}

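// Builds prefix codes for the 64 command and 64 distance symbols of
// `histogram`, fills in `depth`/`bits`, and stores both code trees.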
fn BuildAndStoreCommandPrefixCode(
    histogram: &[u32],
    depth: &mut [u8],
    bits: &mut [u16],
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    let mut tree: [HuffmanTree; 129] = [NewHuffmanTree(0, 0, 0); 129];
    let mut cmd_depth: [u8; 704] = [0u8; 704];
    let mut cmd_bits: [u16; 64] = [0; 64];
    BrotliCreateHuffmanTree(histogram, 64usize, 15i32, &mut tree[..], depth);
    BrotliCreateHuffmanTree(
        &histogram[64..],
        64usize,
        14i32,
        &mut tree[..],
        &mut depth[64..],
    );
    // Compute the command bit patterns from a reordered copy of the depths,
    // then map the resulting bits back to the original symbol order.
    memcpy(&mut cmd_depth[..], 0, depth, 0, 24usize);
    memcpy(&mut cmd_depth[..], 24usize, depth, 40usize, 8usize);
    memcpy(&mut cmd_depth[..], 32usize, depth, 24usize, 8usize);
    memcpy(&mut cmd_depth[..], 40usize, depth, 48usize, 8usize);
    memcpy(&mut cmd_depth[..], 48usize, depth, 32usize, 8usize);
    memcpy(&mut cmd_depth[..], 56usize, depth, 56usize, 8usize);
    BrotliConvertBitDepthsToSymbols(&mut cmd_depth[..], 64usize, &mut cmd_bits[..]);
    memcpy(bits, 0, &cmd_bits[..], 0, 24usize);
    memcpy(bits, 24usize, &cmd_bits[..], 32usize, 8usize);
    memcpy(bits, 32usize, &cmd_bits[..], 48usize, 8usize);
    memcpy(bits, 40usize, &cmd_bits[..], 24usize, 8usize);
    memcpy(bits, 48usize, &cmd_bits[..], 40usize, 8usize);
    memcpy(bits, 56usize, &cmd_bits[..], 56usize, 8usize);
    BrotliConvertBitDepthsToSymbols(&mut depth[64..], 64usize, &mut bits[64..]);
    {
        // Copy the 64 command depths into the positions they occupy in the
        // full 704-symbol command alphabet before storing the tree.
        for item in cmd_depth[..64].iter_mut() {
            *item = 0;
        }
        memcpy(&mut cmd_depth[..], 0, depth, 0, 8usize);
        memcpy(&mut cmd_depth[..], 64usize, depth, 8usize, 8usize);
        memcpy(&mut cmd_depth[..], 128usize, depth, 16usize, 8usize);
        memcpy(&mut cmd_depth[..], 192usize, depth, 24usize, 8usize);
        memcpy(&mut cmd_depth[..], 384usize, depth, 32usize, 8usize);
        for i in 0..8usize {
            cmd_depth[(128usize).wrapping_add((8usize).wrapping_mul(i))] =
                depth[i.wrapping_add(40)];
            cmd_depth[(256usize).wrapping_add((8usize).wrapping_mul(i))] =
                depth[i.wrapping_add(48)];
            cmd_depth[(448usize).wrapping_add((8usize).wrapping_mul(i))] =
                depth[i.wrapping_add(56)];
        }
        BrotliStoreHuffmanTree(
            &mut cmd_depth[..],
            704usize,
            &mut tree[..],
            storage_ix,
            storage,
        );
    }
    BrotliStoreHuffmanTree(
        &mut depth[64..],
        64usize,
        &mut tree[..],
        storage_ix,
        storage,
    );
}

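// Core of the fast one-pass compressor: finds matches with a
// `1 << table_bits`-entry hash table and emits commands meta-block by
// meta-block, driven by the CodeBlockState state machine.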
#[allow(unused_assignments)]
fn BrotliCompressFragmentFastImpl<AllocHT: alloc::Allocator<HuffmanTree>>(
    m: &mut AllocHT,
    input_ptr: &[u8],
    mut input_size: usize,
    is_last: i32,
    table: &mut [i32],
    table_bits: usize,
    cmd_depth: &mut [u8],
    cmd_bits: &mut [u16],
    cmd_code_numbits: &mut usize,
    cmd_code: &mut [u8],
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    let mut cmd_histo = [0u32; 128];
    let mut ip_end = 0usize;
    let mut next_emit = 0usize;
    let base_ip = 0usize;
    static kFirstBlockSize: usize = 3 << 15;
    static kMergeBlockSize: usize = 1 << 16;
    let kInputMarginBytes = 16usize;
    let kMinMatchLen = 5usize;
    let mut metablock_start = 0usize;
    let mut block_size = brotli_min_size_t(input_size, kFirstBlockSize);
    let mut total_block_size = block_size;
    // Bit position of the MLEN field of the current meta-block header; kept
    // so the length can be patched in place when blocks are merged.
    let mut mlen_storage_ix = storage_ix.wrapping_add(3);
    let mut lit_depth = [0u8; 256];
    let mut lit_bits = [0u16; 256];
    let mut literal_ratio: usize;
    let mut input_index = 0usize;
    let mut last_distance: i32;
    let shift: usize = (64usize).wrapping_sub(table_bits);
    BrotliStoreMetaBlockHeader(block_size, 0i32, storage_ix, storage);
    // No block splits, no contexts.
    BrotliWriteBits(13usize, 0, storage_ix, storage);
    literal_ratio = BuildAndStoreLiteralPrefixCode(
        m,
        &input_ptr[input_index..],
        block_size,
        &mut lit_depth[..],
        &mut lit_bits[..],
        storage_ix,
        storage,
    );
    // Store the pre-compressed command and distance prefix codes.
    {
        let mut i = 0usize;
        while i.wrapping_add(7) < *cmd_code_numbits {
            BrotliWriteBits(8usize, cmd_code[i >> 3] as u64, storage_ix, storage);
            i = i.wrapping_add(8);
        }
    }
    BrotliWriteBits(
        *cmd_code_numbits & 7usize,
        cmd_code[*cmd_code_numbits >> 3] as u64,
        storage_ix,
        storage,
    );
    let mut code_block_selection: CodeBlockState = CodeBlockState::EMIT_COMMANDS;
    'continue_to_next_block: loop {
        let mut ip_index: usize;
        if code_block_selection == CodeBlockState::EMIT_COMMANDS {
            // Seed the command histogram; it is used to build the prefix
            // code for the next block.
            cmd_histo[..128].clone_from_slice(&kCmdHistoSeed[..]);
            ip_index = input_index;
            last_distance = -1i32;
            ip_end = input_index.wrapping_add(block_size);
            if block_size >= kInputMarginBytes {
                // Keep a margin so match extension and the 5-byte IsMatch
                // probe never read past the end of the input.
                let len_limit: usize = brotli_min_size_t(
                    block_size.wrapping_sub(kMinMatchLen),
                    input_size.wrapping_sub(kInputMarginBytes),
                );
                let ip_limit: usize = input_index.wrapping_add(len_limit);
                ip_index = ip_index.wrapping_add(1);
                let mut next_hash = Hash(&input_ptr[ip_index..], shift);
                loop {
                    let mut skip = 32u32;
                    let mut next_ip = ip_index;
                    let mut candidate = 0usize;
                    loop {
                        'break15: loop {
                            let hash = next_hash;
                            // Probe ever more sparsely the longer we go
                            // without finding a match (skip / 32 bytes).
                            let bytes_between_hash_lookups: u32 = skip >> 5;
                            skip = skip.wrapping_add(1);
                            ip_index = next_ip;
                            next_ip = ip_index.wrapping_add(bytes_between_hash_lookups as usize);
                            if next_ip > ip_limit {
                                code_block_selection = CodeBlockState::EMIT_REMAINDER;
                                break 'break15;
                            }
                            next_hash = Hash(&input_ptr[next_ip..], shift);
                            // First try the last distance, then the hash table.
                            candidate = ip_index.wrapping_sub(last_distance as usize);
                            if IsMatch(&input_ptr[ip_index..], &input_ptr[candidate..])
                                && candidate < ip_index
                            {
                                table[hash as usize] = ip_index.wrapping_sub(base_ip) as i32;
                                break 'break15;
                            }
                            candidate = base_ip.wrapping_add(table[hash as usize] as usize);
                            table[hash as usize] = ip_index.wrapping_sub(base_ip) as i32;
                            if IsMatch(&input_ptr[ip_index..], &input_ptr[candidate..]) {
                                break;
                            }
                        }
                        // Retry if the match distance cannot be encoded.
                        if !(ip_index.wrapping_sub(candidate) > (1usize << 18).wrapping_sub(16)
                            && code_block_selection == CodeBlockState::EMIT_COMMANDS)
                        {
                            break;
                        }
                    }
                    if code_block_selection != CodeBlockState::EMIT_COMMANDS {
                        break;
                    }
                    {
                        // Emit the pending literals as an insert, then the copy.
                        let base: usize = ip_index;
                        let matched = (5usize).wrapping_add(FindMatchLengthWithLimit(
                            &input_ptr[candidate + 5..],
                            &input_ptr[ip_index + 5..],
                            ip_end.wrapping_sub(ip_index).wrapping_sub(5),
                        ));
                        let distance = base.wrapping_sub(candidate) as i32;
                        let insert = base.wrapping_sub(next_emit);
                        ip_index = ip_index.wrapping_add(matched);
                        if insert < 6210 {
                            EmitInsertLen(
                                insert,
                                cmd_depth,
                                cmd_bits,
                                &mut cmd_histo[..],
                                storage_ix,
                                storage,
                            );
                        } else if ShouldUseUncompressedMode(
                            (next_emit as isize) - (metablock_start as isize),
                            insert,
                            literal_ratio,
                        ) {
                            EmitUncompressedMetaBlock(
                                &input_ptr[metablock_start..],
                                base.wrapping_sub(metablock_start),
                                mlen_storage_ix.wrapping_sub(3),
                                storage_ix,
                                storage,
                            );
                            input_size = input_size.wrapping_sub(base.wrapping_sub(input_index));
                            input_index = base;
                            next_emit = input_index;
                            code_block_selection = CodeBlockState::NEXT_BLOCK;
                            continue 'continue_to_next_block;
                        } else {
                            EmitLongInsertLen(
                                insert,
                                cmd_depth,
                                cmd_bits,
                                &mut cmd_histo[..],
                                storage_ix,
                                storage,
                            );
                        }
                        EmitLiterals(
                            &input_ptr[next_emit..],
                            insert,
                            &lit_depth[..],
                            &lit_bits[..],
                            storage_ix,
                            storage,
                        );
                        if distance == last_distance {
                            // Same distance as the previous copy: emit the
                            // explicit last-distance symbol 64.
                            BrotliWriteBits(
                                cmd_depth[64] as usize,
                                cmd_bits[64] as u64,
                                storage_ix,
                                storage,
                            );
                            cmd_histo[64] = cmd_histo[64].wrapping_add(1);
                        } else {
                            EmitDistance(
                                distance as usize,
                                cmd_depth,
                                cmd_bits,
                                &mut cmd_histo[..],
                                storage_ix,
                                storage,
                            );
                            last_distance = distance;
                        }
                        EmitCopyLenLastDistance(
                            matched,
                            cmd_depth,
                            cmd_bits,
                            &mut cmd_histo[..],
                            storage_ix,
                            storage,
                        );
                        next_emit = ip_index;
                        if ip_index >= ip_limit {
                            code_block_selection = CodeBlockState::EMIT_REMAINDER;
                            continue 'continue_to_next_block;
                        }
                        // Re-seed the hash table with positions covering the
                        // last bytes of the match.
                        {
                            assert!(ip_index >= 3);
                            let input_bytes: u64 =
                                BROTLI_UNALIGNED_LOAD64(&input_ptr[ip_index - 3..]);
                            let mut prev_hash: u32 = HashBytesAtOffset(input_bytes, 0i32, shift);
                            let cur_hash: u32 = HashBytesAtOffset(input_bytes, 3i32, shift);
                            table[prev_hash as usize] =
                                ip_index.wrapping_sub(base_ip).wrapping_sub(3) as i32;
                            prev_hash = HashBytesAtOffset(input_bytes, 1i32, shift);
                            table[prev_hash as usize] =
                                ip_index.wrapping_sub(base_ip).wrapping_sub(2) as i32;
                            prev_hash = HashBytesAtOffset(input_bytes, 2i32, shift);
                            table[prev_hash as usize] =
                                ip_index.wrapping_sub(base_ip).wrapping_sub(1) as i32;
                            candidate = base_ip.wrapping_add(table[cur_hash as usize] as usize);
                            table[cur_hash as usize] = ip_index.wrapping_sub(base_ip) as i32;
                        }
                    }
                    while IsMatch(&input_ptr[ip_index..], &input_ptr[candidate..]) {
                        // The next position matches immediately: emit the
                        // copy with no intervening literals.
                        let base: usize = ip_index;
                        let matched: usize = (5usize).wrapping_add(FindMatchLengthWithLimit(
                            &input_ptr[candidate + 5..],
                            &input_ptr[ip_index + 5..],
                            ip_end.wrapping_sub(ip_index).wrapping_sub(5),
                        ));
                        if ip_index.wrapping_sub(candidate) > (1usize << 18).wrapping_sub(16) {
                            break;
                        }
                        ip_index = ip_index.wrapping_add(matched);
                        last_distance = base.wrapping_sub(candidate) as i32;
                        EmitCopyLen(
                            matched,
                            cmd_depth,
                            cmd_bits,
                            &mut cmd_histo[..],
                            storage_ix,
                            storage,
                        );
                        EmitDistance(
                            last_distance as usize,
                            cmd_depth,
                            cmd_bits,
                            &mut cmd_histo[..],
                            storage_ix,
                            storage,
                        );
                        next_emit = ip_index;
                        if ip_index >= ip_limit {
                            code_block_selection = CodeBlockState::EMIT_REMAINDER;
                            continue 'continue_to_next_block;
                        }
                        {
                            assert!(ip_index >= 3);
                            let input_bytes: u64 =
                                BROTLI_UNALIGNED_LOAD64(&input_ptr[ip_index - 3..]);
                            let mut prev_hash: u32 = HashBytesAtOffset(input_bytes, 0i32, shift);
                            let cur_hash: u32 = HashBytesAtOffset(input_bytes, 3i32, shift);
                            table[prev_hash as usize] =
                                ip_index.wrapping_sub(base_ip).wrapping_sub(3) as i32;
                            prev_hash = HashBytesAtOffset(input_bytes, 1i32, shift);
                            table[prev_hash as usize] =
                                ip_index.wrapping_sub(base_ip).wrapping_sub(2) as i32;
                            prev_hash = HashBytesAtOffset(input_bytes, 2i32, shift);
                            table[prev_hash as usize] =
                                ip_index.wrapping_sub(base_ip).wrapping_sub(1) as i32;
                            candidate = base_ip.wrapping_add(table[cur_hash as usize] as usize);
                            table[cur_hash as usize] = ip_index.wrapping_sub(base_ip) as i32;
                        }
                    }
                    if code_block_selection == CodeBlockState::EMIT_REMAINDER {
                        break;
                    }
                    if code_block_selection == CodeBlockState::EMIT_COMMANDS {
                        ip_index = ip_index.wrapping_add(1);
                        next_hash = Hash(&input_ptr[ip_index..], shift);
                    }
                }
            }
            code_block_selection = CodeBlockState::EMIT_REMAINDER;
            continue 'continue_to_next_block;
        } else if code_block_selection == CodeBlockState::EMIT_REMAINDER {
            input_index = input_index.wrapping_add(block_size);
            input_size = input_size.wrapping_sub(block_size);
            block_size = brotli_min_size_t(input_size, kMergeBlockSize);
            // If there is more input and the merged meta-block stays within
            // the 20-bit MLEN limit, extend the current meta-block instead of
            // starting a new one, patching the stored MLEN field.
            if input_size > 0
                && total_block_size.wrapping_add(block_size) <= (1usize << 20)
                && ShouldMergeBlock(&input_ptr[input_index..], block_size, &lit_depth[..])
            {
                total_block_size = total_block_size.wrapping_add(block_size);
                UpdateBits(
                    20usize,
                    total_block_size.wrapping_sub(1) as u32,
                    mlen_storage_ix,
                    storage,
                );
                code_block_selection = CodeBlockState::EMIT_COMMANDS;
                continue 'continue_to_next_block;
            }
            // Emit the remaining bytes as literals.
            if next_emit < ip_end {
                let insert: usize = ip_end.wrapping_sub(next_emit);
                if insert < 6210 {
                    EmitInsertLen(
                        insert,
                        cmd_depth,
                        cmd_bits,
                        &mut cmd_histo[..],
                        storage_ix,
                        storage,
                    );
                    EmitLiterals(
                        &input_ptr[next_emit..],
                        insert,
                        &lit_depth[..],
                        &lit_bits[..],
                        storage_ix,
                        storage,
                    );
                } else if ShouldUseUncompressedMode(
                    next_emit as isize - metablock_start as isize,
                    insert,
                    literal_ratio,
                ) {
                    EmitUncompressedMetaBlock(
                        &input_ptr[metablock_start..],
                        ip_end.wrapping_sub(metablock_start),
                        mlen_storage_ix.wrapping_sub(3),
                        storage_ix,
                        storage,
                    );
                } else {
                    EmitLongInsertLen(
                        insert,
                        cmd_depth,
                        cmd_bits,
                        &mut cmd_histo[..],
                        storage_ix,
                        storage,
                    );
                    EmitLiterals(
                        &input_ptr[next_emit..],
                        insert,
                        &lit_depth[..],
                        &lit_bits[..],
                        storage_ix,
                        storage,
                    );
                }
            }
            next_emit = ip_end;
            code_block_selection = CodeBlockState::NEXT_BLOCK;
            continue 'continue_to_next_block;
        } else if code_block_selection == CodeBlockState::NEXT_BLOCK {
            if input_size > 0 {
                // Start a new meta-block with fresh literal and command
                // prefix codes.
                metablock_start = input_index;
                block_size = brotli_min_size_t(input_size, kFirstBlockSize);
                total_block_size = block_size;
                mlen_storage_ix = storage_ix.wrapping_add(3);
                BrotliStoreMetaBlockHeader(block_size, 0i32, storage_ix, storage);
                BrotliWriteBits(13usize, 0, storage_ix, storage);
                literal_ratio = BuildAndStoreLiteralPrefixCode(
                    m,
                    &input_ptr[input_index..],
                    block_size,
                    &mut lit_depth[..],
                    &mut lit_bits[..],
                    storage_ix,
                    storage,
                );
                BuildAndStoreCommandPrefixCode(
                    &cmd_histo[..],
                    cmd_depth,
                    cmd_bits,
                    storage_ix,
                    storage,
                );
                code_block_selection = CodeBlockState::EMIT_COMMANDS;
                continue 'continue_to_next_block;
            }
            break;
        }
    }
    if is_last == 0 {
        // Not the last fragment: pre-compute the command prefix code for the
        // next fragment into cmd_code.
        cmd_code[0] = 0;
        *cmd_code_numbits = 0;
        BuildAndStoreCommandPrefixCode(
            &cmd_histo[..],
            cmd_depth,
            cmd_bits,
            cmd_code_numbits,
            cmd_code,
        );
    }
}

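// Generates a wrapper that calls BrotliCompressFragmentFastImpl with a
// constant table_bits, so the hash shift can be treated as a compile-time
// constant in the hot loop.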
macro_rules! compress_specialization {
    ($table_bits: expr, $fname: ident) => {
        fn $fname<AllocHT: alloc::Allocator<HuffmanTree>>(
            mht: &mut AllocHT,
            input: &[u8],
            input_size: usize,
            is_last: i32,
            table: &mut [i32],
            cmd_depth: &mut [u8],
            cmd_bits: &mut [u16],
            cmd_code_numbits: &mut usize,
            cmd_code: &mut [u8],
            storage_ix: &mut usize,
            storage: &mut [u8],
        ) {
            BrotliCompressFragmentFastImpl(
                mht,
                input,
                input_size,
                is_last,
                table,
                $table_bits,
                cmd_depth,
                cmd_bits,
                cmd_code_numbits,
                cmd_code,
                storage_ix,
                storage,
            );
        }
    };
}

compress_specialization!(9, BrotliCompressFragmentFastImpl9);
compress_specialization!(11, BrotliCompressFragmentFastImpl11);
compress_specialization!(13, BrotliCompressFragmentFastImpl13);
compress_specialization!(15, BrotliCompressFragmentFastImpl15);

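// Compresses `input` as a sequence of meta-blocks, appending to the bit
// stream at `*storage_ix`. `table_size` must be a power of two; only sizes
// with 9, 11, 13, or 15 bits select a compression specialization. If the
// compressed output would exceed the input size by more than a few bytes,
// it is replaced with an uncompressed meta-block.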
pub fn BrotliCompressFragmentFast<AllocHT: alloc::Allocator<HuffmanTree>>(
    m: &mut AllocHT,
    input: &[u8],
    input_size: usize,
    is_last: i32,
    table: &mut [i32],
    table_size: usize,
    cmd_depth: &mut [u8],
    cmd_bits: &mut [u16],
    cmd_code_numbits: &mut usize,
    cmd_code: &mut [u8],
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    let initial_storage_ix: usize = *storage_ix;
    let table_bits: usize = Log2FloorNonZero(table_size as u64) as usize;
    if input_size == 0 {
        // Emit a minimal last-and-empty meta-block and byte-align.
        BrotliWriteBits(1usize, 1, storage_ix, storage); // ISLAST
        BrotliWriteBits(1usize, 1, storage_ix, storage); // ISEMPTY
        *storage_ix = storage_ix.wrapping_add(7u32 as usize) & !7u32 as usize;
        return;
    }
    match table_bits {
        9 => BrotliCompressFragmentFastImpl9(
            m, input, input_size, is_last, table, cmd_depth, cmd_bits, cmd_code_numbits,
            cmd_code, storage_ix, storage,
        ),
        11 => BrotliCompressFragmentFastImpl11(
            m, input, input_size, is_last, table, cmd_depth, cmd_bits, cmd_code_numbits,
            cmd_code, storage_ix, storage,
        ),
        13 => BrotliCompressFragmentFastImpl13(
            m, input, input_size, is_last, table, cmd_depth, cmd_bits, cmd_code_numbits,
            cmd_code, storage_ix, storage,
        ),
        15 => BrotliCompressFragmentFastImpl15(
            m, input, input_size, is_last, table, cmd_depth, cmd_bits, cmd_code_numbits,
            cmd_code, storage_ix, storage,
        ),
        _ => {}
    }
    // If the compressed output grew past the size of an uncompressed
    // meta-block, throw it away and store the input uncompressed instead.
    if storage_ix.wrapping_sub(initial_storage_ix) > (31usize).wrapping_add(input_size << 3) {
        EmitUncompressedMetaBlock(input, input_size, initial_storage_ix, storage_ix, storage);
    }
    if is_last != 0 {
        BrotliWriteBits(1usize, 1, storage_ix, storage); // ISLAST
        BrotliWriteBits(1usize, 1, storage_ix, storage); // ISEMPTY
        *storage_ix = storage_ix.wrapping_add(7u32 as usize) & !7u32 as usize;
    }
}