#![allow(dead_code)]
use super::backward_references::kHashMul32;
use super::super::alloc;
use super::bit_cost::BitsEntropy;
use super::brotli_bit_stream::{BrotliBuildAndStoreHuffmanTreeFast, BrotliStoreHuffmanTree};
use super::entropy_encode::{
    BrotliConvertBitDepthsToSymbols, BrotliCreateHuffmanTree, HuffmanTree, NewHuffmanTree,
};
use super::static_dict::{
    FindMatchLengthWithLimit, BROTLI_UNALIGNED_LOAD32, BROTLI_UNALIGNED_LOAD64,
    BROTLI_UNALIGNED_STORE64,
};
use super::util::{brotli_min_size_t, Log2FloorNonZero};
use core;

static kCompressFragmentTwoPassBlockSize: usize = (1i32 << 17) as usize;

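/// Emits one command word for an insert of `insertlen` literal bytes: the low
/// 8 bits hold the insert-length prefix code, the upper 24 bits its extra
/// bits. Advances `commands` by one slot and returns the number of command
/// words written (always 1).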
fn EmitInsertLen(insertlen: u32, commands: &mut &mut [u32]) -> usize {
    if insertlen < 6u32 {
        (*commands)[0] = insertlen;
    } else if insertlen < 130u32 {
        let tail: u32 = insertlen.wrapping_sub(2);
        let nbits: u32 = Log2FloorNonZero(tail as (u64)).wrapping_sub(1);
        let prefix: u32 = tail >> nbits;
        let inscode: u32 = (nbits << 1).wrapping_add(prefix).wrapping_add(2);
        let extra: u32 = tail.wrapping_sub(prefix << nbits);
        (*commands)[0] = inscode | extra << 8;
    } else if insertlen < 2114u32 {
        let tail: u32 = insertlen.wrapping_sub(66);
        let nbits: u32 = Log2FloorNonZero(tail as (u64));
        let code: u32 = nbits.wrapping_add(10);
        let extra: u32 = tail.wrapping_sub(1u32 << nbits);
        (*commands)[0] = code | extra << 8;
    } else if insertlen < 6210u32 {
        let extra: u32 = insertlen.wrapping_sub(2114);
        (*commands)[0] = 21u32 | extra << 8;
    } else if insertlen < 22594u32 {
        let extra: u32 = insertlen.wrapping_sub(6210);
        (*commands)[0] = 22u32 | extra << 8;
    } else {
        let extra: u32 = insertlen.wrapping_sub(22594);
        (*commands)[0] = 23u32 | extra << 8;
    }
    let remainder = core::mem::take(commands);
    let _ = core::mem::replace(commands, &mut remainder[1..]);
    1
}

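/// Emits one command word for a match distance: `distance + 3` is split into
/// a prefix code (offset by 80 into the command alphabet) and extra bits in
/// the upper 24 bits. Advances `commands` and returns 1.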
fn EmitDistance(distance: u32, commands: &mut &mut [u32]) -> usize {
    let d: u32 = distance.wrapping_add(3);
    let nbits: u32 = Log2FloorNonZero(d as (u64)).wrapping_sub(1);
    let prefix: u32 = d >> nbits & 1u32;
    let offset: u32 = (2u32).wrapping_add(prefix) << nbits;
    let distcode: u32 = (2u32)
        .wrapping_mul(nbits.wrapping_sub(1))
        .wrapping_add(prefix)
        .wrapping_add(80);
    let extra: u32 = d.wrapping_sub(offset);
    (*commands)[0] = distcode | extra << 8;
    let remainder = core::mem::take(commands);
    let _ = core::mem::replace(commands, &mut remainder[1..]);
    1
}

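/// Emits the copy-length command(s) for a match that reuses the last
/// distance. Short copies (< 12) fit a single combined command word; longer
/// copies are followed by command 64, which acts as the "last distance"
/// marker in this scheme, so the return value is 1 or 2.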
fn EmitCopyLenLastDistance(copylen: usize, commands: &mut &mut [u32]) -> usize {
    if copylen < 12usize {
        (*commands)[0] = copylen.wrapping_add(20) as u32;
        let remainder = core::mem::take(commands);
        let _ = core::mem::replace(commands, &mut remainder[1..]);
        1
    } else if copylen < 72usize {
        let tail: usize = copylen.wrapping_sub(8);
        let nbits: usize = Log2FloorNonZero(tail as u64).wrapping_sub(1) as usize;
        let prefix: usize = tail >> nbits;
        let code: usize = (nbits << 1).wrapping_add(prefix).wrapping_add(28);
        let extra: usize = tail.wrapping_sub(prefix << nbits);
        (*commands)[0] = (code | extra << 8) as u32;
        let remainder = core::mem::take(commands);
        let _ = core::mem::replace(commands, &mut remainder[1..]);
        1
    } else if copylen < 136usize {
        let tail: usize = copylen.wrapping_sub(8);
        let code: usize = (tail >> 5).wrapping_add(54);
        let extra: usize = tail & 31usize;
        (*commands)[0] = (code | extra << 8) as u32;
        let remainder = core::mem::take(commands);
        let _ = core::mem::replace(commands, &mut remainder[1..]);
        (*commands)[0] = 64u32;
        let remainder2 = core::mem::take(commands);
        let _ = core::mem::replace(commands, &mut remainder2[1..]);
        2
    } else if copylen < 2120usize {
        let tail: usize = copylen.wrapping_sub(72);
        let nbits: usize = Log2FloorNonZero(tail as u64) as usize;
        let code: usize = nbits.wrapping_add(52);
        let extra: usize = tail.wrapping_sub(1usize << nbits);
        (*commands)[0] = (code | extra << 8) as u32;
        let remainder = core::mem::take(commands);
        let _ = core::mem::replace(commands, &mut remainder[1..]);
        (*commands)[0] = 64u32;
        let remainder2 = core::mem::take(commands);
        let _ = core::mem::replace(commands, &mut remainder2[1..]);
        2
    } else {
        let extra: usize = copylen.wrapping_sub(2120);
        (*commands)[0] = (63usize | extra << 8) as u32;
        let remainder = core::mem::take(commands);
        let _ = core::mem::replace(commands, &mut remainder[1..]);
        (*commands)[0] = 64u32;
        let remainder2 = core::mem::take(commands);
        let _ = core::mem::replace(commands, &mut remainder2[1..]);
        2
    }
}
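
/// Hashes `length` bytes of the 64-bit word `v`, starting at byte `offset`,
/// with the same multiplicative hash used by `Hash`.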
fn HashBytesAtOffset(v: u64, offset: i32, shift: usize, length: usize) -> u32 {
    let h: u64 = (v >> (8i32 * offset) << ((8 - length) * 8)).wrapping_mul(kHashMul32 as (u64));
    (h >> shift) as u32
}

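/// Emits one command word for a copy length whose distance is emitted
/// separately; the counterpart of `EmitCopyLenLastDistance`. Returns 1.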
fn EmitCopyLen(copylen: usize, commands: &mut &mut [u32]) -> usize {
    if copylen < 10usize {
        (*commands)[0] = copylen.wrapping_add(38) as u32;
    } else if copylen < 134usize {
        let tail: usize = copylen.wrapping_sub(6);
        let nbits: usize = Log2FloorNonZero(tail as u64).wrapping_sub(1) as usize;
        let prefix: usize = tail >> nbits;
        let code: usize = (nbits << 1).wrapping_add(prefix).wrapping_add(44);
        let extra: usize = tail.wrapping_sub(prefix << nbits);
        (*commands)[0] = (code | extra << 8) as u32;
    } else if copylen < 2118usize {
        let tail: usize = copylen.wrapping_sub(70);
        let nbits: usize = Log2FloorNonZero(tail as u64) as usize;
        let code: usize = nbits.wrapping_add(52);
        let extra: usize = tail.wrapping_sub(1usize << nbits);
        (*commands)[0] = (code | extra << 8) as u32;
    } else {
        let extra: usize = copylen.wrapping_sub(2118);
        (*commands)[0] = (63usize | extra << 8) as u32;
    }
    let remainder = core::mem::take(commands);
    let _ = core::mem::replace(commands, &mut remainder[1..]);
    1
}
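
/// Multiplicative hash of the first `length` bytes at `p`, reduced to the top
/// `64 - shift` bits (i.e. `table_bits` bits).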
fn Hash(p: &[u8], shift: usize, length: usize) -> u32 {
    let h: u64 =
        (BROTLI_UNALIGNED_LOAD64(p) << ((8 - length) * 8)).wrapping_mul(kHashMul32 as (u64));
    (h >> shift) as u32
}

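/// Returns true if `p1` and `p2` agree on their first `length` bytes, where
/// `length` is either 4 or 6.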
fn IsMatch(p1: &[u8], p2: &[u8], length: usize) -> bool {
    BROTLI_UNALIGNED_LOAD32(p1) == BROTLI_UNALIGNED_LOAD32(p2)
        && (length == 4 || (p1[4] == p2[4] && p1[5] == p2[5]))
}

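/// First pass: greedily finds matches within
/// `base_ip[input_index..input_index + block_size]`, using `table` as a hash
/// table of earlier positions, and appends the resulting literal bytes to
/// `literals` and command words to `commands`.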
#[allow(unused_assignments)]
fn CreateCommands(
    input_index: usize,
    block_size: usize,
    input_size: usize,
    base_ip: &[u8],
    table: &mut [i32],
    table_bits: usize,
    min_match: usize,
    literals: &mut &mut [u8],
    num_literals: &mut usize,
    commands: &mut &mut [u32],
    num_commands: &mut usize,
) {
    let mut ip_index: usize = input_index;
    let shift: usize = (64u32 as usize).wrapping_sub(table_bits);
    let ip_end: usize = input_index.wrapping_add(block_size);
    let mut next_emit: usize = input_index;
    let mut last_distance: i32 = -1i32;
    let kInputMarginBytes: usize = 16usize;

    if block_size >= kInputMarginBytes {
        let len_limit: usize = brotli_min_size_t(
            block_size.wrapping_sub(min_match),
            input_size.wrapping_sub(kInputMarginBytes),
        );
        let ip_limit: usize = input_index.wrapping_add(len_limit);
        let mut next_hash: u32;
        let mut goto_emit_remainder: i32 = 0i32;
        ip_index = ip_index.wrapping_add(1);
        next_hash = Hash(&base_ip[ip_index..], shift, min_match);
        while goto_emit_remainder == 0 {
            // Heuristic match skipping: after 32 misses, probe every other
            // byte, then every third, and so on; reset when a match is found.
            let mut skip: u32 = 32u32;
            let mut next_ip: usize = ip_index;
            let mut candidate: usize = 0;
            loop {
                'break3: loop {
                    let hash: u32 = next_hash;
                    let bytes_between_hash_lookups: u32 = skip >> 5;
                    skip = skip.wrapping_add(1);
                    ip_index = next_ip;
                    next_ip = ip_index.wrapping_add(bytes_between_hash_lookups as usize);
                    if next_ip > ip_limit {
                        goto_emit_remainder = 1i32;
                        break 'break3;
                    }
                    next_hash = Hash(&base_ip[next_ip..], shift, min_match);
                    candidate = ip_index.wrapping_sub(last_distance as usize);
                    if IsMatch(&base_ip[ip_index..], &base_ip[candidate..], min_match)
                        && candidate < ip_index
                    {
                        table[hash as usize] = ip_index as i32;
                        break 'break3;
                    }
                    candidate = table[hash as usize] as usize;
                    table[hash as usize] = ip_index as i32;
                    if IsMatch(&base_ip[ip_index..], &base_ip[candidate..], min_match) {
                        break;
                    }
                }
                if !(ip_index.wrapping_sub(candidate) > (1usize << 18).wrapping_sub(16)
                    && goto_emit_remainder == 0)
                {
                    break;
                }
            }
            if goto_emit_remainder != 0 {
                break;
            }
            // Emit the pending literals, then the distance and copy length of
            // the match that was just found.
            {
                let base: usize = ip_index;
                let matched: usize = min_match.wrapping_add(FindMatchLengthWithLimit(
                    &base_ip[(candidate + min_match)..],
                    &base_ip[(ip_index + min_match)..],
                    ip_end.wrapping_sub(ip_index).wrapping_sub(min_match),
                ));
                let distance: i32 = base.wrapping_sub(candidate) as i32;
                let insert: i32 = base.wrapping_sub(next_emit) as i32;
                ip_index = ip_index.wrapping_add(matched);
                *num_commands += EmitInsertLen(insert as u32, commands);
                (*literals)[..(insert as usize)]
                    .clone_from_slice(&base_ip[next_emit..(next_emit + insert as usize)]);
                *num_literals += insert as usize;
                let new_literals = core::mem::take(literals);
                let _ = core::mem::replace(literals, &mut new_literals[(insert as usize)..]);
                if distance == last_distance {
                    // Command 64 signals "reuse the last distance".
                    (*commands)[0] = 64u32;
                    let remainder = core::mem::take(commands);
                    let _ = core::mem::replace(commands, &mut remainder[1..]);
                    *num_commands += 1;
                } else {
                    *num_commands += EmitDistance(distance as u32, commands);
                    last_distance = distance;
                }
                *num_commands += EmitCopyLenLastDistance(matched, commands);
                next_emit = ip_index;
                if ip_index >= ip_limit {
                    goto_emit_remainder = 1i32;
                    break;
                }
                // Re-seed the hash table for the positions just behind the
                // current one, then look up a candidate at ip_index itself.
                {
                    let mut input_bytes: u64;
                    let mut prev_hash: u32;
                    let cur_hash: u32;
                    if min_match == 4 {
                        input_bytes = BROTLI_UNALIGNED_LOAD64(&base_ip[(ip_index - 3)..]);
                        cur_hash = HashBytesAtOffset(input_bytes, 3i32, shift, min_match);
                        prev_hash = HashBytesAtOffset(input_bytes, 0i32, shift, min_match);
                        table[prev_hash as usize] = ip_index.wrapping_sub(3) as i32;
                        prev_hash = HashBytesAtOffset(input_bytes, 1i32, shift, min_match);
                        table[prev_hash as usize] = ip_index.wrapping_sub(2) as i32;
                        prev_hash = HashBytesAtOffset(input_bytes, 2i32, shift, min_match);
                        table[prev_hash as usize] = ip_index.wrapping_sub(1) as i32;
                    } else {
                        assert!(ip_index >= 5);
                        input_bytes = BROTLI_UNALIGNED_LOAD64(&base_ip[(ip_index - 5)..]);
                        prev_hash = HashBytesAtOffset(input_bytes, 0i32, shift, min_match);
                        table[prev_hash as usize] = ip_index.wrapping_sub(5) as i32;
                        prev_hash = HashBytesAtOffset(input_bytes, 1i32, shift, min_match);
                        table[prev_hash as usize] = ip_index.wrapping_sub(4) as i32;
                        prev_hash = HashBytesAtOffset(input_bytes, 2i32, shift, min_match);
                        table[prev_hash as usize] = ip_index.wrapping_sub(3) as i32;
                        assert!(ip_index >= 2);
                        input_bytes = BROTLI_UNALIGNED_LOAD64(&base_ip[(ip_index - 2)..]);
                        cur_hash = HashBytesAtOffset(input_bytes, 2i32, shift, min_match);
                        prev_hash = HashBytesAtOffset(input_bytes, 0i32, shift, min_match);
                        table[prev_hash as usize] = ip_index.wrapping_sub(2) as i32;
                        prev_hash = HashBytesAtOffset(input_bytes, 1i32, shift, min_match);
                        table[prev_hash as usize] = ip_index.wrapping_sub(1) as i32;
                    }
                    candidate = table[cur_hash as usize] as usize;
                    table[cur_hash as usize] = ip_index as i32;
                }
            }
            // Keep emitting copy commands while the candidate keeps matching;
            // there are no pending literals on this path.
            while ip_index.wrapping_sub(candidate) <= (1usize << 18).wrapping_sub(16)
                && IsMatch(&base_ip[ip_index..], &base_ip[candidate..], min_match)
            {
                let base_index: usize = ip_index;
                let matched: usize = min_match.wrapping_add(FindMatchLengthWithLimit(
                    &base_ip[(candidate + min_match)..],
                    &base_ip[(ip_index + min_match)..],
                    ip_end.wrapping_sub(ip_index).wrapping_sub(min_match),
                ));
                ip_index = ip_index.wrapping_add(matched);
                last_distance = base_index.wrapping_sub(candidate) as i32;
                *num_commands += EmitCopyLen(matched, commands);
                *num_commands += EmitDistance(last_distance as u32, commands);
                next_emit = ip_index;
                if ip_index >= ip_limit {
                    goto_emit_remainder = 1i32;
                    break;
                }
                {
                    assert!(ip_index >= 5);
                    let mut input_bytes: u64;
                    let cur_hash: u32;
                    let mut prev_hash: u32;
                    if min_match == 4 {
                        input_bytes = BROTLI_UNALIGNED_LOAD64(&base_ip[(ip_index - 3)..]);
                        cur_hash = HashBytesAtOffset(input_bytes, 3i32, shift, min_match);
                        prev_hash = HashBytesAtOffset(input_bytes, 0i32, shift, min_match);
                        table[prev_hash as usize] = ip_index.wrapping_sub(3) as i32;
                        prev_hash = HashBytesAtOffset(input_bytes, 1i32, shift, min_match);
                        table[prev_hash as usize] = ip_index.wrapping_sub(2) as i32;
                        prev_hash = HashBytesAtOffset(input_bytes, 2i32, shift, min_match);
                        table[prev_hash as usize] = ip_index.wrapping_sub(1) as i32;
                    } else {
                        input_bytes = BROTLI_UNALIGNED_LOAD64(&base_ip[(ip_index - 5)..]);
                        prev_hash = HashBytesAtOffset(input_bytes, 0i32, shift, min_match);
                        table[prev_hash as usize] = ip_index.wrapping_sub(5) as i32;
                        prev_hash = HashBytesAtOffset(input_bytes, 1i32, shift, min_match);
                        table[prev_hash as usize] = ip_index.wrapping_sub(4) as i32;
                        prev_hash = HashBytesAtOffset(input_bytes, 2i32, shift, min_match);
                        table[prev_hash as usize] = ip_index.wrapping_sub(3) as i32;
                        assert!(ip_index >= 2);
                        input_bytes = BROTLI_UNALIGNED_LOAD64(&base_ip[(ip_index - 2)..]);
                        cur_hash = HashBytesAtOffset(input_bytes, 2i32, shift, min_match);
                        prev_hash = HashBytesAtOffset(input_bytes, 0i32, shift, min_match);
                        table[prev_hash as usize] = ip_index.wrapping_sub(2) as i32;
                        prev_hash = HashBytesAtOffset(input_bytes, 1i32, shift, min_match);
                        table[prev_hash as usize] = ip_index.wrapping_sub(1) as i32;
                    }
                    candidate = table[cur_hash as usize] as usize;
                    table[cur_hash as usize] = ip_index as i32;
                }
            }
            if goto_emit_remainder == 0 {
                ip_index = ip_index.wrapping_add(1);
                next_hash = Hash(&base_ip[ip_index..], shift, min_match);
            }
        }
    }
    if next_emit < ip_end {
        // Emit the remaining bytes of the block as literals.
        let insert: u32 = ip_end.wrapping_sub(next_emit) as u32;
        *num_commands += EmitInsertLen(insert, commands);
        literals[..insert as usize]
            .clone_from_slice(&base_ip[next_emit..(next_emit + insert as usize)]);
        let mut xliterals = core::mem::take(literals);
        *literals = &mut core::mem::take(&mut xliterals)[(insert as usize)..];
        *num_literals += insert as usize;
    }
}

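/// Heuristic: the block is worth compressing if fewer than 98% of its bytes
/// were emitted as literals, or if a literal histogram sampled at every 43rd
/// byte has an estimated entropy below 98% of 8 bits per byte.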
fn ShouldCompress(input: &[u8], input_size: usize, num_literals: usize) -> bool {
    let corpus_size: super::util::floatX = input_size as (super::util::floatX);
    if num_literals as (super::util::floatX) < 0.98 as super::util::floatX * corpus_size {
        true
    } else {
        // Sample every 43rd byte; the cost budget is scaled by the same 1/43
        // so the comparison is effectively in bits per byte.
        let mut literal_histo: [u32; 256] = [0; 256];
        let max_total_bit_cost: super::util::floatX =
            corpus_size * 8i32 as (super::util::floatX) * 0.98 as super::util::floatX
                / 43i32 as (super::util::floatX);
        let mut i: usize = 0;
        while i < input_size {
            literal_histo[input[i] as usize] = literal_histo[input[i] as usize].wrapping_add(1);
            i = i.wrapping_add(43);
        }
        BitsEntropy(&mut literal_histo[..], 256) < max_total_bit_cost
    }
}

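/// Writes the low `n_bits` of `bits` at bit position `*pos` and advances the
/// position. Assumes the bits at and above `*pos` are still zero and that
/// `array` extends at least 8 bytes past the write position, since a full
/// 64-bit word is stored back.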
pub fn BrotliWriteBits(n_bits: usize, bits: u64, pos: &mut usize, array: &mut [u8]) {
    let p = &mut array[(*pos >> 3)..];
    let mut v: u64 = p[0] as (u64);
    v |= bits << (*pos & 7);
    BROTLI_UNALIGNED_STORE64(p, v);
    *pos = pos.wrapping_add(n_bits);
}
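
/// Writes a meta-block header: an ISLAST bit of 0, MNIBBLES (4, 5 or 6
/// nibbles depending on `len`), `len - 1` itself, and the ISUNCOMPRESSED bit.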
pub fn BrotliStoreMetaBlockHeader(
    len: usize,
    is_uncompressed: i32,
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    let mut nibbles: u64 = 6;
    BrotliWriteBits(1, 0, storage_ix, storage);
    if len <= (1u32 << 16) as usize {
        nibbles = 4;
    } else if len <= (1u32 << 20) as usize {
        nibbles = 5;
    }
    BrotliWriteBits(2, nibbles.wrapping_sub(4), storage_ix, storage);
    BrotliWriteBits(
        nibbles.wrapping_mul(4) as usize,
        len.wrapping_sub(1) as u64,
        storage_ix,
        storage,
    );
    BrotliWriteBits(1usize, is_uncompressed as (u64), storage_ix, storage);
}

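/// Copies `size_to_copy` elements from `src[src_offset..]` to
/// `dst[dst_offset..]`; a slice-based stand-in for C's `memcpy`.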
pub fn memcpy<T: Sized + Clone>(
    dst: &mut [T],
    dst_offset: usize,
    src: &[T],
    src_offset: usize,
    size_to_copy: usize,
) {
    dst[dst_offset..(dst_offset + size_to_copy)]
        .clone_from_slice(&src[src_offset..(src_offset + size_to_copy)]);
}
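
/// Builds and stores Huffman codes for the 128-symbol command alphabet, built
/// as two independent 64-symbol trees. The depth/bits arrays are shuffled
/// because the symbols are kept in a different order than in the full
/// alphabet; that ordering saves branches in the Emit* functions above.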
fn BuildAndStoreCommandPrefixCode(
    histogram: &[u32],
    depth: &mut [u8],
    bits: &mut [u16],
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    let mut tree: [HuffmanTree; 129] = [NewHuffmanTree(0, 0, 0); 129];
    let mut cmd_depth: [u8; 704] = [0; 704];
    let mut cmd_bits: [u16; 64] = [0; 64];
    BrotliCreateHuffmanTree(histogram, 64usize, 15i32, &mut tree[..], depth);
    BrotliCreateHuffmanTree(
        &histogram[64..],
        64usize,
        14i32,
        &mut tree[..],
        &mut depth[64..],
    );
    // Permute the depths into command-symbol order before assigning codes,
    // then apply the inverse permutation to the resulting bit codes.
    memcpy(&mut cmd_depth[..], 0, depth, 24, 24);
    memcpy(&mut cmd_depth[..], 24, depth, 0, 8);
    memcpy(&mut cmd_depth[..], 32, depth, 48, 8);
    memcpy(&mut cmd_depth[..], 40, depth, 8, 8);
    memcpy(&mut cmd_depth[..], 48, depth, 56, 8);
    memcpy(&mut cmd_depth[..], 56, depth, 16, 8);
    BrotliConvertBitDepthsToSymbols(&mut cmd_depth[..], 64usize, &mut cmd_bits[..]);
    memcpy(bits, 0, &cmd_bits[..], 24, 8);
    memcpy(bits, 8, &cmd_bits[..], 40, 8);
    memcpy(bits, 16, &cmd_bits[..], 56, 8);
    memcpy(bits, 24, &cmd_bits[..], 0, 24);
    memcpy(bits, 48, &cmd_bits[..], 32, 8);
    memcpy(bits, 56, &cmd_bits[..], 48, 8);
    BrotliConvertBitDepthsToSymbols(&mut depth[64..], 64usize, &mut bits[64..]);
    {
        // Create the bit length array for the full command alphabet.
        let mut i: usize;
        for item in cmd_depth[..64].iter_mut() {
            *item = 0;
        }
        memcpy(&mut cmd_depth[..], 0, depth, 24, 8);
        memcpy(&mut cmd_depth[..], 64, depth, 32, 8);
        memcpy(&mut cmd_depth[..], 128, depth, 40, 8);
        memcpy(&mut cmd_depth[..], 192, depth, 48, 8);
        memcpy(&mut cmd_depth[..], 384, depth, 56, 8);
        i = 0usize;
        while i < 8usize {
            cmd_depth[(128usize).wrapping_add((8usize).wrapping_mul(i))] = depth[i];
            cmd_depth[(256usize).wrapping_add((8usize).wrapping_mul(i))] =
                depth[i.wrapping_add(8)];
            cmd_depth[(448usize).wrapping_add((8usize).wrapping_mul(i))] =
                depth[i.wrapping_add(16)];
            i = i.wrapping_add(1);
        }
        BrotliStoreHuffmanTree(
            &mut cmd_depth[..],
            704usize,
            &mut tree[..],
            storage_ix,
            storage,
        );
    }
    BrotliStoreHuffmanTree(
        &mut depth[64..],
        64usize,
        &mut tree[..],
        storage_ix,
        storage,
    );
}

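/// Second pass: builds and stores the literal and command Huffman codes, then
/// writes the command stream, interleaving each insert command with the
/// literal bytes it consumes.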
fn StoreCommands<AllocHT: alloc::Allocator<HuffmanTree>>(
    mht: &mut AllocHT,
    mut literals: &[u8],
    num_literals: usize,
    commands: &[u32],
    num_commands: usize,
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    static kNumExtraBits: [u32; 128] = [
        0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 7, 8, 9, 10, 12, 14, 24, 0, 0, 0, 0, 0,
        0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6,
        7, 8, 9, 10, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5,
        5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, 14, 14, 15, 15, 16, 16, 17, 17,
        18, 18, 19, 19, 20, 20, 21, 21, 22, 22, 23, 23, 24, 24,
    ];
    static kInsertOffset: [u32; 24] = [
        0, 1, 2, 3, 4, 5, 6, 8, 10, 14, 18, 26, 34, 50, 66, 98, 130, 194, 322, 578, 1090, 2114,
        6210, 22594,
    ];
    let mut lit_depths: [u8; 256] = [0; 256];
    let mut lit_bits: [u16; 256] = [0; 256];
    let mut lit_histo: [u32; 256] = [0; 256];
    let mut cmd_depths: [u8; 128] = [0; 128];
    let mut cmd_bits: [u16; 128] = [0; 128];
    let mut cmd_histo: [u32; 128] = [0; 128];
    let mut i: usize;
    i = 0usize;
    while i < num_literals {
        lit_histo[literals[i] as usize] = lit_histo[literals[i] as usize].wrapping_add(1);
        i = i.wrapping_add(1);
    }
    BrotliBuildAndStoreHuffmanTreeFast(
        mht,
        &lit_histo[..],
        num_literals,
        8usize,
        &mut lit_depths[..],
        &mut lit_bits[..],
        storage_ix,
        storage,
    );
    i = 0usize;
    while i < num_commands {
        let code: u32 = commands[i] & 0xffu32;
        cmd_histo[code as usize] = cmd_histo[code as usize].wrapping_add(1);
        i = i.wrapping_add(1);
    }
    // Give a few special command codes a nonzero count so that they are
    // always assigned a code.
    cmd_histo[1] = cmd_histo[1].wrapping_add(1);
    cmd_histo[2] = cmd_histo[2].wrapping_add(1);
    cmd_histo[64] = cmd_histo[64].wrapping_add(1);
    cmd_histo[84] = cmd_histo[84].wrapping_add(1);
    BuildAndStoreCommandPrefixCode(
        &mut cmd_histo[..],
        &mut cmd_depths[..],
        &mut cmd_bits[..],
        storage_ix,
        storage,
    );
    i = 0usize;
    while i < num_commands {
        let cmd: u32 = commands[i];
        let code: u32 = cmd & 0xffu32;
        let extra: u32 = cmd >> 8;
        BrotliWriteBits(
            cmd_depths[code as usize] as usize,
            cmd_bits[code as usize] as (u64),
            storage_ix,
            storage,
        );
        BrotliWriteBits(
            kNumExtraBits[code as usize] as usize,
            extra as (u64),
            storage_ix,
            storage,
        );
        // Insert commands (codes 0..23) are followed by their literals.
        if code < 24u32 {
            let insert: u32 = kInsertOffset[code as usize].wrapping_add(extra);
            for literal in literals[..(insert as usize)].iter() {
                let lit: u8 = *literal;
                BrotliWriteBits(
                    lit_depths[lit as usize] as usize,
                    lit_bits[lit as usize] as (u64),
                    storage_ix,
                    storage,
                );
            }
            literals = &literals[insert as usize..];
        }
        i = i.wrapping_add(1);
    }
}
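
/// Stores `input` verbatim: uncompressed meta-block header, padding to a byte
/// boundary, the raw bytes, and a zeroed byte so the bit writer can continue.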
fn EmitUncompressedMetaBlock(
    input: &[u8],
    input_size: usize,
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    BrotliStoreMetaBlockHeader(input_size, 1i32, storage_ix, storage);
    *storage_ix = storage_ix.wrapping_add(7) & !7usize;
    memcpy(storage, *storage_ix >> 3, input, 0, input_size);
    *storage_ix = storage_ix.wrapping_add(input_size << 3);
    storage[*storage_ix >> 3] = 0u8;
}
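
/// Compresses the input in blocks of at most `kCompressFragmentTwoPassBlockSize`
/// (1 << 17 = 128 KiB) bytes: pass one builds commands and literals
/// (`CreateCommands`), pass two entropy-codes them (`StoreCommands`), falling
/// back to an uncompressed meta-block whenever `ShouldCompress` says no.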
#[allow(unused_variables)]
#[inline(always)]
fn BrotliCompressFragmentTwoPassImpl<AllocHT: alloc::Allocator<HuffmanTree>>(
    m: &mut AllocHT,
    base_ip: &[u8],
    mut input_size: usize,
    is_last: i32,
    command_buf: &mut [u32],
    literal_buf: &mut [u8],
    table: &mut [i32],
    table_bits: usize,
    min_match: usize,
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    let mut input_index: usize = 0usize;
    while input_size > 0usize {
        let block_size: usize = brotli_min_size_t(input_size, kCompressFragmentTwoPassBlockSize);
        let mut num_literals: usize = 0;
        let mut num_commands: usize = 0;
        {
            let mut literals = &mut literal_buf[..];
            let mut commands = &mut command_buf[..];
            CreateCommands(
                input_index,
                block_size,
                input_size,
                base_ip,
                table,
                table_bits,
                min_match,
                &mut literals,
                &mut num_literals,
                &mut commands,
                &mut num_commands,
            );
        }
        if ShouldCompress(&base_ip[input_index..], block_size, num_literals) {
            BrotliStoreMetaBlockHeader(block_size, 0i32, storage_ix, storage);
            // No block splits, no contexts: a 13-bit run of zeros.
            BrotliWriteBits(13usize, 0, storage_ix, storage);
            StoreCommands(
                m,
                literal_buf,
                num_literals,
                command_buf,
                num_commands,
                storage_ix,
                storage,
            );
        } else {
            EmitUncompressedMetaBlock(&base_ip[input_index..], block_size, storage_ix, storage);
        }
        input_index = input_index.wrapping_add(block_size);
        input_size = input_size.wrapping_sub(block_size);
    }
}
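
// One monomorphized entry point per hash-table size: tables under 15 bits
// hash 4-byte sequences, larger tables hash 6-byte sequences.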
macro_rules! compress_specialization {
    ($table_bits: expr, $fname: ident) => {
        fn $fname<AllocHT: alloc::Allocator<HuffmanTree>>(
            mht: &mut AllocHT,
            input: &[u8],
            input_size: usize,
            is_last: i32,
            command_buf: &mut [u32],
            literal_buf: &mut [u8],
            table: &mut [i32],
            storage_ix: &mut usize,
            storage: &mut [u8],
        ) {
            let min_match = if $table_bits < 15 { 4 } else { 6 };
            BrotliCompressFragmentTwoPassImpl(
                mht,
                input,
                input_size,
                is_last,
                command_buf,
                literal_buf,
                table,
                $table_bits,
                min_match,
                storage_ix,
                storage,
            );
        }
    };
}
compress_specialization!(8, BrotliCompressFragmentTwoPassImpl8);
compress_specialization!(9, BrotliCompressFragmentTwoPassImpl9);
compress_specialization!(10, BrotliCompressFragmentTwoPassImpl10);
compress_specialization!(11, BrotliCompressFragmentTwoPassImpl11);
compress_specialization!(12, BrotliCompressFragmentTwoPassImpl12);
compress_specialization!(13, BrotliCompressFragmentTwoPassImpl13);
compress_specialization!(14, BrotliCompressFragmentTwoPassImpl14);
compress_specialization!(15, BrotliCompressFragmentTwoPassImpl15);
compress_specialization!(16, BrotliCompressFragmentTwoPassImpl16);
compress_specialization!(17, BrotliCompressFragmentTwoPassImpl17);

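/// Moves the bit writer back to `new_storage_ix`, clearing the already
/// written bits above the new position within its partial byte.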
fn RewindBitPosition(new_storage_ix: usize, storage_ix: &mut usize, storage: &mut [u8]) {
    let bitpos: usize = new_storage_ix & 7usize;
    let mask: usize = (1u32 << bitpos).wrapping_sub(1) as usize;
    storage[new_storage_ix >> 3] &= mask as u8;
    *storage_ix = new_storage_ix;
}

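/// Compresses `input` as a series of complete meta-blocks, dispatching on
/// `table_bits = log2(table_size)` (8..=17). `command_buf` and `literal_buf`
/// are expected to hold at least `kCompressFragmentTwoPassBlockSize` entries
/// each. If the compressed output turns out larger than the input, it is
/// rewritten as an uncompressed meta-block; if `is_last` is set, a final
/// empty meta-block closes the stream.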
pub fn BrotliCompressFragmentTwoPass<AllocHT: alloc::Allocator<HuffmanTree>>(
    m: &mut AllocHT,
    input: &[u8],
    input_size: usize,
    is_last: i32,
    command_buf: &mut [u32],
    literal_buf: &mut [u8],
    table: &mut [i32],
    table_size: usize,
    storage_ix: &mut usize,
    storage: &mut [u8],
) {
    let initial_storage_ix: usize = *storage_ix;
    let table_bits: usize = Log2FloorNonZero(table_size as u64) as usize;
    match table_bits {
        8 => BrotliCompressFragmentTwoPassImpl8(
            m, input, input_size, is_last, command_buf, literal_buf, table, storage_ix, storage,
        ),
        9 => BrotliCompressFragmentTwoPassImpl9(
            m, input, input_size, is_last, command_buf, literal_buf, table, storage_ix, storage,
        ),
        10 => BrotliCompressFragmentTwoPassImpl10(
            m, input, input_size, is_last, command_buf, literal_buf, table, storage_ix, storage,
        ),
        11 => BrotliCompressFragmentTwoPassImpl11(
            m, input, input_size, is_last, command_buf, literal_buf, table, storage_ix, storage,
        ),
        12 => BrotliCompressFragmentTwoPassImpl12(
            m, input, input_size, is_last, command_buf, literal_buf, table, storage_ix, storage,
        ),
        13 => BrotliCompressFragmentTwoPassImpl13(
            m, input, input_size, is_last, command_buf, literal_buf, table, storage_ix, storage,
        ),
        14 => BrotliCompressFragmentTwoPassImpl14(
            m, input, input_size, is_last, command_buf, literal_buf, table, storage_ix, storage,
        ),
        15 => BrotliCompressFragmentTwoPassImpl15(
            m, input, input_size, is_last, command_buf, literal_buf, table, storage_ix, storage,
        ),
        16 => BrotliCompressFragmentTwoPassImpl16(
            m, input, input_size, is_last, command_buf, literal_buf, table, storage_ix, storage,
        ),
        17 => BrotliCompressFragmentTwoPassImpl17(
            m, input, input_size, is_last, command_buf, literal_buf, table, storage_ix, storage,
        ),
        _ => {}
    }
    // If the output is larger than a single uncompressed meta-block would be,
    // rewrite it as one.
    if storage_ix.wrapping_sub(initial_storage_ix) > (31usize).wrapping_add(input_size << 3) {
        RewindBitPosition(initial_storage_ix, storage_ix, storage);
        EmitUncompressedMetaBlock(input, input_size, storage_ix, storage);
    }
    if is_last != 0 {
        // ISLAST = 1, ISLASTEMPTY = 1, then pad to the next byte boundary.
        BrotliWriteBits(1, 1, storage_ix, storage);
        BrotliWriteBits(1, 1, storage_ix, storage);
        *storage_ix = storage_ix.wrapping_add(7) & !7usize;
    }
}
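
// A minimal sanity-check sketch, not part of the original translation: it
// exercises only the two public bit-writer helpers above and assumes the
// little-endian, LSB-first packing implied by BROTLI_UNALIGNED_STORE64.
#[cfg(test)]
mod two_pass_bit_writer_tests {
    use super::{BrotliStoreMetaBlockHeader, BrotliWriteBits};

    #[test]
    fn write_bits_packs_lsb_first() {
        let mut storage = [0u8; 16];
        let mut pos = 0usize;
        // 0b101 lands in bits 0..3, 0b11010 in bits 3..8 of the first byte.
        BrotliWriteBits(3, 0b101, &mut pos, &mut storage);
        BrotliWriteBits(5, 0b11010, &mut pos, &mut storage);
        assert_eq!(pos, 8);
        assert_eq!(storage[0], 0b1101_0101);
    }

    #[test]
    fn meta_block_header_bit_count() {
        let mut storage = [0u8; 16];
        let mut pos = 0usize;
        // len = 1 uses 4 nibbles: 1 (ISLAST) + 2 (MNIBBLES) + 16 (MLEN - 1)
        // + 1 (ISUNCOMPRESSED) = 20 bits.
        BrotliStoreMetaBlockHeader(1, 1, &mut pos, &mut storage);
        assert_eq!(pos, 20);
    }
}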