1#![allow(unknown_lints)]
2#![allow(dead_code)]
3#![allow(unused_imports)]
4#![allow(unused_macros)]
5use super::combined_alloc::BrotliAlloc;
6use super::prior_eval;
7use super::stride_eval;
8use super::util::floatX;
9use super::{s16, v8};
10#[cfg(feature = "std")]
11use std::io::Write;
12use VERSION;
13
14use super::block_split::BlockSplit;
15use super::input_pair::{InputPair, InputReference, InputReferenceMut};
16use enc::backward_references::BrotliEncoderParams;
17
18use super::super::alloc;
19use super::super::alloc::{Allocator, SliceWrapper, SliceWrapperMut};
20use super::super::core;
21use super::super::dictionary::{
22 kBrotliDictionary, kBrotliDictionaryOffsetsByLength, kBrotliDictionarySizeBitsByLength,
23};
24use super::super::transform::TransformDictionaryWord;
25use super::command::{
26 Command, CommandDistanceIndexAndOffset, GetCopyLengthCode, GetInsertLengthCode,
27};
28use super::constants::{
29 kCodeLengthBits, kCodeLengthDepth, kCopyBase, kCopyExtra, kInsBase, kInsExtra,
30 kNonZeroRepsBits, kNonZeroRepsDepth, kSigned3BitContextLookup, kStaticCommandCodeBits,
31 kStaticCommandCodeDepth, kStaticDistanceCodeBits, kStaticDistanceCodeDepth, kUTF8ContextLookup,
32 kZeroRepsBits, kZeroRepsDepth, BROTLI_CONTEXT_LUT, BROTLI_NUM_BLOCK_LEN_SYMBOLS,
33 BROTLI_NUM_COMMAND_SYMBOLS, BROTLI_NUM_HISTOGRAM_DISTANCE_SYMBOLS, BROTLI_NUM_LITERAL_SYMBOLS,
34};
35use super::context_map_entropy::{speed_to_tuple, ContextMapEntropy, SpeedAndMax};
36use super::entropy_encode::{
37 BrotliConvertBitDepthsToSymbols, BrotliCreateHuffmanTree, BrotliSetDepth,
38 BrotliWriteHuffmanTree, HuffmanComparator, HuffmanTree, InitHuffmanTree, NewHuffmanTree,
39 SortHuffmanTreeItems,
40};
41use super::find_stride;
42use super::histogram::{
43 ContextType, HistogramAddItem, HistogramCommand, HistogramDistance, HistogramLiteral,
44};
45use super::interface;
46use super::interface::{CommandProcessor, StaticCommand};
47use super::pdf::PDF;
48use super::static_dict::kNumDistanceCacheEntries;
49use super::vectorization::Mem256f;
50pub struct PrefixCodeRange {
51 pub offset: u32,
52 pub nbits: u32,
53}
54pub const MAX_SIMPLE_DISTANCE_ALPHABET_SIZE: usize = 140;
55
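/// Usable sliding-window size for a given `lgwin`: 2^lgwin minus the 16 bytes brotli reserves.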
56fn window_size_from_lgwin(lgwin: i32) -> usize {
57 (1 << lgwin) - 16usize
58}
59
60fn context_type_str(context_type: ContextType) -> &'static str {
61 match context_type {
62 ContextType::CONTEXT_LSB6 => "lsb6",
63 ContextType::CONTEXT_MSB6 => "msb6",
64 ContextType::CONTEXT_UTF8 => "utf8",
65 ContextType::CONTEXT_SIGNED => "sign",
66 }
67}
68
69fn prediction_mode_str(
70 prediction_mode_nibble: interface::LiteralPredictionModeNibble,
71) -> &'static str {
72 match prediction_mode_nibble.prediction_mode() {
73 interface::LITERAL_PREDICTION_MODE_SIGN => "sign",
74 interface::LITERAL_PREDICTION_MODE_LSB6 => "lsb6",
75 interface::LITERAL_PREDICTION_MODE_MSB6 => "msb6",
76 interface::LITERAL_PREDICTION_MODE_UTF8 => "utf8",
77 _ => "unknown",
78 }
79}
80
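/// Minimum literal-run length, per high-entropy-detection quality level, at which a run is
/// treated as a candidate for the "random" (high-entropy) literal path; qualities 0 and 1
/// disable the detection entirely.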
81fn is_long_enough_to_be_random(len: usize, high_entropy_detection_quality: u8) -> bool {
82 match high_entropy_detection_quality {
83 0 => false,
84 1 => false,
85 2 => len >= 128,
86 3 => len >= 96,
87 4 => len >= 64,
88 5 => len >= 48,
89 6 => len >= 32,
90 7 => len >= 24,
91 8 => len >= 16,
92 9 => len >= 8,
93 10 => len >= 4,
94 11 => len >= 1,
95 _ => len >= 8,
96 }
97}
98const COMMAND_BUFFER_SIZE: usize = 4096;
99
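/// Accumulates the frozen IR commands (literals, copies, dictionary references, block switches)
/// produced while re-walking a metablock, together with the scratch state used by the stride,
/// context-map, and high-entropy detection passes. The queue is flushed to the caller's
/// callback and its allocations released in `free`.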
100struct CommandQueue<'a, Alloc: BrotliAlloc + 'a> {
101 mb: InputPair<'a>,
102 mb_byte_offset: usize,
103 mc: &'a mut Alloc,
104 queue: <Alloc as Allocator<StaticCommand>>::AllocatedMemory,
105 pred_mode: interface::PredictionModeContextMap<InputReferenceMut<'a>>,
106 loc: usize,
107 entropy_tally_scratch: find_stride::EntropyTally<Alloc>,
108 best_strides_per_block_type: <Alloc as Allocator<u8>>::AllocatedMemory,
109 entropy_pyramid: find_stride::EntropyPyramid<Alloc>,
110 context_map_entropy: ContextMapEntropy<'a, Alloc>,
111 stride_detection_quality: u8,
112 high_entropy_detection_quality: u8,
113 block_type_literal: u8,
114 best_stride_index: usize,
115 overfull: bool,
116}
117
118impl<'a, Alloc: BrotliAlloc> CommandQueue<'a, Alloc> {
119 fn new(
120 alloc: &'a mut Alloc,
121 num_commands: usize,
122 pred_mode: interface::PredictionModeContextMap<InputReferenceMut<'a>>,
123 mb: InputPair<'a>,
124 stride_detection_quality: u8,
125 high_entropy_detection_quality: u8,
126 context_map_entropy: ContextMapEntropy<'a, Alloc>,
127 best_strides: <Alloc as Allocator<u8>>::AllocatedMemory,
128 entropy_tally_scratch: find_stride::EntropyTally<Alloc>,
129 entropy_pyramid: find_stride::EntropyPyramid<Alloc>,
130 ) -> CommandQueue<'a, Alloc> {
        let queue =
            <Alloc as Allocator<StaticCommand>>::alloc_cell(alloc, num_commands * 17 / 16 + 4);
        CommandQueue {
            mc: alloc,
            queue,
            pred_mode,
            mb,
            mb_byte_offset: 0,
            loc: 0,
            best_strides_per_block_type: best_strides,
            entropy_tally_scratch,
            entropy_pyramid,
            stride_detection_quality,
            high_entropy_detection_quality,
            context_map_entropy,
            block_type_literal: 0,
            best_stride_index: 0,
            overfull: false,
        }
    }
153 fn full(&self) -> bool {
154 self.loc == self.queue.len()
155 }
156 fn error_if_full(&mut self) {
157 if self.full() {
158 self.overfull = true;
159 }
160 }
161 fn size(&self) -> usize {
162 self.loc
163 }
164 fn clear(&mut self) {
165 self.loc = 0;
166 self.block_type_literal = 0;
167 }
168 fn free<Cb>(&mut self, callback: &mut Cb) -> Result<(), ()>
169 where
170 Cb: FnMut(
171 &mut interface::PredictionModeContextMap<InputReferenceMut>,
172 &mut [interface::StaticCommand],
173 InputPair,
174 &mut Alloc,
175 ),
176 {
177 callback(
178 &mut self.pred_mode,
179 self.queue.slice_mut().split_at_mut(self.loc).0,
180 self.mb,
181 self.mc,
182 );
183 self.clear();
184 self.entropy_tally_scratch.free(self.mc);
185 self.entropy_pyramid.free(self.mc);
186 self.context_map_entropy.free(self.mc);
187 <Alloc as Allocator<StaticCommand>>::free_cell(self.mc, core::mem::take(&mut self.queue));
188 <Alloc as Allocator<u8>>::free_cell(
189 self.mc,
190 core::mem::take(&mut self.best_strides_per_block_type),
191 );
192 if self.overfull {
193 return Err(());
194 }
195 Ok(())
196 }
197}
198
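// `push` doubles the backing buffer when it fills up; if the reallocation still cannot hold
// the new command, `overfull` is set and the failure is reported later from `free`.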
199impl<'a, Alloc: BrotliAlloc> interface::CommandProcessor<'a> for CommandQueue<'a, Alloc> {
200 fn push(&mut self, val: interface::Command<InputReference<'a>>) {
201 if self.full() {
202 let mut tmp = <Alloc as Allocator<StaticCommand>>::alloc_cell(
203 self.mc,
204 self.queue.slice().len() * 2,
205 );
206 tmp.slice_mut()
207 .split_at_mut(self.queue.slice().len())
208 .0
209 .clone_from_slice(self.queue.slice());
210 <Alloc as Allocator<StaticCommand>>::free_cell(
211 self.mc,
212 core::mem::replace(&mut self.queue, tmp),
213 );
214 }
215 if !self.full() {
216 self.queue.slice_mut()[self.loc] = val.freeze();
217 self.loc += 1;
218 } else {
219 self.error_if_full();
220 }
221 }
222 fn push_block_switch_literal(&mut self, block_type: u8) {
223 self.push(interface::Command::BlockSwitchLiteral(
224 interface::LiteralBlockSwitch::new(block_type, 0),
225 ))
226 }
227}
228
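// A `CommandQueue` must be released through `free`, which has access to the allocator; `Drop`
// can only warn if the scratch buffers were never freed.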
229#[cfg(feature = "std")]
230fn warn_on_missing_free() {
231 let _err = ::std::io::stderr()
232 .write(b"Need to free entropy_tally_scratch before dropping CommandQueue\n");
233}
234#[cfg(not(feature = "std"))]
235fn warn_on_missing_free() {
236 }
238impl<'a, Alloc: BrotliAlloc> Drop for CommandQueue<'a, Alloc> {
239 fn drop(&mut self) {
240 if !self.entropy_tally_scratch.is_free() {
241 warn_on_missing_free();
242 }
243 }
244}
245#[cfg(not(feature = "billing"))]
246fn best_singleton_speed_log(_name: &str, _data: &[SpeedAndMax; 2], _cost: &[floatX; 2]) {}
247#[cfg(feature = "billing")]
248fn best_singleton_speed_log(name: &str, data: &[SpeedAndMax; 2], cost: &[floatX; 2]) {
249 println!(
250 "{} hi cost: {} lo cost: {} speeds {:?} {:?}",
251 name, cost[1], cost[0], data[1], data[0]
252 );
253}
254
255#[cfg(not(feature = "billing"))]
256fn best_speed_log(_name: &str, _data: &[SpeedAndMax; 2], _cost: &[floatX; 2]) {}
257#[cfg(feature = "billing")]
258fn best_speed_log(name: &str, data: &[SpeedAndMax; 2], cost: &[floatX; 2]) {
259 for high in 0..2 {
260 println!(
261 "{} Speed [ inc: {}, max: {}, algo: 0 ] cost: {}",
262 name,
263 if high != 0 { "hi" } else { "lo" },
264 data[high].0,
265 data[high].1,
266 cost[high]
267 );
268 }
269}
270
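/// Replays a metablock's commands against a `CommandProcessor`: splits insert runs at literal
/// block boundaries, resolves distances through the local distance cache, expands static
/// dictionary references, and emits command/distance block switches, while tracking how many
/// bytes have been encoded so far.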
271fn process_command_queue<'a, CmdProcessor: interface::CommandProcessor<'a>>(
272 command_queue: &mut CmdProcessor,
273 input: InputPair<'a>,
274 commands: &[Command],
275 dist_cache: &[i32; kNumDistanceCacheEntries],
276 mut recoder_state: RecoderState,
277 block_type: &MetaBlockSplitRefs,
278 params: &BrotliEncoderParams,
279 context_type: Option<ContextType>,
280) -> RecoderState {
281 let mut input_iter = input;
282 let mut local_dist_cache = [0i32; kNumDistanceCacheEntries];
283 local_dist_cache.clone_from_slice(&dist_cache[..]);
284 let mut btypel_counter = 0usize;
285 let mut btypec_counter = 0usize;
286 let mut btyped_counter = 0usize;
287 let mut btypel_sub = if block_type.btypel.num_types == 1 {
288 1u32 << 31
289 } else {
290 block_type.btypel.lengths[0]
291 };
292 let mut btypec_sub = if block_type.btypec.num_types == 1 {
293 1u32 << 31
294 } else {
295 block_type.btypec.lengths[0]
296 };
297 let mut btyped_sub = if block_type.btyped.num_types == 1 {
298 1u32 << 31
299 } else {
300 block_type.btyped.lengths[0]
301 };
302 {
303 command_queue.push_block_switch_literal(0);
304 }
305 let mut mb_len = input.len();
306 for cmd in commands.iter() {
307 let (inserts, interim) =
308 input_iter.split_at(core::cmp::min(cmd.insert_len_ as usize, mb_len));
309 recoder_state.num_bytes_encoded += inserts.len();
310 let _copy_cursor = input.len() - interim.len();
311 let copylen_code: u32 = CommandCopyLenCode(cmd);
313
        let (prev_dist_index, dist_offset) = CommandDistanceIndexAndOffset(cmd, &params.dist);
315 let final_distance: usize;
316 if prev_dist_index == 0 {
317 final_distance = dist_offset as usize;
318 } else {
319 final_distance =
320 (local_dist_cache[prev_dist_index - 1] as isize + dist_offset) as usize;
321 }
322 let copy_len = copylen_code as usize;
323 let actual_copy_len: usize;
324 let max_distance = core::cmp::min(
325 recoder_state.num_bytes_encoded,
326 window_size_from_lgwin(params.lgwin),
327 );
328 assert!(inserts.len() <= mb_len);
329 if inserts.len() != 0 {
330 let mut tmp_inserts = inserts;
331 while tmp_inserts.len() > btypel_sub as usize {
332 let (in_a, in_b) = tmp_inserts.split_at(btypel_sub as usize);
334 if in_a.len() != 0 {
335 if context_type.is_some() {
336 command_queue.push_literals(&in_a);
337 } else if params.high_entropy_detection_quality == 0 {
338 command_queue.push_literals(&in_a);
339 } else {
340 command_queue.push_rand_literals(&in_a);
341 }
342 }
343 mb_len -= in_a.len();
344 tmp_inserts = in_b;
345 btypel_counter += 1;
346 if block_type.btypel.types.len() > btypel_counter {
347 btypel_sub = block_type.btypel.lengths[btypel_counter];
348 command_queue
349 .push_block_switch_literal(block_type.btypel.types[btypel_counter]);
350 } else {
351 btypel_sub = 1u32 << 31;
352 }
353 }
354 if context_type.is_some() {
355 command_queue.push_literals(&tmp_inserts);
356 } else if params.high_entropy_detection_quality == 0 {
357 command_queue.push_literals(&tmp_inserts);
358 } else {
359 command_queue.push_rand_literals(&tmp_inserts);
360 }
361 if tmp_inserts.len() != 0 {
362 mb_len -= tmp_inserts.len();
363 btypel_sub -= tmp_inserts.len() as u32;
364 }
365 }
366 if final_distance > max_distance {
367 assert!(copy_len >= 4);
369 assert!(copy_len < 25);
370 let dictionary_offset = final_distance - max_distance - 1;
371 let ndbits = kBrotliDictionarySizeBitsByLength[copy_len] as usize;
372 let action = dictionary_offset >> ndbits;
373 let word_sub_index = dictionary_offset & ((1 << ndbits) - 1);
374 let word_index =
375 word_sub_index * copy_len + kBrotliDictionaryOffsetsByLength[copy_len] as usize;
376 let raw_word = &kBrotliDictionary[word_index..word_index + copy_len];
377 let mut transformed_word = [0u8; 38];
378 actual_copy_len = TransformDictionaryWord(
379 &mut transformed_word[..],
380 raw_word,
381 copy_len as i32,
382 action as i32,
383 ) as usize;
384 if actual_copy_len <= mb_len {
385 command_queue.push(interface::Command::Dict(interface::DictCommand {
386 word_size: copy_len as u8,
387 transform: action as u8,
388 final_size: actual_copy_len as u8,
389 empty: 0,
390 word_id: word_sub_index as u32,
391 }));
392 mb_len -= actual_copy_len;
393 assert_eq!(
394 InputPair(
395 InputReference {
396 data: transformed_word.split_at(actual_copy_len).0,
397 orig_offset: 0
398 },
399 InputReference::default()
400 ),
401 interim.split_at(actual_copy_len).0
402 );
403 } else if mb_len != 0 {
404 command_queue.push_literals(&interim.split_at(mb_len).0);
407 mb_len = 0;
408 assert_eq!(
409 InputPair(
410 InputReference {
411 data: transformed_word.split_at(mb_len).0,
412 orig_offset: 0
413 },
414 InputReference::default()
415 ),
416 interim.split_at(mb_len).0
417 );
418 }
419 } else {
420 actual_copy_len = core::cmp::min(mb_len, copy_len);
421 if actual_copy_len != 0 {
422 command_queue.push(interface::Command::Copy(interface::CopyCommand {
423 distance: final_distance as u32,
424 num_bytes: actual_copy_len as u32,
425 }));
426 }
427 mb_len -= actual_copy_len;
428 if prev_dist_index != 1 || dist_offset != 0 {
429 let mut tmp_dist_cache = [0i32; kNumDistanceCacheEntries - 1];
431 tmp_dist_cache.clone_from_slice(&local_dist_cache[..kNumDistanceCacheEntries - 1]);
432 local_dist_cache[1..].clone_from_slice(&tmp_dist_cache[..]);
433 local_dist_cache[0] = final_distance as i32;
434 }
435 }
436 {
437 btypec_sub -= 1;
438 if btypec_sub == 0 {
439 btypec_counter += 1;
440 if block_type.btypec.types.len() > btypec_counter {
441 btypec_sub = block_type.btypec.lengths[btypec_counter];
442 command_queue.push(interface::Command::BlockSwitchCommand(
443 interface::BlockSwitch(block_type.btypec.types[btypec_counter]),
444 ));
445 } else {
446 btypec_sub = 1u32 << 31;
447 }
448 }
449 }
450 if copy_len != 0 && cmd.cmd_prefix_ >= 128 {
451 btyped_sub -= 1;
452 if btyped_sub == 0 {
453 btyped_counter += 1;
454 if block_type.btyped.types.len() > btyped_counter {
455 btyped_sub = block_type.btyped.lengths[btyped_counter];
456 command_queue.push(interface::Command::BlockSwitchDistance(
457 interface::BlockSwitch(block_type.btyped.types[btyped_counter]),
458 ));
459 } else {
460 btyped_sub = 1u32 << 31;
461 }
462 }
463 }
464
465 let (copied, remainder) = interim.split_at(actual_copy_len);
466 recoder_state.num_bytes_encoded += copied.len();
467 input_iter = remainder;
468 }
469 recoder_state
470}
471
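/// Records one metablock as IR: builds the prediction-mode header and context maps, optionally
/// runs the stride-selection, CDF-adaptation, and prior-bitmask detection passes over the
/// command stream, then replays the commands into a `CommandQueue` and hands the frozen
/// commands to `callback`.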
472fn LogMetaBlock<'a, Alloc: BrotliAlloc, Cb>(
473 alloc: &mut Alloc,
474 commands: &[Command],
475 input0: &'a [u8],
476 input1: &'a [u8],
477 dist_cache: &[i32; kNumDistanceCacheEntries],
478 recoder_state: &mut RecoderState,
479 block_type: MetaBlockSplitRefs,
480 params: &BrotliEncoderParams,
481 context_type: Option<ContextType>,
482 callback: &mut Cb,
483) where
484 Cb: FnMut(
485 &mut interface::PredictionModeContextMap<InputReferenceMut>,
486 &mut [interface::StaticCommand],
487 InputPair,
488 &mut Alloc,
489 ),
490{
491 let mut local_literal_context_map = [0u8; 256 * 64];
492 let mut local_distance_context_map = [0u8; 256 * 64 + interface::DISTANCE_CONTEXT_MAP_OFFSET];
493 assert_eq!(
494 *block_type.btypel.types.iter().max().unwrap_or(&0) as u32 + 1,
495 block_type.btypel.num_types
496 );
497 assert_eq!(
498 *block_type.btypec.types.iter().max().unwrap_or(&0) as u32 + 1,
499 block_type.btypec.num_types
500 );
501 assert_eq!(
502 *block_type.btyped.types.iter().max().unwrap_or(&0) as u32 + 1,
503 block_type.btyped.num_types
504 );
505 if block_type.literal_context_map.len() <= 256 * 64 {
506 for (index, item) in block_type.literal_context_map.iter().enumerate() {
507 local_literal_context_map[index] = *item as u8;
508 }
509 }
510 if block_type.distance_context_map.len() <= 256 * 64 {
511 for (index, item) in block_type.distance_context_map.iter().enumerate() {
512 local_distance_context_map[interface::DISTANCE_CONTEXT_MAP_OFFSET + index] =
513 *item as u8;
514 }
515 }
516
517 let mut prediction_mode = interface::PredictionModeContextMap::<InputReferenceMut> {
518 literal_context_map: InputReferenceMut {
519 data: local_literal_context_map
520 .split_at_mut(block_type.literal_context_map.len())
521 .0,
522 orig_offset: 0,
523 },
524 predmode_speed_and_distance_context_map: InputReferenceMut {
525 data: local_distance_context_map
526 .split_at_mut(
527 interface::PredictionModeContextMap::<InputReference>::size_of_combined_array(
528 block_type.distance_context_map.len(),
529 ),
530 )
531 .0,
532 orig_offset: 0,
533 },
534 };
535 for item in prediction_mode.get_mixing_values_mut().iter_mut() {
536 *item = prior_eval::WhichPrior::STRIDE1 as u8;
537 }
538 prediction_mode
539 .set_stride_context_speed([params.literal_adaptation[2], params.literal_adaptation[3]]);
540 prediction_mode
541 .set_context_map_speed([params.literal_adaptation[0], params.literal_adaptation[1]]);
542 prediction_mode.set_combined_stride_context_speed([
543 params.literal_adaptation[0],
544 params.literal_adaptation[1],
545 ]);
546
547 prediction_mode.set_literal_prediction_mode(interface::LiteralPredictionModeNibble(
548 context_type.unwrap_or(ContextType::CONTEXT_LSB6) as u8,
549 ));
550 let mut entropy_tally_scratch;
551 let mut entropy_pyramid;
552 if params.stride_detection_quality == 1 || params.stride_detection_quality == 2 {
553 entropy_tally_scratch = find_stride::EntropyTally::<Alloc>::new(alloc, None);
554 entropy_pyramid = find_stride::EntropyPyramid::<Alloc>::new(alloc);
555 entropy_pyramid.populate(input0, input1, &mut entropy_tally_scratch);
556 } else {
557 entropy_tally_scratch = find_stride::EntropyTally::<Alloc>::disabled_placeholder(alloc);
558 entropy_pyramid = find_stride::EntropyPyramid::<Alloc>::disabled_placeholder(alloc);
559 }
560 let input = InputPair(
561 InputReference {
562 data: input0,
563 orig_offset: 0,
564 },
565 InputReference {
566 data: input1,
567 orig_offset: input0.len(),
568 },
569 );
570 let mut best_strides = <Alloc as Allocator<u8>>::AllocatedMemory::default();
571 if params.stride_detection_quality > 2 {
572 let mut stride_selector =
573 stride_eval::StrideEval::<Alloc>::new(alloc, input, &prediction_mode, params);
574 process_command_queue(
575 &mut stride_selector,
576 input,
577 commands,
578 dist_cache,
579 *recoder_state,
580 &block_type,
581 params,
582 context_type,
583 );
584 let ntypes = stride_selector.num_types();
585 best_strides = <Alloc as Allocator<u8>>::alloc_cell(stride_selector.alloc(), ntypes);
586 stride_selector.choose_stride(best_strides.slice_mut());
587 }
588 let mut context_map_entropy = ContextMapEntropy::<Alloc>::new(
589 alloc,
590 input,
591 entropy_pyramid.stride_last_level_range(),
592 prediction_mode,
593 params.cdf_adaptation_detection,
594 );
595 if params.cdf_adaptation_detection != 0 {
596 process_command_queue(
597 &mut context_map_entropy,
598 input,
599 commands,
600 dist_cache,
601 *recoder_state,
602 &block_type,
603 params,
604 context_type,
605 );
606 {
607 let (cm_speed, cm_cost) = context_map_entropy.best_singleton_speeds(true, false);
608 let (stride_speed, stride_cost) =
609 context_map_entropy.best_singleton_speeds(false, false);
610 let (combined_speed, combined_cost) =
611 context_map_entropy.best_singleton_speeds(false, true);
612 best_singleton_speed_log("CM", &cm_speed, &cm_cost);
613 best_singleton_speed_log("stride", &stride_speed, &stride_cost);
614 best_singleton_speed_log("combined", &combined_speed, &combined_cost);
615 }
616
617 let cm_speed = context_map_entropy.best_speeds(true, false);
618 let stride_speed = context_map_entropy.best_speeds(false, false);
619 let combined_speed = context_map_entropy.best_speeds(false, true);
620 let acost = context_map_entropy.best_speeds_costs(true, false);
621 let bcost = context_map_entropy.best_speeds_costs(false, false);
622 let ccost = context_map_entropy.best_speeds_costs(false, true);
623 context_map_entropy
624 .prediction_mode_mut()
625 .set_stride_context_speed(speed_to_tuple(stride_speed));
626 context_map_entropy
627 .prediction_mode_mut()
628 .set_context_map_speed(speed_to_tuple(cm_speed));
629 context_map_entropy
630 .prediction_mode_mut()
631 .set_combined_stride_context_speed(speed_to_tuple(combined_speed));
632
633 best_speed_log("CM", &cm_speed, &acost);
634 best_speed_log("Stride", &stride_speed, &bcost);
635 best_speed_log("StrideCombined", &combined_speed, &ccost);
636 }
637 let mut prior_selector = prior_eval::PriorEval::<Alloc>::new(
638 alloc,
639 input,
640 entropy_pyramid.stride_last_level_range(),
641 context_map_entropy.take_prediction_mode(),
642 params,
643 );
644 if params.prior_bitmask_detection != 0 {
645 process_command_queue(
646 &mut prior_selector,
647 input,
648 commands,
649 dist_cache,
650 *recoder_state,
651 &block_type,
652 params,
653 context_type,
654 );
655 prior_selector.choose_bitmask();
656 }
657 let prediction_mode = prior_selector.take_prediction_mode();
658 prior_selector.free(alloc);
659 let mut command_queue = CommandQueue::new(
660 alloc,
661 commands.len(),
662 prediction_mode,
663 input,
664 params.stride_detection_quality,
665 params.high_entropy_detection_quality,
666 context_map_entropy,
667 best_strides,
668 entropy_tally_scratch,
669 entropy_pyramid,
670 );
671
672 *recoder_state = process_command_queue(
673 &mut command_queue,
674 input,
675 commands,
676 dist_cache,
677 *recoder_state,
678 &block_type,
679 params,
680 context_type,
681 );
682 command_queue.free(callback).unwrap();
683 }
686
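/// Block-length prefix-code table: symbol `i` covers the lengths `[offset, offset + 2^nbits)`.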
687static kBlockLengthPrefixCode: [PrefixCodeRange; BROTLI_NUM_BLOCK_LEN_SYMBOLS] = [
688 PrefixCodeRange {
689 offset: 1u32,
690 nbits: 2u32,
691 },
692 PrefixCodeRange {
693 offset: 5u32,
694 nbits: 2u32,
695 },
696 PrefixCodeRange {
697 offset: 9u32,
698 nbits: 2u32,
699 },
700 PrefixCodeRange {
701 offset: 13u32,
702 nbits: 2u32,
703 },
704 PrefixCodeRange {
705 offset: 17u32,
706 nbits: 3u32,
707 },
708 PrefixCodeRange {
709 offset: 25u32,
710 nbits: 3u32,
711 },
712 PrefixCodeRange {
713 offset: 33u32,
714 nbits: 3u32,
715 },
716 PrefixCodeRange {
717 offset: 41u32,
718 nbits: 3u32,
719 },
720 PrefixCodeRange {
721 offset: 49u32,
722 nbits: 4u32,
723 },
724 PrefixCodeRange {
725 offset: 65u32,
726 nbits: 4u32,
727 },
728 PrefixCodeRange {
729 offset: 81u32,
730 nbits: 4u32,
731 },
732 PrefixCodeRange {
733 offset: 97u32,
734 nbits: 4u32,
735 },
736 PrefixCodeRange {
737 offset: 113u32,
738 nbits: 5u32,
739 },
740 PrefixCodeRange {
741 offset: 145u32,
742 nbits: 5u32,
743 },
744 PrefixCodeRange {
745 offset: 177u32,
746 nbits: 5u32,
747 },
748 PrefixCodeRange {
749 offset: 209u32,
750 nbits: 5u32,
751 },
752 PrefixCodeRange {
753 offset: 241u32,
754 nbits: 6u32,
755 },
756 PrefixCodeRange {
757 offset: 305u32,
758 nbits: 6u32,
759 },
760 PrefixCodeRange {
761 offset: 369u32,
762 nbits: 7u32,
763 },
764 PrefixCodeRange {
765 offset: 497u32,
766 nbits: 8u32,
767 },
768 PrefixCodeRange {
769 offset: 753u32,
770 nbits: 9u32,
771 },
772 PrefixCodeRange {
773 offset: 1265u32,
774 nbits: 10u32,
775 },
776 PrefixCodeRange {
777 offset: 2289u32,
778 nbits: 11u32,
779 },
780 PrefixCodeRange {
781 offset: 4337u32,
782 nbits: 12u32,
783 },
784 PrefixCodeRange {
785 offset: 8433u32,
786 nbits: 13u32,
787 },
788 PrefixCodeRange {
789 offset: 16625u32,
790 nbits: 24u32,
791 },
792];
793
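/// Appends the `n_bits` low-order bits of `bits` to `array` at bit position `*pos`. Assumes the
/// byte at the cursor already holds any previously written bits and the following bytes are
/// zero; at most 56 bits may be written per call.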
794fn BrotliWriteBits(n_bits: u8, bits: u64, pos: &mut usize, array: &mut [u8]) {
795 assert_eq!(bits >> n_bits, 0);
796 assert!(n_bits <= 56);
797 let ptr_offset: usize = ((*pos >> 3) as u32) as usize;
798 let mut v = array[ptr_offset] as u64;
799 v |= bits << ((*pos) as u64 & 7);
800 array[ptr_offset + 7] = (v >> 56) as u8;
801 array[ptr_offset + 6] = ((v >> 48) & 0xff) as u8;
802 array[ptr_offset + 5] = ((v >> 40) & 0xff) as u8;
803 array[ptr_offset + 4] = ((v >> 32) & 0xff) as u8;
804 array[ptr_offset + 3] = ((v >> 24) & 0xff) as u8;
805 array[ptr_offset + 2] = ((v >> 16) & 0xff) as u8;
806 array[ptr_offset + 1] = ((v >> 8) & 0xff) as u8;
807 array[ptr_offset] = (v & 0xff) as u8;
808 *pos += n_bits as usize
809}
810
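/// Zeroes the byte the next write will land in; the caller must be byte-aligned.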
811fn BrotliWriteBitsPrepareStorage(pos: usize, array: &mut [u8]) {
812 assert_eq!(pos & 7, 0);
813 array[pos >> 3] = 0;
814}
815
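/// Stores the code-length code: 2 bits saying how many leading entries (in `kStorageOrder`) are
/// skipped, then each remaining code length encoded with a small fixed Huffman code.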
816fn BrotliStoreHuffmanTreeOfHuffmanTreeToBitMask(
817 num_codes: i32,
818 code_length_bitdepth: &[u8],
819 storage_ix: &mut usize,
820 storage: &mut [u8],
821) {
822 static kStorageOrder: [u8; 18] = [1, 2, 3, 4, 0, 5, 17, 6, 16, 7, 8, 9, 10, 11, 12, 13, 14, 15];
823 static kHuffmanBitLengthHuffmanCodeSymbols: [u8; 6] = [0, 7, 3, 2, 1, 15];
824 static kHuffmanBitLengthHuffmanCodeBitLengths: [u8; 6] = [2, 4, 3, 2, 2, 4];
825 let mut skip_some: u64 = 0u64;
826 let mut codes_to_store: u64 = 18;
827 if num_codes > 1i32 {
828 'break5: while codes_to_store > 0 {
829 {
830 if code_length_bitdepth
831 [(kStorageOrder[codes_to_store.wrapping_sub(1) as usize] as usize)]
832 as i32
833 != 0i32
834 {
835 {
836 break 'break5;
837 }
838 }
839 }
840 codes_to_store = codes_to_store.wrapping_sub(1);
841 }
842 }
843 if code_length_bitdepth[(kStorageOrder[0] as usize)] as i32 == 0i32
844 && (code_length_bitdepth[(kStorageOrder[1] as usize)] as i32 == 0i32)
845 {
846 skip_some = 2;
847 if code_length_bitdepth[(kStorageOrder[2] as usize)] as i32 == 0i32 {
848 skip_some = 3;
849 }
850 }
851 BrotliWriteBits(2, skip_some, storage_ix, storage);
852 {
853 let mut i: u64;
854 i = skip_some;
855 while i < codes_to_store {
856 {
857 let l: usize = code_length_bitdepth[(kStorageOrder[i as usize] as usize)] as usize;
858 BrotliWriteBits(
859 kHuffmanBitLengthHuffmanCodeBitLengths[l],
860 kHuffmanBitLengthHuffmanCodeSymbols[l] as u64,
861 storage_ix,
862 storage,
863 );
864 }
865 i = i.wrapping_add(1);
866 }
867 }
868}
869
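/// Writes the run-length-encoded depth stream using the code-length code; symbols 16 and 17 are
/// followed by 2 and 3 extra repeat bits respectively.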
870fn BrotliStoreHuffmanTreeToBitMask(
871 huffman_tree_size: usize,
872 huffman_tree: &[u8],
873 huffman_tree_extra_bits: &[u8],
874 code_length_bitdepth: &[u8],
875 code_length_bitdepth_symbols: &[u16],
876 storage_ix: &mut usize,
877 storage: &mut [u8],
878) {
879 let mut i: usize;
880 i = 0usize;
881 while i < huffman_tree_size {
882 {
883 let ix: usize = huffman_tree[i] as usize;
884 BrotliWriteBits(
885 code_length_bitdepth[ix],
886 code_length_bitdepth_symbols[ix] as (u64),
887 storage_ix,
888 storage,
889 );
890 if ix == 16usize {
891 BrotliWriteBits(2, huffman_tree_extra_bits[i] as (u64), storage_ix, storage);
892 } else if ix == 17usize {
893 BrotliWriteBits(3, huffman_tree_extra_bits[i] as (u64), storage_ix, storage);
894 }
895 }
896 i = i.wrapping_add(1);
897 }
898}
899
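/// Serializes `depths` as a complex Huffman code: run-length encodes the depth sequence, builds
/// a Huffman code over the 18 code-length symbols, stores that code, then the encoded stream.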
900pub fn BrotliStoreHuffmanTree(
901 depths: &[u8],
902 num: usize,
903 tree: &mut [HuffmanTree],
904 storage_ix: &mut usize,
905 storage: &mut [u8],
906) {
907 let mut huffman_tree = [0u8; 704];
908 let mut huffman_tree_extra_bits = [0u8; 704];
909 let mut huffman_tree_size = 0usize;
910 let mut code_length_bitdepth = [0u8; 18];
911 let mut code_length_bitdepth_symbols = [0u16; 18];
912 let mut huffman_tree_histogram = [0u32; 18];
913 let mut i: usize;
914 let mut num_codes: i32 = 0i32;
915 let mut code: usize = 0usize;
917 BrotliWriteHuffmanTree(
918 depths,
919 num,
920 &mut huffman_tree_size,
921 &mut huffman_tree[..],
922 &mut huffman_tree_extra_bits[..],
923 );
924 i = 0usize;
925 while i < huffman_tree_size {
926 {
927 let _rhs = 1;
928 let _lhs = &mut huffman_tree_histogram[huffman_tree[i] as usize];
929 *_lhs = (*_lhs).wrapping_add(_rhs as u32);
930 }
931 i = i.wrapping_add(1);
932 }
933 i = 0usize;
934 'break3: while i < 18usize {
935 {
936 if huffman_tree_histogram[i] != 0 {
937 if num_codes == 0i32 {
938 code = i;
939 num_codes = 1i32;
940 } else if num_codes == 1i32 {
941 num_codes = 2i32;
942 {
943 {
944 break 'break3;
945 }
946 }
947 }
948 }
949 }
950 i = i.wrapping_add(1);
951 }
952 BrotliCreateHuffmanTree(
953 &mut huffman_tree_histogram,
954 18usize,
955 5i32,
956 tree,
957 &mut code_length_bitdepth,
958 );
959 BrotliConvertBitDepthsToSymbols(
960 &mut code_length_bitdepth,
961 18usize,
962 &mut code_length_bitdepth_symbols,
963 );
964 BrotliStoreHuffmanTreeOfHuffmanTreeToBitMask(
965 num_codes,
966 &code_length_bitdepth,
967 storage_ix,
968 storage,
969 );
970 if num_codes == 1i32 {
971 code_length_bitdepth[code] = 0u8;
972 }
973 BrotliStoreHuffmanTreeToBitMask(
974 huffman_tree_size,
975 &huffman_tree,
976 &huffman_tree_extra_bits,
977 &code_length_bitdepth,
978 &code_length_bitdepth_symbols,
979 storage_ix,
980 storage,
981 );
982}
983
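/// Emits a precomputed 40-bit sequence that sets up the code-length code used when depths are
/// written with `kCodeLengthDepth`/`kCodeLengthBits` below, so no histogram pass is needed.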
984fn StoreStaticCodeLengthCode(storage_ix: &mut usize, storage: &mut [u8]) {
985 BrotliWriteBits(
986 40,
        (0xffu32 as u64) << 32 | 0x55555554u32 as u64,
988 storage_ix,
989 storage,
990 );
991}
992
993pub struct SimpleSortHuffmanTree {}
994
995impl HuffmanComparator for SimpleSortHuffmanTree {
996 fn Cmp(&self, v0: &HuffmanTree, v1: &HuffmanTree) -> bool {
997 v0.total_count_ < v1.total_count_
998 }
999}
1000
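/// Cheaper tree builder used by the fast paths: the 1- to 4-symbol cases are stored as a simple
/// code; otherwise `count_limit` is doubled until a depth-limited (14-bit) tree fits, and the
/// depths are written with the canned code-length code above.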
1001pub fn BrotliBuildAndStoreHuffmanTreeFast<AllocHT: alloc::Allocator<HuffmanTree>>(
1002 m: &mut AllocHT,
1003 histogram: &[u32],
1004 histogram_total: usize,
1005 max_bits: usize,
1006 depth: &mut [u8],
1007 bits: &mut [u16],
1008 storage_ix: &mut usize,
1009 storage: &mut [u8],
1010) {
1011 let mut count: u64 = 0;
1012 let mut symbols: [u64; 4] = [0; 4];
1013 let mut length: u64 = 0;
1014 let mut total: usize = histogram_total;
1015 while total != 0usize {
1016 if histogram[(length as usize)] != 0 {
1017 if count < 4 {
1018 symbols[count as usize] = length;
1019 }
1020 count = count.wrapping_add(1);
1021 total = total.wrapping_sub(histogram[(length as usize)] as usize);
1022 }
1023 length = length.wrapping_add(1);
1024 }
1025 if count <= 1 {
1026 BrotliWriteBits(4, 1, storage_ix, storage);
1027 BrotliWriteBits(max_bits as u8, symbols[0], storage_ix, storage);
1028 depth[symbols[0] as usize] = 0u8;
1029 bits[symbols[0] as usize] = 0u16;
1030 return;
1031 }
    for depth_elem in depth[..(length as usize)].iter_mut() {
        *depth_elem = 0;
    }
    {
        let max_tree_size: u64 = (2u64).wrapping_mul(length).wrapping_add(1);
        let mut tree = if max_tree_size != 0 {
            m.alloc_cell(max_tree_size as usize)
        } else {
            AllocHT::AllocatedMemory::default()
        };
        let mut count_limit: u32;
1046 count_limit = 1u32;
1047 'break11: loop {
1048 {
1049 let mut node_index: u32 = 0u32;
1050 let mut l: u64;
1051 l = length;
1052 while l != 0 {
1053 l = l.wrapping_sub(1);
1054 if histogram[(l as usize)] != 0 {
1055 if histogram[(l as usize)] >= count_limit {
1056 InitHuffmanTree(
1057 &mut tree.slice_mut()[(node_index as usize)],
1058 histogram[(l as usize)],
1059 -1i16,
1060 l as i16,
1061 );
1062 } else {
1063 InitHuffmanTree(
1064 &mut tree.slice_mut()[(node_index as usize)],
1065 count_limit,
1066 -1i16,
1067 l as i16,
1068 );
1069 }
1070 node_index = node_index.wrapping_add(1);
1071 }
1072 }
1073 {
1074 let n: i32 = node_index as i32;
1075
1076 let mut i: i32 = 0i32;
1077 let mut j: i32 = n + 1i32;
1078 let mut k: i32;
1079 SortHuffmanTreeItems(tree.slice_mut(), n as usize, SimpleSortHuffmanTree {});
1080 let sentinel: HuffmanTree = NewHuffmanTree(!(0u32), -1i16, -1i16);
1081 tree.slice_mut()[(node_index.wrapping_add(1) as usize)] = sentinel;
1082 tree.slice_mut()[(node_index as usize)] = sentinel;
1083 node_index = node_index.wrapping_add(2);
1084 k = n - 1i32;
1085 while k > 0i32 {
1086 {
1087 let left: i32;
1088 let right: i32;
1089 if (tree.slice()[(i as usize)]).total_count_
1090 <= (tree.slice()[(j as usize)]).total_count_
1091 {
1092 left = i;
1093 i += 1;
1094 } else {
1095 left = j;
1096 j += 1;
1097 }
1098 if (tree.slice()[(i as usize)]).total_count_
1099 <= (tree.slice()[(j as usize)]).total_count_
1100 {
1101 right = i;
1102 i += 1;
1103 } else {
1104 right = j;
1105 j += 1;
1106 }
1107 let sum_total = (tree.slice()[(left as usize)])
1108 .total_count_
1109 .wrapping_add((tree.slice()[(right as usize)]).total_count_);
1110 let tree_ind = (node_index.wrapping_sub(1) as usize);
1111 (tree.slice_mut()[tree_ind]).total_count_ = sum_total;
1112 (tree.slice_mut()[tree_ind]).index_left_ = left as i16;
1113 (tree.slice_mut()[tree_ind]).index_right_or_value_ = right as i16;
1114 tree.slice_mut()[(node_index as usize)] = sentinel;
1115 node_index = node_index.wrapping_add(1);
1116 }
1117 k -= 1;
1118 }
1119 if BrotliSetDepth(2i32 * n - 1i32, tree.slice_mut(), depth, 14i32) {
1120 {
1121 break 'break11;
1122 }
1123 }
1124 }
1125 }
1126 count_limit = count_limit.wrapping_mul(2);
1127 }
1128 {
1129 m.free_cell(core::mem::take(&mut tree));
1130 }
1131 }
1132 BrotliConvertBitDepthsToSymbols(depth, length as usize, bits);
1133 if count <= 4 {
1134 let mut i: u64;
1135 BrotliWriteBits(2, 1, storage_ix, storage);
1136 BrotliWriteBits(2, count.wrapping_sub(1), storage_ix, storage);
1137 i = 0;
1138 while i < count {
1139 {
1140 let mut j: u64;
1141 j = i.wrapping_add(1);
1142 while j < count {
1143 {
1144 if (depth[(symbols[j as usize] as usize)] as i32)
1145 < depth[(symbols[i as usize] as usize)] as i32
1146 {
1147 symbols.swap(j as usize, i as usize);
1148 }
1149 }
1150 j = j.wrapping_add(1);
1151 }
1152 }
1153 i = i.wrapping_add(1);
1154 }
1155 if count == 2 {
1156 BrotliWriteBits(max_bits as u8, symbols[0], storage_ix, storage);
1157 BrotliWriteBits(max_bits as u8, symbols[1], storage_ix, storage);
1158 } else if count == 3 {
1159 BrotliWriteBits(max_bits as u8, symbols[0], storage_ix, storage);
1160 BrotliWriteBits(max_bits as u8, symbols[1], storage_ix, storage);
1161 BrotliWriteBits(max_bits as u8, symbols[2], storage_ix, storage);
1162 } else {
1163 BrotliWriteBits(max_bits as u8, symbols[0], storage_ix, storage);
1164 BrotliWriteBits(max_bits as u8, symbols[1], storage_ix, storage);
1165 BrotliWriteBits(max_bits as u8, symbols[2], storage_ix, storage);
1166 BrotliWriteBits(max_bits as u8, symbols[3], storage_ix, storage);
1167 BrotliWriteBits(
1168 1,
1169 if depth[(symbols[0] as usize)] as i32 == 1i32 {
1170 1i32
1171 } else {
1172 0i32
1173 } as (u64),
1174 storage_ix,
1175 storage,
1176 );
1177 }
1178 } else {
1179 let mut previous_value: u8 = 8u8;
1180 let mut i: u64;
1181 StoreStaticCodeLengthCode(storage_ix, storage);
1182 i = 0;
1183 while i < length {
1184 let value: u8 = depth[(i as usize)];
1185 let mut reps: u64 = 1;
1186 let mut k: u64;
1187 k = i.wrapping_add(1);
1188 while k < length && (depth[(k as usize)] as i32 == value as i32) {
1189 {
1190 reps = reps.wrapping_add(1);
1191 }
1192 k = k.wrapping_add(1);
1193 }
1194 i = i.wrapping_add(reps);
1195 if value as i32 == 0i32 {
1196 BrotliWriteBits(
1197 kZeroRepsDepth[reps as usize] as u8,
1198 kZeroRepsBits[reps as usize] as u64,
1199 storage_ix,
1200 storage,
1201 );
1202 } else {
1203 if previous_value as i32 != value as i32 {
1204 BrotliWriteBits(
1205 kCodeLengthDepth[value as usize],
1206 kCodeLengthBits[value as usize] as (u64),
1207 storage_ix,
1208 storage,
1209 );
1210 reps = reps.wrapping_sub(1);
1211 }
1212 if reps < 3 {
1213 while reps != 0 {
1214 reps = reps.wrapping_sub(1);
1215 BrotliWriteBits(
1216 kCodeLengthDepth[value as usize],
1217 kCodeLengthBits[value as usize] as (u64),
1218 storage_ix,
1219 storage,
1220 );
1221 }
1222 } else {
1223 reps = reps.wrapping_sub(3);
1224 BrotliWriteBits(
1225 kNonZeroRepsDepth[reps as usize] as u8,
1226 kNonZeroRepsBits[reps as usize] as u64,
1227 storage_ix,
1228 storage,
1229 );
1230 }
1231 previous_value = value;
1232 }
1233 }
1234 }
1235}
1236
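/// Output of metablock splitting: block splits for literals, commands, and distances, plus the
/// per-block-type histograms and context maps consumed by the bit-stream writer.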
1237pub struct MetaBlockSplit<
1238 Alloc: alloc::Allocator<u8>
1239 + alloc::Allocator<u32>
1240 + alloc::Allocator<HistogramLiteral>
1241 + alloc::Allocator<HistogramCommand>
1242 + alloc::Allocator<HistogramDistance>,
1243> {
1244 pub literal_split: BlockSplit<Alloc>,
1245 pub command_split: BlockSplit<Alloc>,
1246 pub distance_split: BlockSplit<Alloc>,
1247 pub literal_context_map: <Alloc as Allocator<u32>>::AllocatedMemory,
1248 pub literal_context_map_size: usize,
1249 pub distance_context_map: <Alloc as Allocator<u32>>::AllocatedMemory,
1250 pub distance_context_map_size: usize,
1251 pub literal_histograms: <Alloc as Allocator<HistogramLiteral>>::AllocatedMemory,
1252 pub literal_histograms_size: usize,
1253 pub command_histograms: <Alloc as Allocator<HistogramCommand>>::AllocatedMemory,
1254 pub command_histograms_size: usize,
1255 pub distance_histograms: <Alloc as Allocator<HistogramDistance>>::AllocatedMemory,
1256 pub distance_histograms_size: usize,
1257}
1258impl<
1259 Alloc: alloc::Allocator<u8>
1260 + alloc::Allocator<u32>
1261 + alloc::Allocator<HistogramLiteral>
1262 + alloc::Allocator<HistogramCommand>
1263 + alloc::Allocator<HistogramDistance>,
1264 > Default for MetaBlockSplit<Alloc>
1265{
1266 fn default() -> Self {
1267 Self {
1268 literal_split: BlockSplit::<Alloc>::new(),
1269 command_split: BlockSplit::<Alloc>::new(),
1270 distance_split: BlockSplit::<Alloc>::new(),
1271 literal_context_map: <Alloc as Allocator<u32>>::AllocatedMemory::default(),
1272 literal_context_map_size: 0,
1273 distance_context_map: <Alloc as Allocator<u32>>::AllocatedMemory::default(),
1274 distance_context_map_size: 0,
1275 literal_histograms: <Alloc as Allocator<HistogramLiteral>>::AllocatedMemory::default(),
1276 literal_histograms_size: 0,
1277 command_histograms: <Alloc as Allocator<HistogramCommand>>::AllocatedMemory::default(),
1278 command_histograms_size: 0,
1279 distance_histograms: <Alloc as Allocator<HistogramDistance>>::AllocatedMemory::default(
1280 ),
1281 distance_histograms_size: 0,
1282 }
1283 }
1284}
1285
1286impl<
1287 Alloc: alloc::Allocator<u8>
1288 + alloc::Allocator<u32>
1289 + alloc::Allocator<HistogramLiteral>
1290 + alloc::Allocator<HistogramCommand>
1291 + alloc::Allocator<HistogramDistance>,
1292 > MetaBlockSplit<Alloc>
1293{
1294 pub fn new() -> Self {
1295 Self::default()
1296 }
1297
1298 pub fn destroy(&mut self, alloc: &mut Alloc) {
1299 self.literal_split.destroy(alloc);
1300 self.command_split.destroy(alloc);
1301 self.distance_split.destroy(alloc);
1302 <Alloc as Allocator<u32>>::free_cell(alloc, core::mem::take(&mut self.literal_context_map));
1303 self.literal_context_map_size = 0;
1304 <Alloc as Allocator<u32>>::free_cell(
1305 alloc,
1306 core::mem::take(&mut self.distance_context_map),
1307 );
1308 self.distance_context_map_size = 0;
1309 <Alloc as Allocator<HistogramLiteral>>::free_cell(
1310 alloc,
1311 core::mem::take(&mut self.literal_histograms),
1312 );
1313
1314 self.literal_histograms_size = 0;
1315 <Alloc as Allocator<HistogramCommand>>::free_cell(
1316 alloc,
1317 core::mem::take(&mut self.command_histograms),
1318 );
1319 self.command_histograms_size = 0;
1320 <Alloc as Allocator<HistogramDistance>>::free_cell(
1321 alloc,
1322 core::mem::take(&mut self.distance_histograms),
1323 );
1324 self.distance_histograms_size = 0;
1325 }
1326}
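
/// Remembers the last two block types so a new type can be coded relative to them
/// (see `NextBlockTypeCode`: 0 = second-to-last, 1 = last + 1, otherwise type + 2).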
1327#[derive(Clone, Copy)]
1328pub struct BlockTypeCodeCalculator {
1329 pub last_type: usize,
1330 pub second_last_type: usize,
1331}
1332
1333pub struct BlockSplitCode {
1334 pub type_code_calculator: BlockTypeCodeCalculator,
1335 pub type_depths: [u8; 258],
1336 pub type_bits: [u16; 258],
1337 pub length_depths: [u8; 26],
1338 pub length_bits: [u16; 26],
1339}
1340
1341pub struct BlockEncoder<'a, Alloc: alloc::Allocator<u8> + alloc::Allocator<u16>> {
1342 pub histogram_length_: usize,
1347 pub num_block_types_: usize,
1348 pub block_types_: &'a [u8],
1349 pub block_lengths_: &'a [u32],
1350 pub num_blocks_: usize,
1351 pub block_split_code_: BlockSplitCode,
1352 pub block_ix_: usize,
1353 pub block_len_: usize,
1354 pub entropy_ix_: usize,
1355 pub depths_: <Alloc as Allocator<u8>>::AllocatedMemory,
1356 pub bits_: <Alloc as Allocator<u16>>::AllocatedMemory,
1357}
1358
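/// Floor of log2(n); returns 0 for n <= 1.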
fn Log2FloorNonZero(mut n: u64) -> u32 {
    let mut result: u32 = 0;
    loop {
        n >>= 1;
        if n == 0 {
            break;
        }
        result = result.wrapping_add(1);
    }
    result
}
1375
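/// Encodes MLEN for the metablock header: `nibblesbits` is the nibble count minus 4, `numbits`
/// the total bit width, and `bits` holds `length - 1`. For example, length 65536 needs 4
/// nibbles, giving nibblesbits = 0, numbits = 16, bits = 65535.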
1376fn BrotliEncodeMlen(length: u32, bits: &mut u64, numbits: &mut u32, nibblesbits: &mut u32) {
1377 let lg: u32 = (if length == 1u32 {
1378 1u32
1379 } else {
1380 Log2FloorNonZero(length.wrapping_sub(1) as (u64)).wrapping_add(1)
1381 });
1382 let mnibbles: u32 = (if lg < 16u32 {
1383 16u32
1384 } else {
1385 lg.wrapping_add(3)
1386 })
1387 .wrapping_div(4);
1388 assert!(length > 0);
1389 assert!(length <= (1 << 24));
1390 assert!(lg <= 24);
1391 *nibblesbits = mnibbles.wrapping_sub(4);
1392 *numbits = mnibbles.wrapping_mul(4);
1393 *bits = length.wrapping_sub(1) as u64;
1394}
1395
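/// Writes the metablock header: ISLAST (plus ISLASTEMPTY = 0 when final), MNIBBLES, MLEN - 1,
/// and ISUNCOMPRESSED = 0 when not final.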
1396fn StoreCompressedMetaBlockHeader(
1397 is_final_block: i32,
1398 length: usize,
1399 storage_ix: &mut usize,
1400 storage: &mut [u8],
1401) {
1402 let mut lenbits: u64 = 0;
1403 let mut nlenbits: u32 = 0;
1404 let mut nibblesbits: u32 = 0;
1405 BrotliWriteBits(1, is_final_block as (u64), storage_ix, storage);
1406 if is_final_block != 0 {
1407 BrotliWriteBits(1, 0, storage_ix, storage);
1408 }
1409 BrotliEncodeMlen(length as u32, &mut lenbits, &mut nlenbits, &mut nibblesbits);
1410 BrotliWriteBits(2, nibblesbits as u64, storage_ix, storage);
1411 BrotliWriteBits(nlenbits as u8, lenbits, storage_ix, storage);
1412 if is_final_block == 0 {
1413 BrotliWriteBits(1, 0, storage_ix, storage);
1414 }
1415}
1416
1417fn NewBlockTypeCodeCalculator() -> BlockTypeCodeCalculator {
1418 BlockTypeCodeCalculator {
1419 last_type: 1,
1420 second_last_type: 0,
1421 }
1422}
1423
1424fn NewBlockEncoder<'a, Alloc: alloc::Allocator<u8> + alloc::Allocator<u16>>(
1425 histogram_length: usize,
1426 num_block_types: usize,
1427 block_types: &'a [u8],
1428 block_lengths: &'a [u32],
1429 num_blocks: usize,
1430) -> BlockEncoder<'a, Alloc> {
1431 let block_len: usize;
1432 if num_blocks != 0 && !block_lengths.is_empty() {
1433 block_len = block_lengths[0] as usize;
1434 } else {
1435 block_len = 0;
1436 }
1437 BlockEncoder::<Alloc> {
1438 histogram_length_: histogram_length,
1439 num_block_types_: num_block_types,
1440 block_types_: block_types,
1441 block_lengths_: block_lengths,
1442 num_blocks_: num_blocks,
1443 block_split_code_: BlockSplitCode {
1444 type_code_calculator: NewBlockTypeCodeCalculator(),
1445 type_depths: [0; 258],
1446 type_bits: [0; 258],
1447 length_depths: [0; 26],
1448 length_bits: [0; 26],
1449 },
1450 block_ix_: 0,
1451 block_len_: block_len,
1452 entropy_ix_: 0,
1453 depths_: <Alloc as Allocator<u8>>::AllocatedMemory::default(),
1454 bits_: <Alloc as Allocator<u16>>::AllocatedMemory::default(),
1455 }
1456}
1457
1458fn NextBlockTypeCode(calculator: &mut BlockTypeCodeCalculator, type_: u8) -> usize {
1459 let type_code: usize = (if type_ as usize == calculator.last_type.wrapping_add(1) {
1460 1u32
1461 } else if type_ as usize == calculator.second_last_type {
1462 0u32
1463 } else {
1464 (type_ as u32).wrapping_add(2)
1465 }) as usize;
1466 calculator.second_last_type = calculator.last_type;
1467 calculator.last_type = type_ as usize;
1468 type_code
1469}
1470
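/// Maps a block length to its prefix symbol: a coarse bucket first, then a linear scan of
/// `kBlockLengthPrefixCode`.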
1471fn BlockLengthPrefixCode(len: u32) -> u32 {
1472 let mut code: u32 = (if len >= 177u32 {
1473 if len >= 753u32 {
1474 20i32
1475 } else {
1476 14i32
1477 }
1478 } else if len >= 41u32 {
1479 7i32
1480 } else {
1481 0i32
1482 }) as u32;
1483 while code < (26i32 - 1i32) as u32
1484 && (len >= kBlockLengthPrefixCode[code.wrapping_add(1) as usize].offset)
1485 {
1486 code = code.wrapping_add(1);
1487 }
1488 code
1489}
1490
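/// Variable-length count encoding: a single 0 bit for n == 0, otherwise a 1 bit, 3 bits holding
/// floor(log2 n), then the low bits of n - 2^nbits. E.g. n = 5 writes 1, then 2 (3 bits), then
/// 1 (2 bits).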
1491fn StoreVarLenUint8(n: u64, storage_ix: &mut usize, storage: &mut [u8]) {
1492 if n == 0 {
1493 BrotliWriteBits(1, 0, storage_ix, storage);
1494 } else {
1495 let nbits: u8 = Log2FloorNonZero(n) as u8;
1496 BrotliWriteBits(1, 1, storage_ix, storage);
1497 BrotliWriteBits(3, nbits as u64, storage_ix, storage);
1498 BrotliWriteBits(nbits, n.wrapping_sub(1u64 << nbits), storage_ix, storage);
1499 }
1500}
1501
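/// Stores a simple Huffman code over up to four symbols, sorted by depth; with four symbols one
/// extra bit selects between the two possible tree shapes.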
1502fn StoreSimpleHuffmanTree(
1503 depths: &[u8],
1504 symbols: &mut [usize],
1505 num_symbols: usize,
1506 max_bits: usize,
1507 storage_ix: &mut usize,
1508 storage: &mut [u8],
1509) {
1510 BrotliWriteBits(2, 1, storage_ix, storage);
1511 BrotliWriteBits(2, num_symbols.wrapping_sub(1) as u64, storage_ix, storage);
1512 {
1513 let mut i: usize;
1514 i = 0usize;
1515 while i < num_symbols {
1516 {
1517 let mut j: usize;
1518 j = i.wrapping_add(1);
1519 while j < num_symbols {
1520 {
1521 if (depths[symbols[j]] as i32) < depths[symbols[i]] as i32 {
1522 symbols.swap(j, i);
1523 }
1524 }
1525 j = j.wrapping_add(1);
1526 }
1527 }
1528 i = i.wrapping_add(1);
1529 }
1530 }
1531 if num_symbols == 2usize {
1532 BrotliWriteBits(max_bits as u8, symbols[0] as u64, storage_ix, storage);
1533 BrotliWriteBits(max_bits as u8, symbols[1] as u64, storage_ix, storage);
1534 } else if num_symbols == 3usize {
1535 BrotliWriteBits(max_bits as u8, symbols[0] as u64, storage_ix, storage);
1536 BrotliWriteBits(max_bits as u8, symbols[1] as u64, storage_ix, storage);
1537 BrotliWriteBits(max_bits as u8, symbols[2] as u64, storage_ix, storage);
1538 } else {
1539 BrotliWriteBits(max_bits as u8, symbols[0] as u64, storage_ix, storage);
1540 BrotliWriteBits(max_bits as u8, symbols[1] as u64, storage_ix, storage);
1541 BrotliWriteBits(max_bits as u8, symbols[2] as u64, storage_ix, storage);
1542 BrotliWriteBits(max_bits as u8, symbols[3] as u64, storage_ix, storage);
1543 BrotliWriteBits(
1544 1,
1545 if depths[symbols[0]] as i32 == 1i32 {
1546 1i32
1547 } else {
1548 0i32
1549 } as (u64),
1550 storage_ix,
1551 storage,
1552 );
1553 }
1554}
1555
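/// General tree writer: uses the simple code when at most four symbols have nonzero counts,
/// otherwise builds a depth-limited (15-bit) tree and stores it via `BrotliStoreHuffmanTree`.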
1556fn BuildAndStoreHuffmanTree(
1557 histogram: &[u32],
1558 histogram_length: usize,
1559 alphabet_size: usize,
1560 tree: &mut [HuffmanTree],
1561 depth: &mut [u8],
1562 bits: &mut [u16],
1563 storage_ix: &mut usize,
1564 storage: &mut [u8],
1565) {
1566 let mut count: usize = 0usize;
1567 let mut s4 = [0usize; 4];
1568 let mut i: usize;
1569 let mut max_bits: usize = 0usize;
1570 i = 0usize;
1571 'break31: while i < histogram_length {
1572 {
1573 if histogram[i] != 0 {
1574 if count < 4usize {
1575 s4[count] = i;
1576 } else if count > 4usize {
1577 {
1578 break 'break31;
1579 }
1580 }
1581 count = count.wrapping_add(1);
1582 }
1583 }
1584 i = i.wrapping_add(1);
1585 }
1586 {
1587 let mut max_bits_counter: usize = alphabet_size.wrapping_sub(1);
1588 while max_bits_counter != 0 {
1589 max_bits_counter >>= 1i32;
1590 max_bits = max_bits.wrapping_add(1);
1591 }
1592 }
1593 if count <= 1 {
1594 BrotliWriteBits(4, 1, storage_ix, storage);
1595 BrotliWriteBits(max_bits as u8, s4[0] as u64, storage_ix, storage);
1596 depth[s4[0]] = 0u8;
1597 bits[s4[0]] = 0u16;
1598 return;
1599 }
1600
    for depth_elem in depth[..histogram_length].iter_mut() {
        *depth_elem = 0;
    }
1604 BrotliCreateHuffmanTree(histogram, histogram_length, 15i32, tree, depth);
1605 BrotliConvertBitDepthsToSymbols(depth, histogram_length, bits);
1606 if count <= 4usize {
1607 StoreSimpleHuffmanTree(depth, &mut s4[..], count, max_bits, storage_ix, storage);
1608 } else {
1609 BrotliStoreHuffmanTree(depth, histogram_length, tree, storage_ix, storage);
1610 }
1611}
1612
1613fn GetBlockLengthPrefixCode(len: u32, code: &mut usize, n_extra: &mut u32, extra: &mut u32) {
1614 *code = BlockLengthPrefixCode(len) as usize;
1615 *n_extra = kBlockLengthPrefixCode[*code].nbits;
1616 *extra = len.wrapping_sub(kBlockLengthPrefixCode[*code].offset);
1617}
1618
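/// Writes a block switch: the block-type code (omitted for the very first block of a category),
/// followed by the block-length prefix symbol and its extra bits.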
1619fn StoreBlockSwitch(
1620 code: &mut BlockSplitCode,
1621 block_len: u32,
1622 block_type: u8,
1623 is_first_block: i32,
1624 storage_ix: &mut usize,
1625 storage: &mut [u8],
1626) {
1627 let typecode: usize = NextBlockTypeCode(&mut code.type_code_calculator, block_type);
1628 let mut lencode: usize = 0;
1629 let mut len_nextra: u32 = 0;
1630 let mut len_extra: u32 = 0;
1631 if is_first_block == 0 {
1632 BrotliWriteBits(
1633 code.type_depths[typecode] as u8,
1634 code.type_bits[typecode] as (u64),
1635 storage_ix,
1636 storage,
1637 );
1638 }
1639 GetBlockLengthPrefixCode(block_len, &mut lencode, &mut len_nextra, &mut len_extra);
1640 BrotliWriteBits(
1641 code.length_depths[lencode],
1642 code.length_bits[lencode] as (u64),
1643 storage_ix,
1644 storage,
1645 );
1646 BrotliWriteBits(len_nextra as u8, len_extra as (u64), storage_ix, storage);
1647}
1648
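/// Histograms the type and length codes of a block split, stores the number of block types,
/// and, when there is more than one type, stores both Huffman codes and the first block's
/// switch.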
1649fn BuildAndStoreBlockSplitCode(
1650 types: &[u8],
1651 lengths: &[u32],
1652 num_blocks: usize,
1653 num_types: usize,
1654 tree: &mut [HuffmanTree],
1655 code: &mut BlockSplitCode,
1656 storage_ix: &mut usize,
1657 storage: &mut [u8],
1658) {
1659 let mut type_histo: [u32; 258] = [0; 258];
1660 let mut length_histo: [u32; 26] = [0; 26];
1661 let mut i: usize;
1662 let mut type_code_calculator = NewBlockTypeCodeCalculator();
1663 i = 0usize;
1664 while i < num_blocks {
1665 {
1666 let type_code: usize = NextBlockTypeCode(&mut type_code_calculator, types[i]);
1667 if i != 0usize {
1668 let _rhs = 1;
1669 let _lhs = &mut type_histo[type_code];
1670 *_lhs = (*_lhs).wrapping_add(_rhs as u32);
1671 }
1672 {
1673 let _rhs = 1;
1674 let _lhs = &mut length_histo[BlockLengthPrefixCode(lengths[i]) as usize];
1675 *_lhs = (*_lhs).wrapping_add(_rhs as u32);
1676 }
1677 }
1678 i = i.wrapping_add(1);
1679 }
1680 StoreVarLenUint8(num_types.wrapping_sub(1) as u64, storage_ix, storage);
1681 if num_types > 1 {
1682 BuildAndStoreHuffmanTree(
1683 &mut type_histo[0..],
1684 num_types.wrapping_add(2),
1685 num_types.wrapping_add(2),
1686 tree,
1687 &mut code.type_depths[0..],
1688 &mut code.type_bits[0..],
1689 storage_ix,
1690 storage,
1691 );
1692 BuildAndStoreHuffmanTree(
1693 &mut length_histo[0..],
            super::constants::BROTLI_NUM_BLOCK_LEN_SYMBOLS,
            super::constants::BROTLI_NUM_BLOCK_LEN_SYMBOLS,
1696 tree,
1697 &mut code.length_depths[0..],
1698 &mut code.length_bits[0..],
1699 storage_ix,
1700 storage,
1701 );
1702 StoreBlockSwitch(code, lengths[0], types[0], 1i32, storage_ix, storage);
1703 }
1704}
1705
1706fn BuildAndStoreBlockSwitchEntropyCodes<Alloc: alloc::Allocator<u8> + alloc::Allocator<u16>>(
1707 xself: &mut BlockEncoder<'_, Alloc>,
1708 tree: &mut [HuffmanTree],
1709 storage_ix: &mut usize,
1710 storage: &mut [u8],
1711) {
1712 BuildAndStoreBlockSplitCode(
1713 xself.block_types_,
1714 xself.block_lengths_,
1715 xself.num_blocks_,
1716 xself.num_block_types_,
1717 tree,
1718 &mut xself.block_split_code_,
1719 storage_ix,
1720 storage,
1721 );
1722}
1723
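/// Stores the context map in which every context of block type `i` maps to cluster `i`, using
/// the run-length-encoded form so the identity structure stays cheap.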
1724fn StoreTrivialContextMap(
1725 num_types: usize,
1726 context_bits: usize,
1727 tree: &mut [HuffmanTree],
1728 storage_ix: &mut usize,
1729 storage: &mut [u8],
1730) {
1731 StoreVarLenUint8(num_types.wrapping_sub(1) as u64, storage_ix, storage);
1732 if num_types > 1 {
1733 let repeat_code: usize = context_bits.wrapping_sub(1u32 as usize);
1734 let repeat_bits: usize = (1u32 << repeat_code).wrapping_sub(1) as usize;
1735 let alphabet_size: usize = num_types.wrapping_add(repeat_code);
1736 let mut histogram: [u32; 272] = [0; 272];
1737 let mut depths: [u8; 272] = [0; 272];
1738 let mut bits: [u16; 272] = [0; 272];
1739 let mut i: usize;
1740 BrotliWriteBits(1u8, 1u64, storage_ix, storage);
1741 BrotliWriteBits(4u8, repeat_code.wrapping_sub(1) as u64, storage_ix, storage);
1742 histogram[repeat_code] = num_types as u32;
1743 histogram[0] = 1u32;
1744 i = context_bits;
1745 while i < alphabet_size {
1746 {
1747 histogram[i] = 1u32;
1748 }
1749 i = i.wrapping_add(1);
1750 }
1751 BuildAndStoreHuffmanTree(
1752 &mut histogram[..],
1753 alphabet_size,
1754 alphabet_size,
1755 tree,
1756 &mut depths[..],
1757 &mut bits[..],
1758 storage_ix,
1759 storage,
1760 );
1761 i = 0usize;
1762 while i < num_types {
1763 {
1764 let code: usize = if i == 0usize {
1765 0usize
1766 } else {
1767 i.wrapping_add(context_bits).wrapping_sub(1)
1768 };
1769 BrotliWriteBits(depths[code], bits[code] as (u64), storage_ix, storage);
1770 BrotliWriteBits(
1771 depths[repeat_code],
1772 bits[repeat_code] as (u64),
1773 storage_ix,
1774 storage,
1775 );
1776 BrotliWriteBits(repeat_code as u8, repeat_bits as u64, storage_ix, storage);
1777 }
1778 i = i.wrapping_add(1);
1779 }
1780 BrotliWriteBits(1, 1, storage_ix, storage);
1781 }
1782}
1783
1784fn IndexOf(v: &[u8], v_size: usize, value: u8) -> usize {
1785 let mut i: usize = 0usize;
1786 while i < v_size {
1787 {
1788 if v[i] as i32 == value as i32 {
1789 return i;
1790 }
1791 }
1792 i = i.wrapping_add(1);
1793 }
1794 i
1795}
1796
1797fn MoveToFront(v: &mut [u8], index: usize) {
1798 let value: u8 = v[index];
1799 let mut i: usize;
1800 i = index;
1801 while i != 0usize {
1802 {
1803 v[i] = v[i.wrapping_sub(1)];
1804 }
1805 i = i.wrapping_sub(1);
1806 }
1807 v[0] = value;
1808}
1809
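/// Move-to-front transform: each value is replaced by its current index in the MTF table, which
/// is then rotated so recently used values get small indices.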
1810fn MoveToFrontTransform(v_in: &[u32], v_size: usize, v_out: &mut [u32]) {
1811 let mut i: usize;
1812 let mut mtf: [u8; 256] = [0; 256];
1813 let mut max_value: u32;
1814 if v_size == 0usize {
1815 return;
1816 }
1817 max_value = v_in[0];
1818 i = 1;
1819 while i < v_size {
1820 {
1821 if v_in[i] > max_value {
1822 max_value = v_in[i];
1823 }
1824 }
1825 i = i.wrapping_add(1);
1826 }
1828 i = 0usize;
1829 while i <= max_value as usize {
1830 {
1831 mtf[i] = i as u8;
1832 }
1833 i = i.wrapping_add(1);
1834 }
1835 {
1836 let mtf_size: usize = max_value.wrapping_add(1) as usize;
1837 i = 0usize;
1838 while i < v_size {
1839 {
1840 let index: usize = IndexOf(&mtf[..], mtf_size, v_in[i] as u8);
1842 v_out[i] = index as u32;
1843 MoveToFront(&mut mtf[..], index);
1844 }
1845 i = i.wrapping_add(1);
1846 }
1847 }
1848}
1849
1850fn brotli_max_uint32_t(a: u32, b: u32) -> u32 {
1851 if a > b {
1852 a
1853 } else {
1854 b
1855 }
1856}
1857
1858fn brotli_min_uint32_t(a: u32, b: u32) -> u32 {
1859 if a < b {
1860 a
1861 } else {
1862 b
1863 }
1864}
1865
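/// Run-length codes the zero runs of the (move-to-front-transformed) context map in place:
/// nonzero symbols are shifted up by the number of run-length prefixes, and each zero run
/// becomes a prefix symbol with its extra bits packed above bit 9.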
1866fn RunLengthCodeZeros(
1867 in_size: usize,
1868 v: &mut [u32],
1869 out_size: &mut usize,
1870 max_run_length_prefix: &mut u32,
1871) {
1872 let mut max_reps: u32 = 0u32;
1873 let mut i: usize;
1874 let mut max_prefix: u32;
1875 i = 0usize;
1876 while i < in_size {
1877 let mut reps: u32 = 0u32;
1878 while i < in_size && (v[i] != 0u32) {
1879 i = i.wrapping_add(1);
1880 }
1881 while i < in_size && (v[i] == 0u32) {
1882 {
1883 reps = reps.wrapping_add(1);
1884 }
1885 i = i.wrapping_add(1);
1886 }
1887 max_reps = brotli_max_uint32_t(reps, max_reps);
1888 }
1889 max_prefix = if max_reps > 0u32 {
1890 Log2FloorNonZero(max_reps as (u64))
1891 } else {
1892 0u32
1893 };
1894 max_prefix = brotli_min_uint32_t(max_prefix, *max_run_length_prefix);
1895 *max_run_length_prefix = max_prefix;
1896 *out_size = 0usize;
1897 i = 0usize;
1898 while i < in_size {
1900 if v[i] != 0u32 {
1901 v[*out_size] = (v[i]).wrapping_add(*max_run_length_prefix);
1902 i = i.wrapping_add(1);
1903 *out_size = out_size.wrapping_add(1);
1904 } else {
1905 let mut reps: u32 = 1u32;
1906 let mut k: usize;
1907 k = i.wrapping_add(1);
1908 while k < in_size && (v[k] == 0u32) {
1909 {
1910 reps = reps.wrapping_add(1);
1911 }
1912 k = k.wrapping_add(1);
1913 }
1914 i = i.wrapping_add(reps as usize);
1915 while reps != 0u32 {
1916 if reps < 2u32 << max_prefix {
1917 let run_length_prefix: u32 = Log2FloorNonZero(reps as (u64));
1918 let extra_bits: u32 = reps.wrapping_sub(1u32 << run_length_prefix);
1919 v[*out_size] = run_length_prefix.wrapping_add(extra_bits << 9);
1920 *out_size = out_size.wrapping_add(1);
1921 {
1922 {
1923 break;
1924 }
1925 }
1926 } else {
1927 let extra_bits: u32 = (1u32 << max_prefix).wrapping_sub(1);
1928 v[*out_size] = max_prefix.wrapping_add(extra_bits << 9);
1929 reps = reps.wrapping_sub((2u32 << max_prefix).wrapping_sub(1));
1930 *out_size = out_size.wrapping_add(1);
1931 }
1932 }
1933 }
1934 }
1935}
1936
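/// Stores a context map: move-to-front transform, zero-run-length coding, a Huffman code over
/// cluster ids plus run-length prefixes, the coded symbols, and finally the 1 bit that tells
/// the decoder to apply the inverse MTF.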
1937fn EncodeContextMap<AllocU32: alloc::Allocator<u32>>(
1938 m: &mut AllocU32,
1939 context_map: &[u32],
1940 context_map_size: usize,
1941 num_clusters: usize,
1942 tree: &mut [HuffmanTree],
1943 storage_ix: &mut usize,
1944 storage: &mut [u8],
1945) {
1946 let mut i: usize;
1947 let mut rle_symbols: AllocU32::AllocatedMemory;
1948 let mut max_run_length_prefix: u32 = 6u32;
1949 let mut num_rle_symbols: usize = 0usize;
1950 static kSymbolMask: u32 = (1u32 << 9) - 1;
1951 let mut depths: [u8; 272] = [0; 272];
1952 let mut bits: [u16; 272] = [0; 272];
1953 StoreVarLenUint8(num_clusters.wrapping_sub(1) as u64, storage_ix, storage);
1954 if num_clusters == 1 {
1955 return;
1956 }
1957 rle_symbols = if context_map_size != 0 {
1958 m.alloc_cell(context_map_size)
1959 } else {
1960 AllocU32::AllocatedMemory::default()
1961 };
1962 MoveToFrontTransform(context_map, context_map_size, rle_symbols.slice_mut());
1963 RunLengthCodeZeros(
1964 context_map_size,
1965 rle_symbols.slice_mut(),
1966 &mut num_rle_symbols,
1967 &mut max_run_length_prefix,
1968 );
1969 let mut histogram: [u32; 272] = [0; 272];
1970 i = 0usize;
1971 while i < num_rle_symbols {
1972 {
1973 let _rhs = 1;
1974 let _lhs = &mut histogram[(rle_symbols.slice()[i] & kSymbolMask) as usize];
1975 *_lhs = (*_lhs).wrapping_add(_rhs as u32);
1976 }
1977 i = i.wrapping_add(1);
1978 }
1979 {
1980 let use_rle = max_run_length_prefix > 0;
1981 BrotliWriteBits(1, u64::from(use_rle), storage_ix, storage);
1982 if use_rle {
1983 BrotliWriteBits(
1984 4,
1985 max_run_length_prefix.wrapping_sub(1) as (u64),
1986 storage_ix,
1987 storage,
1988 );
1989 }
1990 }
1991 BuildAndStoreHuffmanTree(
1992 &mut histogram[..],
1993 num_clusters.wrapping_add(max_run_length_prefix as usize),
1994 num_clusters.wrapping_add(max_run_length_prefix as usize),
1995 tree,
1996 &mut depths[..],
1997 &mut bits[..],
1998 storage_ix,
1999 storage,
2000 );
2001 i = 0usize;
2002 while i < num_rle_symbols {
2003 {
2004 let rle_symbol: u32 = rle_symbols.slice()[i] & kSymbolMask;
2005 let extra_bits_val: u32 = rle_symbols.slice()[i] >> 9;
2006 BrotliWriteBits(
2007 depths[rle_symbol as usize],
2008 bits[rle_symbol as usize] as (u64),
2009 storage_ix,
2010 storage,
2011 );
2012 if rle_symbol > 0u32 && (rle_symbol <= max_run_length_prefix) {
2013 BrotliWriteBits(
2014 rle_symbol as u8,
2015 extra_bits_val as (u64),
2016 storage_ix,
2017 storage,
2018 );
2019 }
2020 }
2021 i = i.wrapping_add(1);
2022 }
2023 BrotliWriteBits(1, 1, storage_ix, storage);
2024 m.free_cell(rle_symbols);
2025}
2026
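// Allocates the per-block-type depth and bit tables inside the BlockEncoder and
// builds and stores one Huffman code per histogram.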
2027fn BuildAndStoreEntropyCodes<
2028 Alloc: alloc::Allocator<u8> + alloc::Allocator<u16>,
2029 HistogramType: SliceWrapper<u32>,
2030>(
2031 m: &mut Alloc,
2032 xself: &mut BlockEncoder<Alloc>,
2033 histograms: &[HistogramType],
2034 histograms_size: usize,
2035 alphabet_size: usize,
2036 tree: &mut [HuffmanTree],
2037 storage_ix: &mut usize,
2038 storage: &mut [u8],
2039) {
2040 let table_size: usize = histograms_size.wrapping_mul(xself.histogram_length_);
2041 xself.depths_ = if table_size != 0 {
2042 <Alloc as Allocator<u8>>::alloc_cell(m, table_size)
2043 } else {
2044 <Alloc as Allocator<u8>>::AllocatedMemory::default()
2045 };
2046 xself.bits_ = if table_size != 0 {
2047 <Alloc as Allocator<u16>>::alloc_cell(m, table_size)
2048 } else {
2049 <Alloc as Allocator<u16>>::AllocatedMemory::default()
2050 };
2051 {
2052 let mut i: usize;
2053 i = 0usize;
2054 while i < histograms_size {
2055 {
2056 let ix: usize = i.wrapping_mul(xself.histogram_length_);
2057 BuildAndStoreHuffmanTree(
2058 &(histograms[i]).slice()[0..],
2059 xself.histogram_length_,
2060 alphabet_size,
2061 tree,
2062 &mut xself.depths_.slice_mut()[ix..],
2063 &mut xself.bits_.slice_mut()[ix..],
2064 storage_ix,
2065 storage,
2066 );
2067 }
2068 i = i.wrapping_add(1);
2069 }
2070 }
2071}
2072
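// Writes one symbol with the Huffman code of the current block type, emitting a
// block-switch command first whenever the current block's length runs out.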
2073fn StoreSymbol<Alloc: alloc::Allocator<u8> + alloc::Allocator<u16>>(
2074 xself: &mut BlockEncoder<Alloc>,
2075 symbol: usize,
2076 storage_ix: &mut usize,
2077 storage: &mut [u8],
2078) {
2079 if xself.block_len_ == 0usize {
2080 let block_ix: usize = {
2081 xself.block_ix_ = xself.block_ix_.wrapping_add(1);
2082 xself.block_ix_
2083 };
2084 let block_len: u32 = xself.block_lengths_[block_ix];
2085 let block_type: u8 = xself.block_types_[block_ix];
2086 xself.block_len_ = block_len as usize;
2087 xself.entropy_ix_ = (block_type as usize).wrapping_mul(xself.histogram_length_);
2088 StoreBlockSwitch(
2089 &mut xself.block_split_code_,
2090 block_len,
2091 block_type,
2092 0i32,
2093 storage_ix,
2094 storage,
2095 );
2096 }
2097 xself.block_len_ = xself.block_len_.wrapping_sub(1);
2098 {
2099 let ix: usize = xself.entropy_ix_.wrapping_add(symbol);
2100 BrotliWriteBits(
2101 xself.depths_.slice()[ix],
2102 xself.bits_.slice()[ix] as (u64),
2103 storage_ix,
2104 storage,
2105 );
2106 }
2107}
2108
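// The low 25 bits of copy_len_ hold the copy length; the top bits hold a small
// modifier that is sign-extended and added back to form the copy length code.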
2109fn CommandCopyLenCode(xself: &Command) -> u32 {
2110 let modifier = xself.copy_len_ >> 25;
2111 let delta: i32 = ((modifier | ((modifier & 0x40) << 1)) as u8) as i8 as i32;
2112 ((xself.copy_len_ & 0x1ffffff) as i32 + delta) as u32
2113}
2114fn GetInsertExtra(inscode: u16) -> u32 {
2115 kInsExtra[inscode as usize]
2116}
2117
2118fn GetInsertBase(inscode: u16) -> u32 {
2119 kInsBase[inscode as usize]
2120}
2121
2122fn GetCopyBase(copycode: u16) -> u32 {
2123 kCopyBase[copycode as usize]
2124}
2125
2126fn GetCopyExtra(copycode: u16) -> u32 {
2127 kCopyExtra[copycode as usize]
2128}
2129
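// Writes the insert- and copy-length extra bits of a command in one call, with
// the copy extra bits packed above the insert extra bits.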
2130fn StoreCommandExtra(cmd: &Command, storage_ix: &mut usize, storage: &mut [u8]) {
2131 let copylen_code: u32 = CommandCopyLenCode(cmd);
2132 let inscode: u16 = GetInsertLengthCode(cmd.insert_len_ as usize);
2133 let copycode: u16 = GetCopyLengthCode(copylen_code as usize);
2134 let insnumextra: u32 = GetInsertExtra(inscode);
2135 let insextraval: u64 = cmd.insert_len_.wrapping_sub(GetInsertBase(inscode)) as (u64);
2136 let copyextraval: u64 = copylen_code.wrapping_sub(GetCopyBase(copycode)) as (u64);
2137 let bits: u64 = copyextraval << insnumextra | insextraval;
2138 BrotliWriteBits(
2139 insnumextra.wrapping_add(GetCopyExtra(copycode)) as u8,
2140 bits,
2141 storage_ix,
2142 storage,
2143 );
2144}
2145
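// Literal context id computed from the previous two bytes under the given
// context mode (LSB6, MSB6, UTF8 or signed).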
2146fn Context(p1: u8, p2: u8, mode: ContextType) -> u8 {
2147 match mode {
2148 ContextType::CONTEXT_LSB6 => (p1 as i32 & 0x3fi32) as u8,
2149 ContextType::CONTEXT_MSB6 => (p1 as i32 >> 2) as u8,
2150 ContextType::CONTEXT_UTF8 => {
2151 (kUTF8ContextLookup[p1 as usize] as i32
2152 | kUTF8ContextLookup[(p2 as i32 + 256i32) as usize] as i32) as u8
2153 }
2154 ContextType::CONTEXT_SIGNED => {
2155 (((kSigned3BitContextLookup[p1 as usize] as i32) << 3)
2156 + kSigned3BitContextLookup[p2 as usize] as i32) as u8
2157 }
2158 }
2159 }
2161
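// Like StoreSymbol, but the block type and literal/distance context are first
// mapped through `context_map` to pick the histogram whose code is used.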
2162fn StoreSymbolWithContext<Alloc: alloc::Allocator<u8> + alloc::Allocator<u16>>(
2163 xself: &mut BlockEncoder<Alloc>,
2164 symbol: usize,
2165 context: usize,
2166 context_map: &[u32],
2167 storage_ix: &mut usize,
2168 storage: &mut [u8],
2169 context_bits: usize,
2170) {
2171 if xself.block_len_ == 0usize {
2172 let block_ix: usize = {
2173 xself.block_ix_ = xself.block_ix_.wrapping_add(1);
2174 xself.block_ix_
2175 };
2176 let block_len: u32 = xself.block_lengths_[block_ix];
2177 let block_type: u8 = xself.block_types_[block_ix];
2178 xself.block_len_ = block_len as usize;
2179 xself.entropy_ix_ = (block_type as usize) << context_bits;
2180 StoreBlockSwitch(
2181 &mut xself.block_split_code_,
2182 block_len,
2183 block_type,
2184 0i32,
2185 storage_ix,
2186 storage,
2187 );
2188 }
2189 xself.block_len_ = xself.block_len_.wrapping_sub(1);
2190 {
2191 let histo_ix: usize = context_map[xself.entropy_ix_.wrapping_add(context)] as usize;
2192 let ix: usize = histo_ix
2193 .wrapping_mul(xself.histogram_length_)
2194 .wrapping_add(symbol);
2195 BrotliWriteBits(
2196 xself.depths_.slice()[ix],
2197 xself.bits_.slice()[ix] as (u64),
2198 storage_ix,
2199 storage,
2200 );
2201 }
2202}
2203
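// Copy length proper: the low 25 bits of copy_len_ (the upper bits carry the
// code modifier consumed by CommandCopyLenCode).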
2204fn CommandCopyLen(xself: &Command) -> u32 {
2205 xself.copy_len_ & 0x1ffffffu32
2206}
2207
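// Distance context (0..=3) derived from the command prefix code; short copies of
// length 2, 3 and 4 get contexts 0..=2, everything else context 3.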
2208fn CommandDistanceContext(xself: &Command) -> u32 {
2209 let r: u32 = (xself.cmd_prefix_ as i32 >> 6) as u32;
2210 let c: u32 = (xself.cmd_prefix_ as i32 & 7i32) as u32;
2211 if (r == 0u32 || r == 2u32 || r == 4u32 || r == 7u32) && (c <= 2u32) {
2212 return c;
2213 }
2214 3u32
2215}
2216
2217fn CleanupBlockEncoder<Alloc: alloc::Allocator<u8> + alloc::Allocator<u16>>(
2218 m: &mut Alloc,
2219 xself: &mut BlockEncoder<Alloc>,
2220) {
2221 <Alloc as Allocator<u8>>::free_cell(m, core::mem::take(&mut xself.depths_));
2222 <Alloc as Allocator<u16>>::free_cell(m, core::mem::take(&mut xself.bits_));
2223}
2224
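// Rounds the bit position up to a byte boundary and clears the byte the next
// write will land in.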
2225pub fn JumpToByteBoundary(storage_ix: &mut usize, storage: &mut [u8]) {
2226 *storage_ix = storage_ix.wrapping_add(7u32 as usize) & !7u32 as usize;
2227 storage[(*storage_ix >> 3)] = 0u8;
2228}
2229
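// Full meta-block encoder: header, block-switch codes for the literal, command
// and distance splits, distance parameters, context maps, the three entropy code
// sets, and then every command with its literals, extra bits and distance code.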
2230pub fn BrotliStoreMetaBlock<Alloc: BrotliAlloc, Cb>(
2231 alloc: &mut Alloc,
2232 input: &[u8],
2233 start_pos: usize,
2234 length: usize,
2235 mask: usize,
2236 mut prev_byte: u8,
2237 mut prev_byte2: u8,
2238 is_last: i32,
2239 params: &BrotliEncoderParams,
2240 literal_context_mode: ContextType,
2241 distance_cache: &[i32; kNumDistanceCacheEntries],
2242 commands: &[Command],
2243 n_commands: usize,
2244 mb: &mut MetaBlockSplit<Alloc>,
2245 recoder_state: &mut RecoderState,
2246 storage_ix: &mut usize,
2247 storage: &mut [u8],
2248 callback: &mut Cb,
2249) where
2250 Cb: FnMut(
2251 &mut interface::PredictionModeContextMap<InputReferenceMut>,
2252 &mut [interface::StaticCommand],
2253 InputPair,
2254 &mut Alloc,
2255 ),
2256{
2257 let (input0, input1) = InputPairFromMaskedInput(input, start_pos, length, mask);
2258 if params.log_meta_block {
2259 LogMetaBlock(
2260 alloc,
2261 commands.split_at(n_commands).0,
2262 input0,
2263 input1,
2264 distance_cache,
2265 recoder_state,
2266 block_split_reference(mb),
2267 params,
2268 Some(literal_context_mode),
2269 callback,
2270 );
2271 }
2272 let mut pos: usize = start_pos;
2273 let mut i: usize;
2274 let num_distance_symbols = params.dist.alphabet_size;
2275 let mut num_effective_distance_symbols = num_distance_symbols as usize;
2276 let mut tree: <Alloc as Allocator<HuffmanTree>>::AllocatedMemory;
2277 let _literal_context_lut = BROTLI_CONTEXT_LUT(literal_context_mode);
2278 let mut literal_enc: BlockEncoder<Alloc>;
2279 let mut command_enc: BlockEncoder<Alloc>;
2280 let mut distance_enc: BlockEncoder<Alloc>;
2281 let dist = ¶ms.dist;
2282 if params.large_window && num_effective_distance_symbols > BROTLI_NUM_HISTOGRAM_DISTANCE_SYMBOLS
2283 {
2284 num_effective_distance_symbols = BROTLI_NUM_HISTOGRAM_DISTANCE_SYMBOLS;
2285 }
2286 StoreCompressedMetaBlockHeader(is_last, length, storage_ix, storage);
2287 tree = <Alloc as Allocator<HuffmanTree>>::alloc_cell(alloc, (2 * 704 + 1) as usize);
2292 literal_enc = NewBlockEncoder::<Alloc>(
2293 BROTLI_NUM_LITERAL_SYMBOLS,
2294 mb.literal_split.num_types,
2295 mb.literal_split.types.slice(),
2296 mb.literal_split.lengths.slice(),
2297 mb.literal_split.num_blocks,
2298 );
2299 command_enc = NewBlockEncoder::<Alloc>(
2300 BROTLI_NUM_COMMAND_SYMBOLS,
2301 mb.command_split.num_types,
2302 mb.command_split.types.slice(),
2303 mb.command_split.lengths.slice(),
2304 mb.command_split.num_blocks,
2305 );
2306 distance_enc = NewBlockEncoder::<Alloc>(
2307 num_effective_distance_symbols,
2308 mb.distance_split.num_types,
2309 mb.distance_split.types.slice(),
2310 mb.distance_split.lengths.slice(),
2311 mb.distance_split.num_blocks,
2312 );
2313 BuildAndStoreBlockSwitchEntropyCodes(&mut literal_enc, tree.slice_mut(), storage_ix, storage);
2314 BuildAndStoreBlockSwitchEntropyCodes(&mut command_enc, tree.slice_mut(), storage_ix, storage);
2315 BuildAndStoreBlockSwitchEntropyCodes(&mut distance_enc, tree.slice_mut(), storage_ix, storage);
2316 BrotliWriteBits(2, dist.distance_postfix_bits as (u64), storage_ix, storage);
2317 BrotliWriteBits(
2318 4,
2319 (dist.num_direct_distance_codes >> dist.distance_postfix_bits) as (u64),
2320 storage_ix,
2321 storage,
2322 );
2323 i = 0usize;
2324 while i < mb.literal_split.num_types {
2325 {
2326 BrotliWriteBits(2, literal_context_mode as (u64), storage_ix, storage);
2327 }
2328 i = i.wrapping_add(1);
2329 }
2330 if mb.literal_context_map_size == 0usize {
2331 StoreTrivialContextMap(
2332 mb.literal_histograms_size,
2333 6,
2334 tree.slice_mut(),
2335 storage_ix,
2336 storage,
2337 );
2338 } else {
2339 EncodeContextMap(
2340 alloc,
2341 mb.literal_context_map.slice(),
2342 mb.literal_context_map_size,
2343 mb.literal_histograms_size,
2344 tree.slice_mut(),
2345 storage_ix,
2346 storage,
2347 );
2348 }
2349 if mb.distance_context_map_size == 0usize {
2350 StoreTrivialContextMap(
2351 mb.distance_histograms_size,
2352 2usize,
2353 tree.slice_mut(),
2354 storage_ix,
2355 storage,
2356 );
2357 } else {
2358 EncodeContextMap(
2359 alloc,
2360 mb.distance_context_map.slice(),
2361 mb.distance_context_map_size,
2362 mb.distance_histograms_size,
2363 tree.slice_mut(),
2364 storage_ix,
2365 storage,
2366 );
2367 }
2368 BuildAndStoreEntropyCodes(
2369 alloc,
2370 &mut literal_enc,
2371 mb.literal_histograms.slice(),
2372 mb.literal_histograms_size,
2373 BROTLI_NUM_LITERAL_SYMBOLS,
2374 tree.slice_mut(),
2375 storage_ix,
2376 storage,
2377 );
2378 BuildAndStoreEntropyCodes(
2379 alloc,
2380 &mut command_enc,
2381 mb.command_histograms.slice(),
2382 mb.command_histograms_size,
2383 BROTLI_NUM_COMMAND_SYMBOLS,
2384 tree.slice_mut(),
2385 storage_ix,
2386 storage,
2387 );
2388 BuildAndStoreEntropyCodes(
2389 alloc,
2390 &mut distance_enc,
2391 mb.distance_histograms.slice(),
2392 mb.distance_histograms_size,
2393 num_distance_symbols as usize,
2394 tree.slice_mut(),
2395 storage_ix,
2396 storage,
2397 );
2398 {
2399 <Alloc as Allocator<HuffmanTree>>::free_cell(alloc, core::mem::take(&mut tree));
2400 }
2401 i = 0usize;
2402 while i < n_commands {
2403 {
2404 let cmd: Command = commands[i];
2405 let cmd_code: usize = cmd.cmd_prefix_ as usize;
2406 StoreSymbol(&mut command_enc, cmd_code, storage_ix, storage);
2407 StoreCommandExtra(&cmd, storage_ix, storage);
2408 if mb.literal_context_map_size == 0usize {
2409 let mut j: usize;
2410 j = cmd.insert_len_ as usize;
2411 while j != 0usize {
2412 {
2413 StoreSymbol(
2414 &mut literal_enc,
2415 input[(pos & mask)] as usize,
2416 storage_ix,
2417 storage,
2418 );
2419 pos = pos.wrapping_add(1);
2420 }
2421 j = j.wrapping_sub(1);
2422 }
2423 } else {
2424 let mut j: usize;
2425 j = cmd.insert_len_ as usize;
2426 while j != 0usize {
2427 {
2428 let context: usize =
2429 Context(prev_byte, prev_byte2, literal_context_mode) as usize;
2430 let literal: u8 = input[(pos & mask)];
2431 StoreSymbolWithContext(
2432 &mut literal_enc,
2433 literal as usize,
2434 context,
2435 mb.literal_context_map.slice(),
2436 storage_ix,
2437 storage,
2438 6usize,
2439 );
2440 prev_byte2 = prev_byte;
2441 prev_byte = literal;
2442 pos = pos.wrapping_add(1);
2443 }
2444 j = j.wrapping_sub(1);
2445 }
2446 }
2447 pos = pos.wrapping_add(CommandCopyLen(&cmd) as usize);
2448 if CommandCopyLen(&cmd) != 0 {
2449 prev_byte2 = input[(pos.wrapping_sub(2) & mask)];
2450 prev_byte = input[(pos.wrapping_sub(1) & mask)];
2451 if cmd.cmd_prefix_ as i32 >= 128i32 {
2452 let dist_code: usize = cmd.dist_prefix_ as usize & 0x3ff;
2453 let distnumextra: u32 = u32::from(cmd.dist_prefix_) >> 10;
2454 let distextra: u64 = cmd.dist_extra_ as (u64);
2455 if mb.distance_context_map_size == 0usize {
2456 StoreSymbol(&mut distance_enc, dist_code, storage_ix, storage);
2457 } else {
2458 let context: usize = CommandDistanceContext(&cmd) as usize;
2459 StoreSymbolWithContext(
2460 &mut distance_enc,
2461 dist_code,
2462 context,
2463 mb.distance_context_map.slice(),
2464 storage_ix,
2465 storage,
2466 2usize,
2467 );
2468 }
2469 BrotliWriteBits(distnumextra as u8, distextra, storage_ix, storage);
2470 }
2471 }
2472 }
2473 i = i.wrapping_add(1);
2474 }
2475 CleanupBlockEncoder(alloc, &mut distance_enc);
2476 CleanupBlockEncoder(alloc, &mut command_enc);
2477 CleanupBlockEncoder(alloc, &mut literal_enc);
2478 if is_last != 0 {
2479 JumpToByteBoundary(storage_ix, storage);
2480 }
2481}
2482
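// Accumulates literal, command and distance histograms over the command stream
// without writing any bits.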
2483fn BuildHistograms(
2484 input: &[u8],
2485 start_pos: usize,
2486 mask: usize,
2487 commands: &[Command],
2488 n_commands: usize,
2489 lit_histo: &mut HistogramLiteral,
2490 cmd_histo: &mut HistogramCommand,
2491 dist_histo: &mut HistogramDistance,
2492) {
2493 let mut pos: usize = start_pos;
2494 let mut i: usize;
2495 i = 0usize;
2496 while i < n_commands {
2497 {
2498 let cmd: Command = commands[i];
2499 let mut j: usize;
2500 HistogramAddItem(cmd_histo, cmd.cmd_prefix_ as usize);
2501 j = cmd.insert_len_ as usize;
2502 while j != 0usize {
2503 {
2504 HistogramAddItem(lit_histo, input[(pos & mask)] as usize);
2505 pos = pos.wrapping_add(1);
2506 }
2507 j = j.wrapping_sub(1);
2508 }
2509 pos = pos.wrapping_add(CommandCopyLen(&cmd) as usize);
2510 if CommandCopyLen(&cmd) != 0 && (cmd.cmd_prefix_ as i32 >= 128i32) {
2511 HistogramAddItem(dist_histo, cmd.dist_prefix_ as usize & 0x3ff);
2512 }
2513 }
2514 i = i.wrapping_add(1);
2515 }
2516}
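// Emits the command stream with a single pre-built Huffman code per category
// (no block splitting and no context modeling).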
2517fn StoreDataWithHuffmanCodes(
2518 input: &[u8],
2519 start_pos: usize,
2520 mask: usize,
2521 commands: &[Command],
2522 n_commands: usize,
2523 lit_depth: &[u8],
2524 lit_bits: &[u16],
2525 cmd_depth: &[u8],
2526 cmd_bits: &[u16],
2527 dist_depth: &[u8],
2528 dist_bits: &[u16],
2529 storage_ix: &mut usize,
2530 storage: &mut [u8],
2531) {
2532 let mut pos: usize = start_pos;
2533 let mut i: usize;
2534 i = 0usize;
2535 while i < n_commands {
2536 {
2537 let cmd: Command = commands[i];
2538 let cmd_code: usize = cmd.cmd_prefix_ as usize;
2539 let mut j: usize;
2540 BrotliWriteBits(
2541 cmd_depth[cmd_code],
2542 cmd_bits[cmd_code] as (u64),
2543 storage_ix,
2544 storage,
2545 );
2546 StoreCommandExtra(&cmd, storage_ix, storage);
2547 j = cmd.insert_len_ as usize;
2548 while j != 0usize {
2549 {
2550 let literal: u8 = input[(pos & mask)];
2551 BrotliWriteBits(
2552 lit_depth[(literal as usize)],
2553 lit_bits[(literal as usize)] as (u64),
2554 storage_ix,
2555 storage,
2556 );
2557 pos = pos.wrapping_add(1);
2558 }
2559 j = j.wrapping_sub(1);
2560 }
2561 pos = pos.wrapping_add(CommandCopyLen(&cmd) as usize);
2562 if CommandCopyLen(&cmd) != 0 && (cmd.cmd_prefix_ as i32 >= 128i32) {
2563 let dist_code: usize = cmd.dist_prefix_ as usize & 0x3ff;
2564 let distnumextra: u32 = u32::from(cmd.dist_prefix_) >> 10;
2565 let distextra: u32 = cmd.dist_extra_;
2566 BrotliWriteBits(
2567 dist_depth[dist_code],
2568 dist_bits[dist_code] as (u64),
2569 storage_ix,
2570 storage,
2571 );
2572 BrotliWriteBits(distnumextra as u8, distextra as (u64), storage_ix, storage);
2573 }
2574 }
2575 i = i.wrapping_add(1);
2576 }
2577}
2578
2579fn nop<'a>(_data: &[interface::Command<InputReference>]) {}
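// Stores a meta-block with one block type per category and no context maps: one
// Huffman code each for literals, commands and distances, built over the whole
// meta-block.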
2580pub fn BrotliStoreMetaBlockTrivial<Alloc: BrotliAlloc, Cb>(
2581 alloc: &mut Alloc,
2582 input: &[u8],
2583 start_pos: usize,
2584 length: usize,
2585 mask: usize,
2586 is_last: i32,
2587 params: &BrotliEncoderParams,
2588 distance_cache: &[i32; kNumDistanceCacheEntries],
2589 commands: &[Command],
2590 n_commands: usize,
2591 recoder_state: &mut RecoderState,
2592 storage_ix: &mut usize,
2593 storage: &mut [u8],
2594 f: &mut Cb,
2595) where
2596 Cb: FnMut(
2597 &mut interface::PredictionModeContextMap<InputReferenceMut>,
2598 &mut [interface::StaticCommand],
2599 InputPair,
2600 &mut Alloc,
2601 ),
2602{
2603 let (input0, input1) = InputPairFromMaskedInput(input, start_pos, length, mask);
2604 if params.log_meta_block {
2605 LogMetaBlock(
2606 alloc,
2607 commands.split_at(n_commands).0,
2608 input0,
2609 input1,
2610 distance_cache,
2611 recoder_state,
2612 block_split_nop(),
2613 params,
2614 Some(ContextType::CONTEXT_LSB6),
2615 f,
2616 );
2617 }
2618 let mut lit_histo: HistogramLiteral = HistogramLiteral::default();
2619 let mut cmd_histo: HistogramCommand = HistogramCommand::default();
2620 let mut dist_histo: HistogramDistance = HistogramDistance::default();
2621 let mut lit_depth: [u8; 256] = [0; 256];
2622 let mut lit_bits: [u16; 256] = [0; 256];
2623 let mut cmd_depth: [u8; 704] = [0; 704];
2624 let mut cmd_bits: [u16; 704] = [0; 704];
2625 let mut dist_depth: [u8; MAX_SIMPLE_DISTANCE_ALPHABET_SIZE] =
2626 [0; MAX_SIMPLE_DISTANCE_ALPHABET_SIZE];
2627 let mut dist_bits: [u16; MAX_SIMPLE_DISTANCE_ALPHABET_SIZE] =
2628 [0; MAX_SIMPLE_DISTANCE_ALPHABET_SIZE];
2629 const MAX_HUFFMAN_TREE_SIZE: usize = (2i32 * 704i32 + 1i32) as usize;
2630 let mut tree: [HuffmanTree; MAX_HUFFMAN_TREE_SIZE] = [HuffmanTree {
2631 total_count_: 0,
2632 index_left_: 0,
2633 index_right_or_value_: 0,
2634 }; MAX_HUFFMAN_TREE_SIZE];
2635 let num_distance_symbols = params.dist.alphabet_size;
2636 StoreCompressedMetaBlockHeader(is_last, length, storage_ix, storage);
2637 BuildHistograms(
2638 input,
2639 start_pos,
2640 mask,
2641 commands,
2642 n_commands,
2643 &mut lit_histo,
2644 &mut cmd_histo,
2645 &mut dist_histo,
2646 );
2647 BrotliWriteBits(13, 0, storage_ix, storage);
2648 BuildAndStoreHuffmanTree(
2649 lit_histo.slice_mut(),
2650 BROTLI_NUM_LITERAL_SYMBOLS,
2651 BROTLI_NUM_LITERAL_SYMBOLS,
2652 &mut tree[..],
2653 &mut lit_depth[..],
2654 &mut lit_bits[..],
2655 storage_ix,
2656 storage,
2657 );
2658 BuildAndStoreHuffmanTree(
2659 cmd_histo.slice_mut(),
2660 BROTLI_NUM_COMMAND_SYMBOLS,
2661 BROTLI_NUM_COMMAND_SYMBOLS,
2662 &mut tree[..],
2663 &mut cmd_depth[..],
2664 &mut cmd_bits[..],
2665 storage_ix,
2666 storage,
2667 );
2668 BuildAndStoreHuffmanTree(
2669 dist_histo.slice_mut(),
2670 MAX_SIMPLE_DISTANCE_ALPHABET_SIZE,
2671 num_distance_symbols as usize,
2672 &mut tree[..],
2673 &mut dist_depth[..],
2674 &mut dist_bits[..],
2675 storage_ix,
2676 storage,
2677 );
2678 StoreDataWithHuffmanCodes(
2679 input,
2680 start_pos,
2681 mask,
2682 commands,
2683 n_commands,
2684 &mut lit_depth[..],
2685 &mut lit_bits[..],
2686 &mut cmd_depth[..],
2687 &mut cmd_bits[..],
2688 &mut dist_depth[..],
2689 &mut dist_bits[..],
2690 storage_ix,
2691 storage,
2692 );
2693 if is_last != 0 {
2694 JumpToByteBoundary(storage_ix, storage);
2695 }
2696}
2697
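// Canned bit patterns for the static command and distance Huffman trees used by
// the fast meta-block path below.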
2698fn StoreStaticCommandHuffmanTree(storage_ix: &mut usize, storage: &mut [u8]) {
2699 BrotliWriteBits(
2700 56,
2701 (0x926244u64 << 32) | 0x16307003,
2702 storage_ix,
2703 storage,
2704 );
2705 BrotliWriteBits(3, 0x0u64, storage_ix, storage);
2706}
2707
2708fn StoreStaticDistanceHuffmanTree(storage_ix: &mut usize, storage: &mut [u8]) {
2709 BrotliWriteBits(28, 0x369dc03u64, storage_ix, storage);
2710}
2711
2712struct BlockSplitRef<'a> {
2713 types: &'a [u8],
2714 lengths: &'a [u32],
2715 num_types: u32,
2716}
2717
2718impl<'a> Default for BlockSplitRef<'a> {
2719 fn default() -> Self {
2720 BlockSplitRef {
2721 types: &[],
2722 lengths: &[],
2723 num_types: 1,
2724 }
2725 }
2726}
2727
2728#[derive(Default)]
2729struct MetaBlockSplitRefs<'a> {
2730 btypel: BlockSplitRef<'a>,
2731 literal_context_map: &'a [u32],
2732 btypec: BlockSplitRef<'a>,
2733 btyped: BlockSplitRef<'a>,
2734 distance_context_map: &'a [u32],
2735}
2736
2737fn block_split_nop() -> MetaBlockSplitRefs<'static> {
2738 MetaBlockSplitRefs::default()
2739}
2740
2741fn block_split_reference<'a, Alloc: BrotliAlloc>(
2742 mb: &'a MetaBlockSplit<Alloc>,
2743) -> MetaBlockSplitRefs<'a> {
2744 return MetaBlockSplitRefs::<'a> {
2745 btypel: BlockSplitRef {
2746 types: mb
2747 .literal_split
2748 .types
2749 .slice()
2750 .split_at(mb.literal_split.num_blocks)
2751 .0,
2752 lengths: mb
2753 .literal_split
2754 .lengths
2755 .slice()
2756 .split_at(mb.literal_split.num_blocks)
2757 .0,
2758 num_types: mb.literal_split.num_types as u32,
2759 },
2760 literal_context_map: mb
2761 .literal_context_map
2762 .slice()
2763 .split_at(mb.literal_context_map_size)
2764 .0,
2765 btypec: BlockSplitRef {
2766 types: mb
2767 .command_split
2768 .types
2769 .slice()
2770 .split_at(mb.command_split.num_blocks)
2771 .0,
2772 lengths: mb
2773 .command_split
2774 .lengths
2775 .slice()
2776 .split_at(mb.command_split.num_blocks)
2777 .0,
2778 num_types: mb.command_split.num_types as u32,
2779 },
2780 btyped: BlockSplitRef {
2781 types: mb
2782 .distance_split
2783 .types
2784 .slice()
2785 .split_at(mb.distance_split.num_blocks)
2786 .0,
2787 lengths: mb
2788 .distance_split
2789 .lengths
2790 .slice()
2791 .split_at(mb.distance_split.num_blocks)
2792 .0,
2793 num_types: mb.distance_split.num_types as u32,
2794 },
2795 distance_context_map: mb
2796 .distance_context_map
2797 .slice()
2798 .split_at(mb.distance_context_map_size)
2799 .0,
2800 };
2801}
2802
2803#[derive(Clone, Copy, Default)]
2804pub struct RecoderState {
2805 pub num_bytes_encoded: usize,
2806}
2807
2808impl RecoderState {
2809 pub fn new() -> Self {
2810 Self::default()
2811 }
2812}
2813
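// Lower-effort meta-block encoder: trivial block splits, the fast Huffman tree
// builder, and, when there are at most 128 commands, the static command and
// distance codes.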
2814pub fn BrotliStoreMetaBlockFast<Cb, Alloc: BrotliAlloc>(
2815 m: &mut Alloc,
2816 input: &[u8],
2817 start_pos: usize,
2818 length: usize,
2819 mask: usize,
2820 is_last: i32,
2821 params: &BrotliEncoderParams,
2822 dist_cache: &[i32; kNumDistanceCacheEntries],
2823 commands: &[Command],
2824 n_commands: usize,
2825 recoder_state: &mut RecoderState,
2826 storage_ix: &mut usize,
2827 storage: &mut [u8],
2828 cb: &mut Cb,
2829) where
2830 Cb: FnMut(
2831 &mut interface::PredictionModeContextMap<InputReferenceMut>,
2832 &mut [interface::StaticCommand],
2833 InputPair,
2834 &mut Alloc,
2835 ),
2836{
2837 let (input0, input1) = InputPairFromMaskedInput(input, start_pos, length, mask);
2838 if params.log_meta_block {
2839 LogMetaBlock(
2840 m,
2841 commands.split_at(n_commands).0,
2842 input0,
2843 input1,
2844 dist_cache,
2845 recoder_state,
2846 block_split_nop(),
2847 params,
2848 Some(ContextType::CONTEXT_LSB6),
2849 cb,
2850 );
2851 }
2852 let num_distance_symbols = params.dist.alphabet_size;
2853 let distance_alphabet_bits = Log2FloorNonZero(u64::from(num_distance_symbols) - 1) + 1;
2854 StoreCompressedMetaBlockHeader(is_last, length, storage_ix, storage);
2855 BrotliWriteBits(13, 0, storage_ix, storage);
2856 if n_commands <= 128usize {
2857 let mut histogram: [u32; 256] = [0; 256];
2858 let mut pos: usize = start_pos;
2859 let mut num_literals: usize = 0usize;
2860 let mut i: usize;
2861 let mut lit_depth: [u8; 256] = [0; 256];
2862 let mut lit_bits: [u16; 256] = [0; 256];
2863 i = 0usize;
2864 while i < n_commands {
2865 {
2866 let cmd: Command = commands[i];
2867 let mut j: usize;
2868 j = cmd.insert_len_ as usize;
2869 while j != 0usize {
2870 {
2871 {
2872 let _rhs = 1;
2873 let _lhs = &mut histogram[input[(pos & mask)] as usize];
2874 *_lhs = (*_lhs).wrapping_add(_rhs as u32);
2875 }
2876 pos = pos.wrapping_add(1);
2877 }
2878 j = j.wrapping_sub(1);
2879 }
2880 num_literals = num_literals.wrapping_add(cmd.insert_len_ as usize);
2881 pos = pos.wrapping_add(CommandCopyLen(&cmd) as usize);
2882 }
2883 i = i.wrapping_add(1);
2884 }
2885 BrotliBuildAndStoreHuffmanTreeFast(
2886 m,
2887 &mut histogram[..],
2888 num_literals,
2889 8usize,
2890 &mut lit_depth[..],
2891 &mut lit_bits[..],
2892 storage_ix,
2893 storage,
2894 );
2895 StoreStaticCommandHuffmanTree(storage_ix, storage);
2896 StoreStaticDistanceHuffmanTree(storage_ix, storage);
2897 StoreDataWithHuffmanCodes(
2898 input,
2899 start_pos,
2900 mask,
2901 commands,
2902 n_commands,
2903 &mut lit_depth[..],
2904 &mut lit_bits[..],
2905 &kStaticCommandCodeDepth[..],
2906 &kStaticCommandCodeBits[..],
2907 &kStaticDistanceCodeDepth[..],
2908 &kStaticDistanceCodeBits[..],
2909 storage_ix,
2910 storage,
2911 );
2912 } else {
2913 let mut lit_histo: HistogramLiteral = HistogramLiteral::default();
2914 let mut cmd_histo: HistogramCommand = HistogramCommand::default();
2915 let mut dist_histo: HistogramDistance = HistogramDistance::default();
2916 let mut lit_depth: [u8; 256] = [0; 256];
2917 let mut lit_bits: [u16; 256] = [0; 256];
2918 let mut cmd_depth: [u8; 704] = [0; 704];
2919 let mut cmd_bits: [u16; 704] = [0; 704];
2920 let mut dist_depth: [u8; MAX_SIMPLE_DISTANCE_ALPHABET_SIZE] =
2921 [0; MAX_SIMPLE_DISTANCE_ALPHABET_SIZE];
2922 let mut dist_bits: [u16; MAX_SIMPLE_DISTANCE_ALPHABET_SIZE] =
2923 [0; MAX_SIMPLE_DISTANCE_ALPHABET_SIZE];
2924 BuildHistograms(
2925 input,
2926 start_pos,
2927 mask,
2928 commands,
2929 n_commands,
2930 &mut lit_histo,
2931 &mut cmd_histo,
2932 &mut dist_histo,
2933 );
2934 BrotliBuildAndStoreHuffmanTreeFast(
2935 m,
2936 lit_histo.slice(),
2937 lit_histo.total_count_,
2938 8usize,
2939 &mut lit_depth[..],
2940 &mut lit_bits[..],
2941 storage_ix,
2942 storage,
2943 );
2944 BrotliBuildAndStoreHuffmanTreeFast(
2945 m,
2946 cmd_histo.slice(),
2947 cmd_histo.total_count_,
2948 10usize,
2949 &mut cmd_depth[..],
2950 &mut cmd_bits[..],
2951 storage_ix,
2952 storage,
2953 );
2954 BrotliBuildAndStoreHuffmanTreeFast(
2955 m,
2956 dist_histo.slice(),
2957 dist_histo.total_count_,
2958 distance_alphabet_bits as usize,
2959 &mut dist_depth[..],
2960 &mut dist_bits[..],
2961 storage_ix,
2962 storage,
2963 );
2964 StoreDataWithHuffmanCodes(
2965 input,
2966 start_pos,
2967 mask,
2968 commands,
2969 n_commands,
2970 &mut lit_depth[..],
2971 &mut lit_bits[..],
2972 &mut cmd_depth[..],
2973 &mut cmd_bits[..],
2974 &mut dist_depth[..],
2975 &mut dist_bits[..],
2976 storage_ix,
2977 storage,
2978 );
2979 }
2980 if is_last != 0 {
2981 JumpToByteBoundary(storage_ix, storage);
2982 }
2983}
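// Header of an uncompressed meta-block: ISLAST = 0, the encoded length MLEN, and
// the ISUNCOMPRESSED bit.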
2984fn BrotliStoreUncompressedMetaBlockHeader(
2985 length: usize,
2986 storage_ix: &mut usize,
2987 storage: &mut [u8],
2988) {
2989 let mut lenbits: u64 = 0;
2990 let mut nlenbits: u32 = 0;
2991 let mut nibblesbits: u32 = 0;
2992 BrotliWriteBits(1, 0, storage_ix, storage);
2993 BrotliEncodeMlen(length as u32, &mut lenbits, &mut nlenbits, &mut nibblesbits);
2994 BrotliWriteBits(2, nibblesbits as u64, storage_ix, storage);
2995 BrotliWriteBits(nlenbits as u8, lenbits, storage_ix, storage);
2996 BrotliWriteBits(1, 1, storage_ix, storage);
2997}
2998
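// Resolves (position, len) in the ring buffer into at most two contiguous
// slices, split at the wrap-around point.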
2999fn InputPairFromMaskedInput(
3000 input: &[u8],
3001 position: usize,
3002 len: usize,
3003 mask: usize,
3004) -> (&[u8], &[u8]) {
3005 let masked_pos: usize = position & mask;
3006 if masked_pos.wrapping_add(len) > mask.wrapping_add(1) {
3007 let len1: usize = mask.wrapping_add(1).wrapping_sub(masked_pos);
3008 return (
3009 &input[masked_pos..(masked_pos + len1)],
3010 &input[0..len.wrapping_sub(len1)],
3011 );
3012 }
3013 (&input[masked_pos..masked_pos + len], &[])
3014}
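// Copies the input verbatim: uncompressed meta-block header, byte alignment, the
// raw (possibly wrapped) bytes, and for the final block an empty last meta-block.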
3015pub fn BrotliStoreUncompressedMetaBlock<Cb, Alloc: BrotliAlloc>(
3016 alloc: &mut Alloc,
3017 is_final_block: i32,
3018 input: &[u8],
3019 position: usize,
3020 mask: usize,
3021 params: &BrotliEncoderParams,
3022 len: usize,
3023 recoder_state: &mut RecoderState,
3024 storage_ix: &mut usize,
3025 storage: &mut [u8],
3026 suppress_meta_block_logging: bool,
3027 cb: &mut Cb,
3028) where
3029 Cb: FnMut(
3030 &mut interface::PredictionModeContextMap<InputReferenceMut>,
3031 &mut [interface::StaticCommand],
3032 InputPair,
3033 &mut Alloc,
3034 ),
3035{
3036 let (input0, input1) = InputPairFromMaskedInput(input, position, len, mask);
3037 BrotliStoreUncompressedMetaBlockHeader(len, storage_ix, storage);
3038 JumpToByteBoundary(storage_ix, storage);
3039 let dst_start0 = (*storage_ix >> 3);
3040 storage[dst_start0..(dst_start0 + input0.len())].clone_from_slice(input0);
3041 *storage_ix = storage_ix.wrapping_add(input0.len() << 3);
3042 let dst_start1 = (*storage_ix >> 3);
3043 storage[dst_start1..(dst_start1 + input1.len())].clone_from_slice(input1);
3044 *storage_ix = storage_ix.wrapping_add(input1.len() << 3);
3045 BrotliWriteBitsPrepareStorage(*storage_ix, storage);
3046 if params.log_meta_block && !suppress_meta_block_logging {
3047 let cmds = [Command {
3048 insert_len_: len as u32,
3049 copy_len_: 0,
3050 dist_extra_: 0,
3051 cmd_prefix_: 0,
3052 dist_prefix_: 0,
3053 }];
3054
3055 LogMetaBlock(
3056 alloc,
3057 &cmds,
3058 input0,
3059 input1,
3060 &[0, 0, 0, 0],
3061 recoder_state,
3062 block_split_nop(),
3063 params,
3064 None,
3065 cb,
3066 );
3067 }
3068 if is_final_block != 0 {
3069 BrotliWriteBits(1u8, 1u64, storage_ix, storage);
3070 BrotliWriteBits(1u8, 1u64, storage_ix, storage);
3071 JumpToByteBoundary(storage_ix, storage);
3072 }
3073}
3074
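// Six bits 0b000110 encode an empty metadata meta-block, which serves as a
// flush/sync point once padded to a byte boundary.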
3075pub fn BrotliStoreSyncMetaBlock(storage_ix: &mut usize, storage: &mut [u8]) {
3076 BrotliWriteBits(6, 6, storage_ix, storage);
3077 JumpToByteBoundary(storage_ix, storage);
3078}
3079
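// ISLAST = 1 and ISLASTEMPTY = 1, then pad to a byte boundary.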
3080pub fn BrotliWriteEmptyLastMetaBlock(storage_ix: &mut usize, storage: &mut [u8]) {
3081 BrotliWriteBits(1u8, 1u64, storage_ix, storage);
3082 BrotliWriteBits(1u8, 1u64, storage_ix, storage);
3083 JumpToByteBoundary(storage_ix, storage);
3084}
3085
3086const MAX_SIZE_ENCODING: usize = 10;
3087
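// LEB128-style varint: seven value bits per byte, continuation bit set on every
// byte except the last; returns the byte count and the buffer.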
3088fn encode_base_128(mut value: u64) -> (usize, [u8; MAX_SIZE_ENCODING]) {
3089 let mut ret = [0u8; MAX_SIZE_ENCODING];
3090 for index in 0..ret.len() {
3091 ret[index] = (value & 0x7f) as u8;
3092 value >>= 7;
3093 if value != 0 {
3094 ret[index] |= 0x80;
3095 } else {
3096 return (index + 1, ret);
3097 }
3098 }
3099 (ret.len(), ret)
3100}
3101
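// Writes a metadata meta-block (ISLAST = 0, MNIBBLES = 0, one skip-length byte)
// whose payload is a 3-byte magic number, a version byte and the base-128
// encoded size hint; the skip length written below is the payload size minus one.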
3102pub fn BrotliWriteMetadataMetaBlock(
3103 params: &BrotliEncoderParams,
3104 storage_ix: &mut usize,
3105 storage: &mut [u8],
3106) {
3107 BrotliWriteBits(1u8, 0u64, storage_ix, storage);
3108 BrotliWriteBits(2u8, 3u64, storage_ix, storage);
3109 BrotliWriteBits(1u8, 0u64, storage_ix, storage);
3110 BrotliWriteBits(2u8, 1u64, storage_ix, storage);
3111 let (size_hint_count, size_hint_b128) = encode_base_128(params.size_hint as u64);
3112
3113 BrotliWriteBits(8u8, 3 + size_hint_count as u64, storage_ix, storage);
3114 JumpToByteBoundary(storage_ix, storage);
3115 let magic_number: [u8; 3] = if params.catable && !params.use_dictionary {
3116 [0xe1, 0x97, 0x81]
3117 } else if params.appendable {
3118 [0xe1, 0x97, 0x82]
3119 } else {
3120 [0xe1, 0x97, 0x80]
3121 };
3122 for magic in magic_number.iter() {
3123 BrotliWriteBits(8u8, u64::from(*magic), storage_ix, storage);
3124 }
3125 BrotliWriteBits(8u8, u64::from(VERSION), storage_ix, storage);
3126 for sh in size_hint_b128[..size_hint_count].iter() {
3127 BrotliWriteBits(8u8, u64::from(*sh), storage_ix, storage);
3128 }
3129}
3130
#[cfg(test)]
3131mod test {
3132 use super::{encode_base_128, MAX_SIZE_ENCODING};
3133 #[test]
3134 fn test_encode_base_128() {
3135 assert_eq!(encode_base_128(0), (1, [0u8; MAX_SIZE_ENCODING]));
3136 assert_eq!(encode_base_128(1), (1, [1, 0, 0, 0, 0, 0, 0, 0, 0, 0]));
3137 assert_eq!(encode_base_128(127), (1, [0x7f, 0, 0, 0, 0, 0, 0, 0, 0, 0]));
3138 assert_eq!(
3139 encode_base_128(128),
3140 (2, [0x80, 0x1, 0, 0, 0, 0, 0, 0, 0, 0])
3141 );
3142 assert_eq!(
3143 encode_base_128(16383),
3144 (2, [0xff, 0x7f, 0, 0, 0, 0, 0, 0, 0, 0])
3145 );
3146 assert_eq!(
3147 encode_base_128(16384),
3148 (3, [0x80, 0x80, 0x1, 0, 0, 0, 0, 0, 0, 0])
3149 );
3150 assert_eq!(
3151 encode_base_128(2097151),
3152 (3, [0xff, 0xff, 0x7f, 0, 0, 0, 0, 0, 0, 0])
3153 );
3154 assert_eq!(
3155 encode_base_128(2097152),
3156 (4, [0x80, 0x80, 0x80, 0x1, 0, 0, 0, 0, 0, 0])
3157 );
3158 assert_eq!(
3159 encode_base_128(4194303),
3160 (4, [0xff, 0xff, 0xff, 0x1, 0, 0, 0, 0, 0, 0])
3161 );
3162 assert_eq!(
3163 encode_base_128(4294967295),
3164 (5, [0xff, 0xff, 0xff, 0xff, 0xf, 0, 0, 0, 0, 0])
3165 );
3166 assert_eq!(
3167 encode_base_128(4294967296),
3168 (5, [0x80, 0x80, 0x80, 0x80, 0x10, 0, 0, 0, 0, 0])
3169 );
3170 assert_eq!(
3171 encode_base_128(9223372036854775808),
3172 (
3173 10,
3174 [0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x1]
3175 )
3176 );
3177 assert_eq!(
3178 encode_base_128(18446744073709551615),
3179 (
3180 10,
3181 [0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x1]
3182 )
3183 );
3184 }
3185}