tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

encode.c (78417B)


      1 /* Copyright 2013 Google Inc. All Rights Reserved.
      2 
      3   Distributed under MIT license.
      4   See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
      5 */
      6 
      7 /* Implementation of Brotli compressor. */
      8 
      9 #include <brotli/encode.h>
     10 
     11 #include "../common/constants.h"
     12 #include "../common/context.h"
     13 #include "../common/platform.h"
     14 #include <brotli/shared_dictionary.h>
     15 #include "../common/version.h"
     16 #include "backward_references_hq.h"
     17 #include "backward_references.h"
     18 #include "bit_cost.h"
     19 #include "brotli_bit_stream.h"
     20 #include "command.h"
     21 #include "compound_dictionary.h"
     22 #include "compress_fragment_two_pass.h"
     23 #include "compress_fragment.h"
     24 #include "dictionary_hash.h"
     25 #include "encoder_dict.h"
     26 #include "fast_log.h"
     27 #include "hash.h"
     28 #include "histogram.h"
     29 #include "memory.h"
     30 #include "metablock.h"
     31 #include "params.h"
     32 #include "quality.h"
     33 #include "ringbuffer.h"
     34 #include "state.h"
     35 #include "static_init.h"
     36 #include "utf8_util.h"
     37 #include "write_bits.h"
     38 
     39 #if defined(__cplusplus) || defined(c_plusplus)
     40 extern "C" {
     41 #endif
     42 
     43 #define COPY_ARRAY(dst, src) memcpy(dst, src, sizeof(src));
     44 
     45 static size_t InputBlockSize(BrotliEncoderState* s) {
     46  return (size_t)1 << s->params.lgblock;
     47 }
     48 
     49 static uint64_t UnprocessedInputSize(BrotliEncoderState* s) {
     50  return s->input_pos_ - s->last_processed_pos_;
     51 }
     52 
     53 static size_t RemainingInputBlockSize(BrotliEncoderState* s) {
     54  const uint64_t delta = UnprocessedInputSize(s);
     55  size_t block_size = InputBlockSize(s);
     56  if (delta >= block_size) return 0;
     57  return block_size - (size_t)delta;
     58 }
     59 
     60 BROTLI_BOOL BrotliEncoderSetParameter(
     61    BrotliEncoderState* state, BrotliEncoderParameter p, uint32_t value) {
     62  /* Changing parameters on the fly is not implemented yet. */
     63  if (state->is_initialized_) return BROTLI_FALSE;
     64  /* TODO(eustas): Validate/clamp parameters here. */
     65  switch (p) {
     66    case BROTLI_PARAM_MODE:
     67      state->params.mode = (BrotliEncoderMode)value;
     68      return BROTLI_TRUE;
     69 
     70    case BROTLI_PARAM_QUALITY:
     71      state->params.quality = (int)value;
     72      return BROTLI_TRUE;
     73 
     74    case BROTLI_PARAM_LGWIN:
     75      state->params.lgwin = (int)value;
     76      return BROTLI_TRUE;
     77 
     78    case BROTLI_PARAM_LGBLOCK:
     79      state->params.lgblock = (int)value;
     80      return BROTLI_TRUE;
     81 
     82    case BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING:
     83      if ((value != 0) && (value != 1)) return BROTLI_FALSE;
     84      state->params.disable_literal_context_modeling = TO_BROTLI_BOOL(!!value);
     85      return BROTLI_TRUE;
     86 
     87    case BROTLI_PARAM_SIZE_HINT:
     88      state->params.size_hint = value;
     89      return BROTLI_TRUE;
     90 
     91    case BROTLI_PARAM_LARGE_WINDOW:
     92      state->params.large_window = TO_BROTLI_BOOL(!!value);
     93      return BROTLI_TRUE;
     94 
     95    case BROTLI_PARAM_NPOSTFIX:
     96      state->params.dist.distance_postfix_bits = value;
     97      return BROTLI_TRUE;
     98 
     99    case BROTLI_PARAM_NDIRECT:
    100      state->params.dist.num_direct_distance_codes = value;
    101      return BROTLI_TRUE;
    102 
    103    case BROTLI_PARAM_STREAM_OFFSET:
    104      if (value > (1u << 30)) return BROTLI_FALSE;
    105      state->params.stream_offset = value;
    106      return BROTLI_TRUE;
    107 
    108    default: return BROTLI_FALSE;
    109  }
    110 }
    111 
    112 /* Wraps 64-bit input position to 32-bit ring-buffer position preserving
    113   "not-a-first-lap" feature. */
    114 static uint32_t WrapPosition(uint64_t position) {
    115  uint32_t result = (uint32_t)position;
    116  uint64_t gb = position >> 30;
    117  if (gb > 2) {
    118    /* Wrap every 2GiB; The first 3GB are continuous. */
    119    result = (result & ((1u << 30) - 1)) | ((uint32_t)((gb - 1) & 1) + 1) << 30;
    120  }
    121  return result;
    122 }
    123 
    124 static uint8_t* GetBrotliStorage(BrotliEncoderState* s, size_t size) {
    125  MemoryManager* m = &s->memory_manager_;
    126  if (s->storage_size_ < size) {
    127    BROTLI_FREE(m, s->storage_);
    128    s->storage_ = BROTLI_ALLOC(m, uint8_t, size);
    129    if (BROTLI_IS_OOM(m) || BROTLI_IS_NULL(s->storage_)) return NULL;
    130    s->storage_size_ = size;
    131  }
    132  return s->storage_;
    133 }
    134 
    135 static size_t HashTableSize(size_t max_table_size, size_t input_size) {
    136  size_t htsize = 256;
    137  while (htsize < max_table_size && htsize < input_size) {
    138    htsize <<= 1;
    139  }
    140  return htsize;
    141 }
    142 
    143 static int* GetHashTable(BrotliEncoderState* s, int quality,
    144                         size_t input_size, size_t* table_size) {
    145  /* Use smaller hash table when input.size() is smaller, since we
    146     fill the table, incurring O(hash table size) overhead for
    147     compression, and if the input is short, we won't need that
    148     many hash table entries anyway. */
    149  MemoryManager* m = &s->memory_manager_;
    150  const size_t max_table_size = MaxHashTableSize(quality);
    151  size_t htsize = HashTableSize(max_table_size, input_size);
    152  int* table;
    153  BROTLI_DCHECK(max_table_size >= 256);
    154  if (quality == FAST_ONE_PASS_COMPRESSION_QUALITY) {
    155    /* Only odd shifts are supported by fast-one-pass. */
    156    if ((htsize & 0xAAAAA) == 0) {
    157      htsize <<= 1;
    158    }
    159  }
    160 
    161  if (htsize <= sizeof(s->small_table_) / sizeof(s->small_table_[0])) {
    162    table = s->small_table_;
    163  } else {
    164    if (htsize > s->large_table_size_) {
    165      s->large_table_size_ = htsize;
    166      BROTLI_FREE(m, s->large_table_);
    167      s->large_table_ = BROTLI_ALLOC(m, int, htsize);
    168      if (BROTLI_IS_OOM(m) || BROTLI_IS_NULL(s->large_table_)) return 0;
    169    }
    170    table = s->large_table_;
    171  }
    172 
    173  *table_size = htsize;
    174  memset(table, 0, htsize * sizeof(*table));
    175  return table;
    176 }
    177 
    178 static void EncodeWindowBits(int lgwin, BROTLI_BOOL large_window,
    179    uint16_t* last_bytes, uint8_t* last_bytes_bits) {
    180  if (large_window) {
    181    *last_bytes = (uint16_t)(((lgwin & 0x3F) << 8) | 0x11);
    182    *last_bytes_bits = 14;
    183  } else {
    184    if (lgwin == 16) {
    185      *last_bytes = 0;
    186      *last_bytes_bits = 1;
    187    } else if (lgwin == 17) {
    188      *last_bytes = 1;
    189      *last_bytes_bits = 7;
    190    } else if (lgwin > 17) {
    191      *last_bytes = (uint16_t)(((lgwin - 17) << 1) | 0x01);
    192      *last_bytes_bits = 4;
    193    } else {
    194      *last_bytes = (uint16_t)(((lgwin - 8) << 4) | 0x01);
    195      *last_bytes_bits = 7;
    196    }
    197  }
    198 }
    199 
/* TODO(eustas): move to compress_fragment.c? */
/* Initializes the command and distance prefix codes for the first block. */
static void InitCommandPrefixCodes(BrotliOnePassArena* s) {
  /* Canned Huffman code depths for the 128 command/distance symbols used by
     the fast one-pass compressor before any statistics are available. */
  static const BROTLI_MODEL("small") uint8_t kDefaultCommandDepths[128] = {
    0, 4, 4, 5, 6, 6, 7, 7, 7, 7, 7, 8, 8, 8, 8, 8,
    0, 0, 0, 4, 4, 4, 4, 4, 5, 5, 6, 6, 6, 6, 7, 7,
    7, 7, 10, 10, 10, 10, 10, 10, 0, 4, 4, 5, 5, 5, 6, 6,
    7, 8, 8, 9, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
    5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    6, 6, 6, 6, 6, 6, 5, 5, 5, 5, 5, 5, 4, 4, 4, 4,
    4, 4, 4, 5, 5, 5, 5, 5, 5, 6, 6, 7, 7, 7, 8, 10,
    12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12,
  };
  /* Bit patterns paired with the depths above (one entry per symbol). */
  static const BROTLI_MODEL("small") uint16_t kDefaultCommandBits[128] = {
    0,   0,   8,   9,   3,  35,   7,   71,
    39, 103,  23,  47, 175, 111, 239,   31,
    0,   0,   0,   4,  12,   2,  10,    6,
    13,  29,  11,  43,  27,  59,  87,   55,
    15,  79, 319, 831, 191, 703, 447,  959,
    0,  14,   1,  25,   5,  21,  19,   51,
    119, 159,  95, 223, 479, 991,  63,  575,
    127, 639, 383, 895, 255, 767, 511, 1023,
    14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    27, 59, 7, 39, 23, 55, 30, 1, 17, 9, 25, 5, 0, 8, 4, 12,
    2, 10, 6, 21, 13, 29, 3, 19, 11, 15, 47, 31, 95, 63, 127, 255,
    767, 2815, 1791, 3839, 511, 2559, 1535, 3583, 1023, 3071, 2047, 4095,
  };
  /* Pre-serialized bit-stream encoding of the code above, emitted verbatim
     into the output (kDefaultCommandCodeNumBits bits of it). */
  static const BROTLI_MODEL("small") uint8_t kDefaultCommandCode[] = {
    0xff, 0x77, 0xd5, 0xbf, 0xe7, 0xde, 0xea, 0x9e, 0x51, 0x5d, 0xde, 0xc6,
    0x70, 0x57, 0xbc, 0x58, 0x58, 0x58, 0xd8, 0xd8, 0x58, 0xd5, 0xcb, 0x8c,
    0xea, 0xe0, 0xc3, 0x87, 0x1f, 0x83, 0xc1, 0x60, 0x1c, 0x67, 0xb2, 0xaa,
    0x06, 0x83, 0xc1, 0x60, 0x30, 0x18, 0xcc, 0xa1, 0xce, 0x88, 0x54, 0x94,
    0x46, 0xe1, 0xb0, 0xd0, 0x4e, 0xb2, 0xf7, 0x04, 0x00,
  };
  static const size_t kDefaultCommandCodeNumBits = 448;
  COPY_ARRAY(s->cmd_depth, kDefaultCommandDepths);
  COPY_ARRAY(s->cmd_bits, kDefaultCommandBits);

  /* Initialize the pre-compressed form of the command and distance prefix
     codes. */
  COPY_ARRAY(s->cmd_code, kDefaultCommandCode);
  s->cmd_code_numbits = kDefaultCommandCodeNumBits;
}
    243 
    244 /* TODO(eustas): avoid FP calculations. */
    245 static double EstimateEntropy(const uint32_t* population, size_t size) {
    246  size_t total = 0;
    247  double result = 0;
    248  for (size_t i = 0; i < size; ++i) {
    249    uint32_t p = population[i];
    250    total += p;
    251    result += (double)p * FastLog2(p);
    252  }
    253  result = (double)total * FastLog2(total) - result;
    254  return result;
    255 }
    256 
/* Decide about the context map based on the ability of the prediction
   ability of the previous byte UTF8-prefix on the next byte. The
   prediction ability is calculated as Shannon entropy. Here we need
   Shannon entropy instead of 'BrotliBitsEntropy' since the prefix will be
   encoded with the remaining 6 bits of the following byte, and
   BrotliBitsEntropy will assume that symbol to be stored alone using Huffman
   coding. */
static void ChooseContextMap(int quality,
                             uint32_t* bigram_histo,
                             size_t* num_literal_contexts,
                             const uint32_t** literal_context_map) {
  /* 3-context map: distinguishes ASCII / lead-byte / continuation prefixes. */
  static const BROTLI_MODEL("small")
  uint32_t kStaticContextMapContinuation[64] = {
    1, 1, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  };
  /* 2-context map: only separates UTF8 continuation bytes from the rest. */
  static const BROTLI_MODEL("small")
  uint32_t kStaticContextMapSimpleUTF8[64] = {
    0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  };

  uint32_t monogram_histo[3] = { 0 };
  uint32_t two_prefix_histo[6] = { 0 };
  size_t total;
  size_t i;
  double entropy[4];
  /* bigram_histo is a 3x3 matrix indexed by (prev-class, cur-class); fold it
     into marginal (1-context) and 2-prefix histograms. */
  for (i = 0; i < 9; ++i) {
    monogram_histo[i % 3] += bigram_histo[i];
    two_prefix_histo[i % 6] += bigram_histo[i];
  }
  /* entropy[k] = entropy when modeling literals with k contexts. */
  entropy[1] = EstimateEntropy(monogram_histo, 3);
  entropy[2] = (EstimateEntropy(two_prefix_histo, 3) +
                EstimateEntropy(two_prefix_histo + 3, 3));
  entropy[3] = 0;
  for (i = 0; i < 3; ++i) {
    entropy[3] += EstimateEntropy(bigram_histo + 3 * i, 3);
  }

  total = monogram_histo[0] + monogram_histo[1] + monogram_histo[2];
  BROTLI_DCHECK(total != 0);
  /* entropy[0] serves as the 1/total normalizer for per-symbol entropy. */
  entropy[0] = 1.0 / (double)total;
  entropy[1] *= entropy[0];
  entropy[2] *= entropy[0];
  entropy[3] *= entropy[0];

  if (quality < MIN_QUALITY_FOR_HQ_CONTEXT_MODELING) {
    /* 3 context models is a bit slower, don't use it at lower qualities. */
    entropy[3] = entropy[1] * 10;
  }
  /* If expected savings by symbol are less than 0.2 bits, skip the
     context modeling -- in exchange for faster decoding speed. */
  if (entropy[1] - entropy[2] < 0.2 &&
      entropy[1] - entropy[3] < 0.2) {
    *num_literal_contexts = 1;
  } else if (entropy[2] - entropy[3] < 0.02) {
    /* The third context barely helps; use the simple 2-context map. */
    *num_literal_contexts = 2;
    *literal_context_map = kStaticContextMapSimpleUTF8;
  } else {
    *num_literal_contexts = 3;
    *literal_context_map = kStaticContextMapContinuation;
  }
}
    324 
/* Decide if we want to use a more complex static context map containing 13
   context values, based on the entropy reduction of histograms over the
   first 5 bits of literals. */
static BROTLI_BOOL ShouldUseComplexStaticContextMap(const uint8_t* input,
    size_t start_pos, size_t length, size_t mask, int quality, size_t size_hint,
    size_t* num_literal_contexts, const uint32_t** literal_context_map,
    uint32_t* arena) {
  /* 64-entry map from the UTF8 context id of (prev1, prev2) to one of 13
     hand-tuned context classes; the inline comments describe which character
     group each 4-entry row corresponds to. */
  static const BROTLI_MODEL("small")
  uint32_t kStaticContextMapComplexUTF8[64] = {
    11, 11, 12, 12, /* 0 special */
    0, 0, 0, 0, /* 4 lf */
    1, 1, 9, 9, /* 8 space */
    2, 2, 2, 2, /* !, first after space/lf and after something else. */
    1, 1, 1, 1, /* " */
    8, 3, 3, 3, /* % */
    1, 1, 1, 1, /* ({[ */
    2, 2, 2, 2, /* }]) */
    8, 4, 4, 4, /* :; */
    8, 7, 4, 4, /* . */
    8, 0, 0, 0, /* > */
    3, 3, 3, 3, /* [0..9] */
    5, 5, 10, 5, /* [A-Z] */
    5, 5, 10, 5,
    6, 6, 6, 6, /* [a-z] */
    6, 6, 6, 6,
  };
  BROTLI_UNUSED(quality);
  /* Try the more complex static context map only for long data. */
  if (size_hint < (1 << 20)) {
    return BROTLI_FALSE;
  } else {
    const size_t end_pos = start_pos + length;
    /* To make entropy calculations faster, we collect histograms
       over the 5 most significant bits of literals. One histogram
       without context and 13 additional histograms for each context value. */
    uint32_t* BROTLI_RESTRICT const combined_histo = arena;
    uint32_t* BROTLI_RESTRICT const context_histo = arena + 32;
    uint32_t total = 0;
    double entropy[3];
    size_t i;
    ContextLut utf8_lut = BROTLI_CONTEXT_LUT(CONTEXT_UTF8);
    /* NOTE: sizeof(arena[0]) is used because arena decays to a pointer;
       the caller must supply 32 * (BROTLI_MAX_STATIC_CONTEXTS + 1) words. */
    memset(arena, 0, sizeof(arena[0]) * 32 * (BROTLI_MAX_STATIC_CONTEXTS + 1));
    for (; start_pos + 64 <= end_pos; start_pos += 4096) {
      const size_t stride_end_pos = start_pos + 64;
      uint8_t prev2 = input[start_pos & mask];
      uint8_t prev1 = input[(start_pos + 1) & mask];
      size_t pos;
      /* To make the analysis of the data faster we only examine 64 byte long
         strides at every 4kB intervals. */
      for (pos = start_pos + 2; pos < stride_end_pos; ++pos) {
        const uint8_t literal = input[pos & mask];
        const uint8_t context = (uint8_t)kStaticContextMapComplexUTF8[
            BROTLI_CONTEXT(prev1, prev2, utf8_lut)];
        ++total;
        /* Bucket literals by their top 5 bits (literal >> 3). */
        ++combined_histo[literal >> 3];
        ++context_histo[(context << 5) + (literal >> 3)];
        prev2 = prev1;
        prev1 = literal;
      }
    }
    /* entropy[1]: no context; entropy[2]: sum over the 13 context classes. */
    entropy[1] = EstimateEntropy(combined_histo, 32);
    entropy[2] = 0;
    for (i = 0; i < BROTLI_MAX_STATIC_CONTEXTS; ++i) {
      entropy[2] += EstimateEntropy(context_histo + (i << 5), 32);
    }
    /* Normalize to per-symbol entropy via the 1/total factor in entropy[0]. */
    entropy[0] = 1.0 / (double)total;
    entropy[1] *= entropy[0];
    entropy[2] *= entropy[0];
    /* The triggering heuristics below were tuned by compressing the individual
       files of the silesia corpus. If we skip this kind of context modeling
       for not very well compressible input (i.e. entropy using context modeling
       is 60% of maximal entropy) or if expected savings by symbol are less
       than 0.2 bits, then in every case when it triggers, the final compression
       ratio is improved. Note however that this heuristics might be too strict
       for some cases and could be tuned further. */
    if (entropy[2] > 3.0 || entropy[1] - entropy[2] < 0.2) {
      return BROTLI_FALSE;
    } else {
      *num_literal_contexts = BROTLI_MAX_STATIC_CONTEXTS;
      *literal_context_map = kStaticContextMapComplexUTF8;
      return BROTLI_TRUE;
    }
  }
}
    409 
/* Chooses the literal context map for a metablock. On success the results
   are stored through num_literal_contexts / literal_context_map; for short
   or low-quality input the outputs are left untouched (caller's defaults
   apply). |arena| is caller-provided scratch of at least
   32 * (BROTLI_MAX_STATIC_CONTEXTS + 1) words. */
static void DecideOverLiteralContextModeling(const uint8_t* input,
    size_t start_pos, size_t length, size_t mask, int quality, size_t size_hint,
    size_t* num_literal_contexts, const uint32_t** literal_context_map,
    uint32_t* arena) {
  if (quality < MIN_QUALITY_FOR_CONTEXT_MODELING || length < 64) {
    return;
  } else if (ShouldUseComplexStaticContextMap(
      input, start_pos, length, mask, quality, size_hint,
      num_literal_contexts, literal_context_map, arena)) {
    /* Context map was already set, nothing else to do. */
  } else {
    /* Gather bi-gram data of the UTF8 byte prefixes. To make the analysis of
       UTF8 data faster we only examine 64 byte long strides at every 4kB
       intervals. */
    const size_t end_pos = start_pos + length;
    /* 3x3 histogram over (prev-class, cur-class) pairs; classes come from
       the top two bits of each byte via |lut| below. */
    uint32_t* BROTLI_RESTRICT const bigram_prefix_histo = arena;
    memset(bigram_prefix_histo, 0, sizeof(arena[0]) * 9);
    for (; start_pos + 64 <= end_pos; start_pos += 4096) {
      /* Maps byte>>6 to class: ASCII (0x), lead (11), continuation (10). */
      static const int lut[4] = { 0, 0, 1, 2 };
      const size_t stride_end_pos = start_pos + 64;
      int prev = lut[input[start_pos & mask] >> 6] * 3;
      size_t pos;
      for (pos = start_pos + 1; pos < stride_end_pos; ++pos) {
        const uint8_t literal = input[pos & mask];
        ++bigram_prefix_histo[prev + lut[literal >> 6]];
        prev = lut[literal >> 6] * 3;
      }
    }
    ChooseContextMap(quality, &bigram_prefix_histo[0], num_literal_contexts,
                     literal_context_map);
  }
}
    442 
    443 static BROTLI_BOOL ShouldCompress(
    444    const uint8_t* data, const size_t mask, const uint64_t last_flush_pos,
    445    const size_t bytes, const size_t num_literals, const size_t num_commands) {
    446  /* TODO(eustas): find more precise minimal block overhead. */
    447  if (bytes <= 2) return BROTLI_FALSE;
    448  if (num_commands < (bytes >> 8) + 2) {
    449    if ((double)num_literals > 0.99 * (double)bytes) {
    450      uint32_t literal_histo[256] = { 0 };
    451      static const uint32_t kSampleRate = 13;
    452      static const double kInvSampleRate = 1.0 / 13.0;
    453      static const double kMinEntropy = 7.92;
    454      const double bit_cost_threshold =
    455          (double)bytes * kMinEntropy * kInvSampleRate;
    456      size_t t = (bytes + kSampleRate - 1) / kSampleRate;
    457      uint32_t pos = (uint32_t)last_flush_pos;
    458      size_t i;
    459      for (i = 0; i < t; i++) {
    460        ++literal_histo[data[pos & mask]];
    461        pos += kSampleRate;
    462      }
    463      if (BrotliBitsEntropy(literal_histo, 256) > bit_cost_threshold) {
    464        return BROTLI_FALSE;
    465      }
    466    }
    467  }
    468  return BROTLI_TRUE;
    469 }
    470 
    471 /* Chooses the literal context mode for a metablock */
    472 static ContextType ChooseContextMode(const BrotliEncoderParams* params,
    473    const uint8_t* data, const size_t pos, const size_t mask,
    474    const size_t length) {
    475  /* We only do the computation for the option of something else than
    476     CONTEXT_UTF8 for the highest qualities */
    477  if (params->quality >= MIN_QUALITY_FOR_HQ_BLOCK_SPLITTING &&
    478      !BrotliIsMostlyUTF8(data, pos, mask, length, kMinUTF8Ratio)) {
    479    return CONTEXT_SIGNED;
    480  }
    481  return CONTEXT_UTF8;
    482 }
    483 
/* Serializes one metablock into |storage| at bit position |*storage_ix|.
   Picks between: an empty last-block marker, an uncompressed metablock
   (when compression would not pay off), and three compressed encoders of
   increasing sophistication chosen by quality. May report OOM through |m|.
   On entry *storage_ix must hold the pending last-bytes bit count (<= 14). */
static void WriteMetaBlockInternal(MemoryManager* m,
                                   const uint8_t* data,
                                   const size_t mask,
                                   const uint64_t last_flush_pos,
                                   const size_t bytes,
                                   const BROTLI_BOOL is_last,
                                   ContextType literal_context_mode,
                                   const BrotliEncoderParams* params,
                                   const uint8_t prev_byte,
                                   const uint8_t prev_byte2,
                                   const size_t num_literals,
                                   const size_t num_commands,
                                   Command* commands,
                                   const int* saved_dist_cache,
                                   int* dist_cache,
                                   size_t* storage_ix,
                                   uint8_t* storage) {
  const uint32_t wrapped_last_flush_pos = WrapPosition(last_flush_pos);
  uint16_t last_bytes;
  uint8_t last_bytes_bits;
  ContextLut literal_context_lut = BROTLI_CONTEXT_LUT(literal_context_mode);
  BrotliEncoderParams block_params = *params;

  if (bytes == 0) {
    /* Write the ISLAST and ISEMPTY bits. */
    BrotliWriteBits(2, 3, storage_ix, storage);
    /* Pad to a byte boundary. */
    *storage_ix = (*storage_ix + 7u) & ~7u;
    return;
  }

  if (!ShouldCompress(data, mask, last_flush_pos, bytes,
                      num_literals, num_commands)) {
    /* Restore the distance cache, as its last update by
       CreateBackwardReferences is now unused. */
    memcpy(dist_cache, saved_dist_cache, 4 * sizeof(dist_cache[0]));
    BrotliStoreUncompressedMetaBlock(is_last, data,
                                     wrapped_last_flush_pos, mask, bytes,
                                     storage_ix, storage);
    return;
  }

  BROTLI_DCHECK(*storage_ix <= 14);
  /* Snapshot the pending header bits so they can be restored if the
     compressed form turns out larger than the uncompressed fallback. */
  last_bytes = (uint16_t)((storage[1] << 8) | storage[0]);
  last_bytes_bits = (uint8_t)(*storage_ix);
  if (params->quality <= MAX_QUALITY_FOR_STATIC_ENTROPY_CODES) {
    /* Lowest qualities: static entropy codes, no block splitting. */
    BrotliStoreMetaBlockFast(m, data, wrapped_last_flush_pos,
                             bytes, mask, is_last, params,
                             commands, num_commands,
                             storage_ix, storage);
    if (BROTLI_IS_OOM(m)) return;
  } else if (params->quality < MIN_QUALITY_FOR_BLOCK_SPLIT) {
    /* Mid qualities: one histogram per category, still no block split. */
    BrotliStoreMetaBlockTrivial(m, data, wrapped_last_flush_pos,
                                bytes, mask, is_last, params,
                                commands, num_commands,
                                storage_ix, storage);
    if (BROTLI_IS_OOM(m)) return;
  } else {
    /* High qualities: build a full metablock split. */
    MetaBlockSplit mb;
    InitMetaBlockSplit(&mb);
    if (params->quality < MIN_QUALITY_FOR_HQ_BLOCK_SPLITTING) {
      size_t num_literal_contexts = 1;
      const uint32_t* literal_context_map = NULL;
      if (!params->disable_literal_context_modeling) {
        /* TODO(eustas): pull to higher level and reuse. */
        uint32_t* arena =
            BROTLI_ALLOC(m, uint32_t, 32 * (BROTLI_MAX_STATIC_CONTEXTS + 1));
        if (BROTLI_IS_OOM(m) || BROTLI_IS_NULL(arena)) return;
        DecideOverLiteralContextModeling(
            data, wrapped_last_flush_pos, bytes, mask, params->quality,
            params->size_hint, &num_literal_contexts,
            &literal_context_map, arena);
        BROTLI_FREE(m, arena);
      }
      BrotliBuildMetaBlockGreedy(m, data, wrapped_last_flush_pos, mask,
          prev_byte, prev_byte2, literal_context_lut, num_literal_contexts,
          literal_context_map, commands, num_commands, &mb);
      if (BROTLI_IS_OOM(m)) return;
    } else {
      BrotliBuildMetaBlock(m, data, wrapped_last_flush_pos, mask, &block_params,
                           prev_byte, prev_byte2,
                           commands, num_commands,
                           literal_context_mode,
                           &mb);
      if (BROTLI_IS_OOM(m)) return;
    }
    if (params->quality >= MIN_QUALITY_FOR_OPTIMIZE_HISTOGRAMS) {
      /* The number of distance symbols effectively used for distance
         histograms. It might be less than distance alphabet size
         for "Large Window Brotli" (32-bit). */
      BrotliOptimizeHistograms(block_params.dist.alphabet_size_limit, &mb);
    }
    BrotliStoreMetaBlock(m, data, wrapped_last_flush_pos, bytes, mask,
                         prev_byte, prev_byte2,
                         is_last,
                         &block_params,
                         literal_context_mode,
                         commands, num_commands,
                         &mb,
                         storage_ix, storage);
    if (BROTLI_IS_OOM(m)) return;
    DestroyMetaBlockSplit(m, &mb);
  }
  /* If the compressed form ended up larger than raw bytes (+4 bytes of
     overhead), roll back and emit an uncompressed metablock instead. */
  if (bytes + 4 < (*storage_ix >> 3)) {
    /* Restore the distance cache and last byte. */
    memcpy(dist_cache, saved_dist_cache, 4 * sizeof(dist_cache[0]));
    storage[0] = (uint8_t)last_bytes;
    storage[1] = (uint8_t)(last_bytes >> 8);
    *storage_ix = last_bytes_bits;
    BrotliStoreUncompressedMetaBlock(is_last, data,
                                     wrapped_last_flush_pos, mask,
                                     bytes, storage_ix, storage);
  }
}
    597 
    598 static void ChooseDistanceParams(BrotliEncoderParams* params) {
    599  uint32_t distance_postfix_bits = 0;
    600  uint32_t num_direct_distance_codes = 0;
    601 
    602  if (params->quality >= MIN_QUALITY_FOR_NONZERO_DISTANCE_PARAMS) {
    603    uint32_t ndirect_msb;
    604    if (params->mode == BROTLI_MODE_FONT) {
    605      distance_postfix_bits = 1;
    606      num_direct_distance_codes = 12;
    607    } else {
    608      distance_postfix_bits = params->dist.distance_postfix_bits;
    609      num_direct_distance_codes = params->dist.num_direct_distance_codes;
    610    }
    611    ndirect_msb = (num_direct_distance_codes >> distance_postfix_bits) & 0x0F;
    612    if (distance_postfix_bits > BROTLI_MAX_NPOSTFIX ||
    613        num_direct_distance_codes > BROTLI_MAX_NDIRECT ||
    614        (ndirect_msb << distance_postfix_bits) != num_direct_distance_codes) {
    615      distance_postfix_bits = 0;
    616      num_direct_distance_codes = 0;
    617    }
    618  }
    619 
    620  BrotliInitDistanceParams(&params->dist, distance_postfix_bits,
    621                           num_direct_distance_codes, params->large_window);
    622 }
    623 
/* One-time lazy initialization performed on the first encode call:
   sanitizes parameters, sets up the ring buffer, emits the stream header
   bits, and allocates the quality-specific arenas. Returns BROTLI_FALSE on
   OOM; idempotent once is_initialized_ is set. */
static BROTLI_BOOL EnsureInitialized(BrotliEncoderState* s) {
  MemoryManager* m = &s->memory_manager_;
  if (BROTLI_IS_OOM(m)) return BROTLI_FALSE;
  if (s->is_initialized_) return BROTLI_TRUE;

  s->last_bytes_bits_ = 0;
  s->last_bytes_ = 0;
  s->flint_ = BROTLI_FLINT_DONE;
  s->remaining_metadata_bytes_ = BROTLI_UINT32_MAX;

  /* Clamp user-supplied parameters and derive the block size. */
  SanitizeParams(&s->params);
  s->params.lgblock = ComputeLgBlock(&s->params);
  ChooseDistanceParams(&s->params);

  if (s->params.stream_offset != 0) {
    s->flint_ = BROTLI_FLINT_NEEDS_2_BYTES;
    /* Poison the distance cache. -16 +- 3 is still less than zero (invalid). */
    s->dist_cache_[0] = -16;
    s->dist_cache_[1] = -16;
    s->dist_cache_[2] = -16;
    s->dist_cache_[3] = -16;
    memcpy(s->saved_dist_cache_, s->dist_cache_, sizeof(s->saved_dist_cache_));
  }

  RingBufferSetup(&s->params, &s->ringbuffer_);

  /* Initialize last byte with stream header. */
  {
    int lgwin = s->params.lgwin;
    if (s->params.quality == FAST_ONE_PASS_COMPRESSION_QUALITY ||
        s->params.quality == FAST_TWO_PASS_COMPRESSION_QUALITY) {
      /* Fast qualities require a window of at least 2^18. */
      lgwin = BROTLI_MAX(int, lgwin, 18);
    }
    if (s->params.stream_offset == 0) {
      EncodeWindowBits(lgwin, s->params.large_window,
                       &s->last_bytes_, &s->last_bytes_bits_);
    } else {
      /* Bigger values have the same effect, but could cause overflows. */
      s->params.stream_offset = BROTLI_MIN(size_t,
          s->params.stream_offset, BROTLI_MAX_BACKWARD_LIMIT(lgwin));
    }
  }

  /* Allocate the arena matching the selected fast-compression path. */
  if (s->params.quality == FAST_ONE_PASS_COMPRESSION_QUALITY) {
    s->one_pass_arena_ = BROTLI_ALLOC(m, BrotliOnePassArena, 1);
    if (BROTLI_IS_OOM(m)) return BROTLI_FALSE;
    InitCommandPrefixCodes(s->one_pass_arena_);
  } else if (s->params.quality == FAST_TWO_PASS_COMPRESSION_QUALITY) {
    s->two_pass_arena_ = BROTLI_ALLOC(m, BrotliTwoPassArena, 1);
    if (BROTLI_IS_OOM(m)) return BROTLI_FALSE;
  }

  s->is_initialized_ = BROTLI_TRUE;
  return BROTLI_TRUE;
}
    679 
    680 static void BrotliEncoderInitParams(BrotliEncoderParams* params) {
    681  params->mode = BROTLI_DEFAULT_MODE;
    682  params->large_window = BROTLI_FALSE;
    683  params->quality = BROTLI_DEFAULT_QUALITY;
    684  params->lgwin = BROTLI_DEFAULT_WINDOW;
    685  params->lgblock = 0;
    686  params->stream_offset = 0;
    687  params->size_hint = 0;
    688  params->disable_literal_context_modeling = BROTLI_FALSE;
    689  BrotliInitSharedEncoderDictionary(&params->dictionary);
    690  params->dist.distance_postfix_bits = 0;
    691  params->dist.num_direct_distance_codes = 0;
    692  params->dist.alphabet_size_max =
    693      BROTLI_DISTANCE_ALPHABET_SIZE(0, 0, BROTLI_MAX_DISTANCE_BITS);
    694  params->dist.alphabet_size_limit = params->dist.alphabet_size_max;
    695  params->dist.max_distance = BROTLI_MAX_DISTANCE;
    696 }
    697 
/* Releases resources owned by |params| (currently only the attached shared
   encoder dictionary); counterpart of BrotliEncoderInitParams. */
static void BrotliEncoderCleanupParams(MemoryManager* m,
    BrotliEncoderParams* params) {
  BrotliCleanupSharedEncoderDictionary(m, &params->dictionary);
}
    702 
#ifdef BROTLI_REPORTING
/* When BROTLI_REPORTING is defined extra reporting module have to be linked. */
void BrotliEncoderOnStart(const BrotliEncoderState* s);
void BrotliEncoderOnFinish(const BrotliEncoderState* s);
#define BROTLI_ENCODER_ON_START(s) BrotliEncoderOnStart(s);
#define BROTLI_ENCODER_ON_FINISH(s) BrotliEncoderOnFinish(s);
#else
/* Without BROTLI_REPORTING the hooks are no-ops that still "use" |s| to
   avoid unused-variable warnings. */
#if !defined(BROTLI_ENCODER_ON_START)
#define BROTLI_ENCODER_ON_START(s) (void)(s);
#endif
#if !defined(BROTLI_ENCODER_ON_FINISH)
#define BROTLI_ENCODER_ON_FINISH(s) (void)(s);
#endif
#endif
    717 
    718 static void BrotliEncoderInitState(BrotliEncoderState* s) {
    719  BROTLI_ENCODER_ON_START(s);
    720  BrotliEncoderInitParams(&s->params);
    721  s->input_pos_ = 0;
    722  s->num_commands_ = 0;
    723  s->num_literals_ = 0;
    724  s->last_insert_len_ = 0;
    725  s->last_flush_pos_ = 0;
    726  s->last_processed_pos_ = 0;
    727  s->prev_byte_ = 0;
    728  s->prev_byte2_ = 0;
    729  s->storage_size_ = 0;
    730  s->storage_ = 0;
    731  HasherInit(&s->hasher_);
    732  s->large_table_ = NULL;
    733  s->large_table_size_ = 0;
    734  s->one_pass_arena_ = NULL;
    735  s->two_pass_arena_ = NULL;
    736  s->command_buf_ = NULL;
    737  s->literal_buf_ = NULL;
    738  s->total_in_ = 0;
    739  s->next_out_ = NULL;
    740  s->available_out_ = 0;
    741  s->total_out_ = 0;
    742  s->stream_state_ = BROTLI_STREAM_PROCESSING;
    743  s->is_last_block_emitted_ = BROTLI_FALSE;
    744  s->is_initialized_ = BROTLI_FALSE;
    745 
    746  RingBufferInit(&s->ringbuffer_);
    747 
    748  s->commands_ = 0;
    749  s->cmd_alloc_size_ = 0;
    750 
    751  /* Initialize distance cache. */
    752  s->dist_cache_[0] = 4;
    753  s->dist_cache_[1] = 11;
    754  s->dist_cache_[2] = 15;
    755  s->dist_cache_[3] = 16;
    756  /* Save the state of the distance cache in case we need to restore it for
    757     emitting an uncompressed block. */
    758  memcpy(s->saved_dist_cache_, s->dist_cache_, sizeof(s->saved_dist_cache_));
    759 }
    760 
    761 BrotliEncoderState* BrotliEncoderCreateInstance(
    762    brotli_alloc_func alloc_func, brotli_free_func free_func, void* opaque) {
    763  BROTLI_BOOL healthy = BrotliEncoderEnsureStaticInit();
    764  if (!healthy) {
    765    return 0;
    766  }
    767  BrotliEncoderState* state = (BrotliEncoderState*)BrotliBootstrapAlloc(
    768      sizeof(BrotliEncoderState), alloc_func, free_func, opaque);
    769  if (state == NULL) {
    770    /* BROTLI_DUMP(); */
    771    return 0;
    772  }
    773  BrotliInitMemoryManager(
    774      &state->memory_manager_, alloc_func, free_func, opaque);
    775  BrotliEncoderInitState(state);
    776  return state;
    777 }
    778 
    779 static void BrotliEncoderCleanupState(BrotliEncoderState* s) {
    780  MemoryManager* m = &s->memory_manager_;
    781 
    782  BROTLI_ENCODER_ON_FINISH(s);
    783 
    784  if (BROTLI_IS_OOM(m)) {
    785    BrotliWipeOutMemoryManager(m);
    786    return;
    787  }
    788 
    789  BROTLI_FREE(m, s->storage_);
    790  BROTLI_FREE(m, s->commands_);
    791  RingBufferFree(m, &s->ringbuffer_);
    792  DestroyHasher(m, &s->hasher_);
    793  BROTLI_FREE(m, s->large_table_);
    794  BROTLI_FREE(m, s->one_pass_arena_);
    795  BROTLI_FREE(m, s->two_pass_arena_);
    796  BROTLI_FREE(m, s->command_buf_);
    797  BROTLI_FREE(m, s->literal_buf_);
    798  BrotliEncoderCleanupParams(m, &s->params);
    799 }
    800 
    801 /* Deinitializes and frees BrotliEncoderState instance. */
    802 void BrotliEncoderDestroyInstance(BrotliEncoderState* state) {
    803  if (!state) {
    804    return;
    805  } else {
    806    BrotliEncoderCleanupState(state);
    807    BrotliBootstrapFree(state, &state->memory_manager_);
    808  }
    809 }
    810 
    811 /*
    812   Copies the given input data to the internal ring buffer of the compressor.
    813   No processing of the data occurs at this time and this function can be
    814   called multiple times before calling WriteBrotliData() to process the
    815   accumulated input. At most input_block_size() bytes of input data can be
    816   copied to the ring buffer, otherwise the next WriteBrotliData() will fail.
    817 */
    818 static void CopyInputToRingBuffer(BrotliEncoderState* s,
    819                                  const size_t input_size,
    820                                  const uint8_t* input_buffer) {
    821  RingBuffer* ringbuffer_ = &s->ringbuffer_;
    822  MemoryManager* m = &s->memory_manager_;
    823  RingBufferWrite(m, input_buffer, input_size, ringbuffer_);
    824  if (BROTLI_IS_OOM(m)) return;
    825  s->input_pos_ += input_size;
    826 
    827  /* TL;DR: If needed, initialize 7 more bytes in the ring buffer to make the
    828     hashing not depend on uninitialized data. This makes compression
    829     deterministic and it prevents uninitialized memory warnings in Valgrind.
    830     Even without erasing, the output would be valid (but nondeterministic).
    831 
    832     Background information: The compressor stores short (at most 8 bytes)
    833     substrings of the input already read in a hash table, and detects
    834     repetitions by looking up such substrings in the hash table. If it
    835     can find a substring, it checks whether the substring is really there
    836     in the ring buffer (or it's just a hash collision). Should the hash
    837     table become corrupt, this check makes sure that the output is
    838     still valid, albeit the compression ratio would be bad.
    839 
    840     The compressor populates the hash table from the ring buffer as it's
    841     reading new bytes from the input. However, at the last few indexes of
    842     the ring buffer, there are not enough bytes to build full-length
    843     substrings from. Since the hash table always contains full-length
    844     substrings, we overwrite with zeros here to make sure that those
    845     substrings will contain zeros at the end instead of uninitialized
    846     data.
    847 
    848     Please note that erasing is not necessary (because the
    849     memory region is already initialized since he ring buffer
    850     has a `tail' that holds a copy of the beginning,) so we
    851     skip erasing if we have already gone around at least once in
    852     the ring buffer.
    853 
    854     Only clear during the first round of ring-buffer writes. On
    855     subsequent rounds data in the ring-buffer would be affected. */
    856  if (ringbuffer_->pos_ <= ringbuffer_->mask_) {
    857    /* This is the first time when the ring buffer is being written.
    858       We clear 7 bytes just after the bytes that have been copied from
    859       the input buffer.
    860 
    861       The ring-buffer has a "tail" that holds a copy of the beginning,
    862       but only once the ring buffer has been fully written once, i.e.,
    863       pos <= mask. For the first time, we need to write values
    864       in this tail (where index may be larger than mask), so that
    865       we have exactly defined behavior and don't read uninitialized
    866       memory. Due to performance reasons, hashing reads data using a
    867       LOAD64, which can go 7 bytes beyond the bytes written in the
    868       ring-buffer. */
    869    memset(ringbuffer_->buffer_ + ringbuffer_->pos_, 0, 7);
    870  }
    871 }
    872 
    873 /* Marks all input as processed.
    874   Returns true if position wrapping occurs. */
    875 static BROTLI_BOOL UpdateLastProcessedPos(BrotliEncoderState* s) {
    876  uint32_t wrapped_last_processed_pos = WrapPosition(s->last_processed_pos_);
    877  uint32_t wrapped_input_pos = WrapPosition(s->input_pos_);
    878  s->last_processed_pos_ = s->input_pos_;
    879  return TO_BROTLI_BOOL(wrapped_input_pos < wrapped_last_processed_pos);
    880 }
    881 
/* Tries to lengthen the copy part of the last emitted command with leading
   bytes of the new input block, as long as the command's source (ring
   buffer data, or a compound-dictionary chunk) keeps matching. On success
   the command's copy length grows, |*bytes| shrinks and
   |*wrapped_last_processed_pos| advances accordingly. */
static void ExtendLastCommand(BrotliEncoderState* s, uint32_t* bytes,
                              uint32_t* wrapped_last_processed_pos) {
  Command* last_command = &s->commands_[s->num_commands_ - 1];
  const uint8_t* data = s->ringbuffer_.buffer_;
  const uint32_t mask = s->ringbuffer_.mask_;
  uint64_t max_backward_distance =
      (((uint64_t)1) << s->params.lgwin) - BROTLI_WINDOW_GAP;
  /* Low 25 bits hold the copy length proper; the high bits carry a
     length-code correction (see the `>> 25` term in GetLengthCode below). */
  uint64_t last_copy_len = last_command->copy_len_ & 0x1FFFFFF;
  uint64_t last_processed_pos = s->last_processed_pos_ - last_copy_len;
  uint64_t max_distance = last_processed_pos < max_backward_distance ?
      last_processed_pos : max_backward_distance;
  /* Most recently used distance. */
  uint64_t cmd_dist = (uint64_t)s->dist_cache_[0];
  uint32_t distance_code = CommandRestoreDistanceCode(last_command,
                                                      &s->params.dist);
  const CompoundDictionary* dict = &s->params.dictionary.compound;
  size_t compound_dictionary_size = dict->total_size;
  /* Only extend when the command's distance is a short (cache-relative)
     code, or its absolute distance equals the last used distance. */
  if (distance_code < BROTLI_NUM_DISTANCE_SHORT_CODES ||
      distance_code - (BROTLI_NUM_DISTANCE_SHORT_CODES - 1) == cmd_dist) {
    if (cmd_dist <= max_distance) {
      /* Source lies inside the window: extend while bytes keep matching. */
      while (*bytes != 0 && data[*wrapped_last_processed_pos & mask] ==
             data[(*wrapped_last_processed_pos - cmd_dist) & mask]) {
        last_command->copy_len_++;
        (*bytes)--;
        (*wrapped_last_processed_pos)++;
      }
    } else {
      /* Source reaches beyond the window into the compound dictionary. */
      if ((cmd_dist - max_distance - 1) < compound_dictionary_size &&
          last_copy_len < cmd_dist - max_distance) {
        /* Byte offset, within the concatenated dictionary, of the first
           byte following the already-copied span. */
        size_t address =
            compound_dictionary_size - (size_t)(cmd_dist - max_distance) +
            (size_t)last_copy_len;
        size_t br_index = 0;
        size_t br_offset;
        const uint8_t* chunk;
        size_t chunk_length;
        /* Locate the chunk containing |address|. */
        while (address >= dict->chunk_offsets[br_index + 1]) br_index++;
        br_offset = address - dict->chunk_offsets[br_index];
        chunk = dict->chunk_source[br_index];
        chunk_length =
            dict->chunk_offsets[br_index + 1] - dict->chunk_offsets[br_index];
        /* Extend while input matches the dictionary, hopping to the next
           chunk when the current one is exhausted. */
        while (*bytes != 0 && data[*wrapped_last_processed_pos & mask] ==
               chunk[br_offset]) {
          last_command->copy_len_++;
          (*bytes)--;
          (*wrapped_last_processed_pos)++;
          if (++br_offset == chunk_length) {
            br_index++;
            br_offset = 0;
            if (br_index != dict->num_chunks) {
              chunk = dict->chunk_source[br_index];
              chunk_length = dict->chunk_offsets[br_index + 1] -
                  dict->chunk_offsets[br_index];
            } else {
              break;
            }
          }
        }
      }
    }
    /* The copy length is at most the metablock size, and thus expressible. */
    GetLengthCode(last_command->insert_len_,
                  (size_t)((int)(last_command->copy_len_ & 0x1FFFFFF) +
                           (int)(last_command->copy_len_ >> 25)),
                  TO_BROTLI_BOOL((last_command->dist_prefix_ & 0x3FF) == 0),
                  &last_command->cmd_prefix_);
  }
}
    949 
/*
  Processes the accumulated input data and sets |*out_size| to the length of
  the new output meta-block, or to zero if no new output meta-block has been
  created (in this case the processed input data is buffered internally).
  If |*out_size| is positive, |*output| points to the start of the output
  data. If |is_last| or |force_flush| is BROTLI_TRUE, an output meta-block is
  always created. However, until |is_last| is BROTLI_TRUE encoder may retain up
  to 7 bits of the last byte of output. Byte-alignment could be enforced by
  emitting an empty meta-data block.
  Returns BROTLI_FALSE if the size of the input data is larger than
  input_block_size().
*/
static BROTLI_BOOL EncodeData(
    BrotliEncoderState* s, const BROTLI_BOOL is_last,
    const BROTLI_BOOL force_flush, size_t* out_size, uint8_t** output) {
  const uint64_t delta = UnprocessedInputSize(s);
  uint32_t bytes = (uint32_t)delta;
  uint32_t wrapped_last_processed_pos = WrapPosition(s->last_processed_pos_);
  uint8_t* data;
  uint32_t mask;
  MemoryManager* m = &s->memory_manager_;
  ContextType literal_context_mode;
  ContextLut literal_context_lut;
  BROTLI_BOOL fast_compress =
      s->params.quality == FAST_ONE_PASS_COMPRESSION_QUALITY ||
      s->params.quality == FAST_TWO_PASS_COMPRESSION_QUALITY;

  data = s->ringbuffer_.buffer_;
  mask = s->ringbuffer_.mask_;

  if (delta == 0) {  /* No new input; still might want to flush or finish. */
    if (!data) {  /* No input has been processed so far. */
      if (is_last) {  /* Emit complete finalized stream. */
        BROTLI_DCHECK(s->last_bytes_bits_ <= 14);
        /* Append the 2-bit empty last meta-block (ISLAST + ISLASTEMPTY). */
        s->last_bytes_ |= (uint16_t)(3u << s->last_bytes_bits_);
        s->last_bytes_bits_ = (uint8_t)(s->last_bytes_bits_ + 2u);
        s->tiny_buf_.u8[0] = (uint8_t)s->last_bytes_;
        s->tiny_buf_.u8[1] = (uint8_t)(s->last_bytes_ >> 8);
        *output = s->tiny_buf_.u8;
        *out_size = (s->last_bytes_bits_ + 7u) >> 3u;
        return BROTLI_TRUE;
      } else {  /* No data, not last -> no-op. */
        *out_size = 0;
        return BROTLI_TRUE;
      }
    } else {
      /* Fast compress performs flush every block -> flush is no-op. */
      if (!is_last && (!force_flush || fast_compress)) {  /* Another no-op. */
        *out_size = 0;
        return BROTLI_TRUE;
      }
    }
  }
  BROTLI_DCHECK(data);

  if (s->params.quality > s->params.dictionary.max_quality) return BROTLI_FALSE;
  /* Adding more blocks after "last" block is forbidden. */
  if (s->is_last_block_emitted_) return BROTLI_FALSE;
  if (is_last) s->is_last_block_emitted_ = BROTLI_TRUE;

  if (delta > InputBlockSize(s)) {
    return BROTLI_FALSE;
  }
  /* Two-pass fast mode needs its command / literal scratch buffers. */
  if (s->params.quality == FAST_TWO_PASS_COMPRESSION_QUALITY &&
      !s->command_buf_) {
    s->command_buf_ =
        BROTLI_ALLOC(m, uint32_t, kCompressFragmentTwoPassBlockSize);
    s->literal_buf_ =
        BROTLI_ALLOC(m, uint8_t, kCompressFragmentTwoPassBlockSize);
    if (BROTLI_IS_OOM(m) || BROTLI_IS_NULL(s->command_buf_) ||
        BROTLI_IS_NULL(s->literal_buf_)) {
      return BROTLI_FALSE;
    }
  }

  if (fast_compress) {
    uint8_t* storage;
    size_t storage_ix = s->last_bytes_bits_;
    size_t table_size;
    int* table;

    /* NOTE(review): 2 * bytes + 503 is presumably the worst-case storage
       bound for one fragment plus header slack — confirm against the
       fragment writers before changing. */
    storage = GetBrotliStorage(s, 2 * bytes + 503);
    if (BROTLI_IS_OOM(m)) return BROTLI_FALSE;
    /* Seed storage with the pending partial byte from the previous block. */
    storage[0] = (uint8_t)s->last_bytes_;
    storage[1] = (uint8_t)(s->last_bytes_ >> 8);
    table = GetHashTable(s, s->params.quality, bytes, &table_size);
    if (BROTLI_IS_OOM(m)) return BROTLI_FALSE;
    if (s->params.quality == FAST_ONE_PASS_COMPRESSION_QUALITY) {
      BrotliCompressFragmentFast(
          s->one_pass_arena_, &data[wrapped_last_processed_pos & mask],
          bytes, is_last,
          table, table_size,
          &storage_ix, storage);
      if (BROTLI_IS_OOM(m)) return BROTLI_FALSE;
    } else {
      BrotliCompressFragmentTwoPass(
          s->two_pass_arena_, &data[wrapped_last_processed_pos & mask],
          bytes, is_last,
          s->command_buf_, s->literal_buf_,
          table, table_size,
          &storage_ix, storage);
      if (BROTLI_IS_OOM(m)) return BROTLI_FALSE;
    }
    /* Carry the trailing partial byte over to the next block. */
    s->last_bytes_ = (uint16_t)(storage[storage_ix >> 3]);
    s->last_bytes_bits_ = storage_ix & 7u;
    UpdateLastProcessedPos(s);
    *output = &storage[0];
    *out_size = storage_ix >> 3;
    return BROTLI_TRUE;
  }

  {
    /* Theoretical max number of commands is 1 per 2 bytes. */
    size_t newsize = s->num_commands_ + bytes / 2 + 1;
    if (newsize > s->cmd_alloc_size_) {
      Command* new_commands;
      /* Reserve a bit more memory to allow merging with a next block
         without reallocation: that would impact speed. */
      newsize += (bytes / 4) + 16;
      s->cmd_alloc_size_ = newsize;
      new_commands = BROTLI_ALLOC(m, Command, newsize);
      if (BROTLI_IS_OOM(m) || BROTLI_IS_NULL(new_commands)) return BROTLI_FALSE;
      if (s->commands_) {
        memcpy(new_commands, s->commands_, sizeof(Command) * s->num_commands_);
        BROTLI_FREE(m, s->commands_);
      }
      s->commands_ = new_commands;
    }
  }

  InitOrStitchToPreviousBlock(m, &s->hasher_, data, mask, &s->params,
      wrapped_last_processed_pos, bytes, is_last);

  literal_context_mode = ChooseContextMode(
      &s->params, data, WrapPosition(s->last_flush_pos_),
      mask, (size_t)(s->input_pos_ - s->last_flush_pos_));
  literal_context_lut = BROTLI_CONTEXT_LUT(literal_context_mode);

  if (BROTLI_IS_OOM(m)) return BROTLI_FALSE;

  /* If the last command is complete (no pending literals), try letting it
     absorb matching bytes from the new block before searching anew. */
  if (s->num_commands_ && s->last_insert_len_ == 0) {
    ExtendLastCommand(s, &bytes, &wrapped_last_processed_pos);
  }

  /* Produce backward references (commands) for the new bytes; the match
     search variant depends on quality. */
  if (s->params.quality == ZOPFLIFICATION_QUALITY) {
    BROTLI_DCHECK(s->params.hasher.type == 10);
    BrotliCreateZopfliBackwardReferences(m, bytes, wrapped_last_processed_pos,
        data, mask, literal_context_lut, &s->params,
        &s->hasher_, s->dist_cache_,
        &s->last_insert_len_, &s->commands_[s->num_commands_],
        &s->num_commands_, &s->num_literals_);
    if (BROTLI_IS_OOM(m)) return BROTLI_FALSE;
  } else if (s->params.quality == HQ_ZOPFLIFICATION_QUALITY) {
    BROTLI_DCHECK(s->params.hasher.type == 10);
    BrotliCreateHqZopfliBackwardReferences(m, bytes, wrapped_last_processed_pos,
        data, mask, literal_context_lut, &s->params,
        &s->hasher_, s->dist_cache_,
        &s->last_insert_len_, &s->commands_[s->num_commands_],
        &s->num_commands_, &s->num_literals_);
    if (BROTLI_IS_OOM(m)) return BROTLI_FALSE;
  } else {
    BrotliCreateBackwardReferences(bytes, wrapped_last_processed_pos,
        data, mask, literal_context_lut, &s->params,
        &s->hasher_, s->dist_cache_,
        &s->last_insert_len_, &s->commands_[s->num_commands_],
        &s->num_commands_, &s->num_literals_);
  }

  {
    const size_t max_length = MaxMetablockSize(&s->params);
    const size_t max_literals = max_length / 8;
    const size_t max_commands = max_length / 8;
    const size_t processed_bytes = (size_t)(s->input_pos_ - s->last_flush_pos_);
    /* If maximal possible additional block doesn't fit metablock, flush now. */
    /* TODO(eustas): Postpone decision until next block arrives? */
    const BROTLI_BOOL next_input_fits_metablock = TO_BROTLI_BOOL(
        processed_bytes + InputBlockSize(s) <= max_length);
    /* If block splitting is not used, then flush as soon as there is some
       amount of commands / literals produced. */
    const BROTLI_BOOL should_flush = TO_BROTLI_BOOL(
        s->params.quality < MIN_QUALITY_FOR_BLOCK_SPLIT &&
        s->num_literals_ + s->num_commands_ >= MAX_NUM_DELAYED_SYMBOLS);
    if (!is_last && !force_flush && !should_flush &&
        next_input_fits_metablock &&
        s->num_literals_ < max_literals &&
        s->num_commands_ < max_commands) {
      /* Merge with next input block. Everything will happen later. */
      if (UpdateLastProcessedPos(s)) {
        HasherReset(&s->hasher_);
      }
      *out_size = 0;
      return BROTLI_TRUE;
    }
  }

  /* Create the last insert-only command. */
  if (s->last_insert_len_ > 0) {
    InitInsertCommand(&s->commands_[s->num_commands_++], s->last_insert_len_);
    s->num_literals_ += s->last_insert_len_;
    s->last_insert_len_ = 0;
  }

  if (!is_last && s->input_pos_ == s->last_flush_pos_) {
    /* We have no new input data and we don't have to finish the stream, so
       nothing to do. */
    *out_size = 0;
    return BROTLI_TRUE;
  }
  BROTLI_DCHECK(s->input_pos_ >= s->last_flush_pos_);
  BROTLI_DCHECK(s->input_pos_ > s->last_flush_pos_ || is_last);
  BROTLI_DCHECK(s->input_pos_ - s->last_flush_pos_ <= 1u << 24);
  {
    const uint32_t metablock_size =
        (uint32_t)(s->input_pos_ - s->last_flush_pos_);
    uint8_t* storage = GetBrotliStorage(s, 2 * metablock_size + 503);
    size_t storage_ix = s->last_bytes_bits_;
    if (BROTLI_IS_OOM(m)) return BROTLI_FALSE;
    /* Seed storage with the pending partial byte. */
    storage[0] = (uint8_t)s->last_bytes_;
    storage[1] = (uint8_t)(s->last_bytes_ >> 8);
    WriteMetaBlockInternal(
        m, data, mask, s->last_flush_pos_, metablock_size, is_last,
        literal_context_mode, &s->params, s->prev_byte_, s->prev_byte2_,
        s->num_literals_, s->num_commands_, s->commands_, s->saved_dist_cache_,
        s->dist_cache_, &storage_ix, storage);
    if (BROTLI_IS_OOM(m)) return BROTLI_FALSE;
    /* Carry the trailing partial byte over to the next meta-block. */
    s->last_bytes_ = (uint16_t)(storage[storage_ix >> 3]);
    s->last_bytes_bits_ = storage_ix & 7u;
    s->last_flush_pos_ = s->input_pos_;
    if (UpdateLastProcessedPos(s)) {
      HasherReset(&s->hasher_);
    }
    /* Remember the context bytes preceding the next meta-block. */
    if (s->last_flush_pos_ > 0) {
      s->prev_byte_ = data[((uint32_t)s->last_flush_pos_ - 1) & mask];
    }
    if (s->last_flush_pos_ > 1) {
      s->prev_byte2_ = data[(uint32_t)(s->last_flush_pos_ - 2) & mask];
    }
    s->num_commands_ = 0;
    s->num_literals_ = 0;
    /* Save the state of the distance cache in case we need to restore it for
       emitting an uncompressed block. */
    memcpy(s->saved_dist_cache_, s->dist_cache_, sizeof(s->saved_dist_cache_));
    *output = &storage[0];
    *out_size = storage_ix >> 3;
    return BROTLI_TRUE;
  }
}
   1197 
/* Dumps remaining output bits and metadata header to |header|.
   Returns number of produced bytes.
   REQUIRED: |header| should be 8-byte aligned and at least 16 bytes long.
   REQUIRED: |block_size| <= (1 << 24). */
static size_t WriteMetadataHeader(
    BrotliEncoderState* s, const size_t block_size, uint8_t* header) {
  size_t storage_ix;
  storage_ix = s->last_bytes_bits_;
  /* Flush the pending partial byte into the header first. */
  header[0] = (uint8_t)s->last_bytes_;
  header[1] = (uint8_t)(s->last_bytes_ >> 8);
  s->last_bytes_ = 0;
  s->last_bytes_bits_ = 0;

  /* ISLAST = 0, MNIBBLES = 3 (metadata meta-block), reserved bit = 0. */
  BrotliWriteBits(1, 0, &storage_ix, header);
  BrotliWriteBits(2, 3, &storage_ix, header);
  BrotliWriteBits(1, 0, &storage_ix, header);
  if (block_size == 0) {
    /* MSKIPBYTES = 0: zero-length metadata payload. */
    BrotliWriteBits(2, 0, &storage_ix, header);
  } else {
    /* Number of bytes needed to encode |block_size - 1|. */
    uint32_t nbits = (block_size == 1) ? 1 :
        (Log2FloorNonZero((uint32_t)block_size - 1) + 1);
    uint32_t nbytes = (nbits + 7) / 8;
    BrotliWriteBits(2, nbytes, &storage_ix, header);
    BrotliWriteBits(8 * nbytes, block_size - 1, &storage_ix, header);
  }
  /* Round the bit position up to whole bytes. */
  return (storage_ix + 7u) >> 3;
}
   1225 
   1226 size_t BrotliEncoderMaxCompressedSize(size_t input_size) {
   1227  /* [window bits / empty metadata] + N * [uncompressed] + [last empty] */
   1228  size_t num_large_blocks = input_size >> 14;
   1229  size_t overhead = 2 + (4 * num_large_blocks) + 3 + 1;
   1230  size_t result = input_size + overhead;
   1231  if (input_size == 0) return 2;
   1232  return (result < input_size) ? 0 : result;
   1233 }
   1234 
   1235 /* Wraps data to uncompressed brotli stream with minimal window size.
   1236   |output| should point at region with at least BrotliEncoderMaxCompressedSize
   1237   addressable bytes.
   1238   Returns the length of stream. */
   1239 static size_t MakeUncompressedStream(
   1240    const uint8_t* input, size_t input_size, uint8_t* output) {
   1241  size_t size = input_size;
   1242  size_t result = 0;
   1243  size_t offset = 0;
   1244  if (input_size == 0) {
   1245    output[0] = 6;
   1246    return 1;
   1247  }
   1248  output[result++] = 0x21;  /* window bits = 10, is_last = false */
   1249  output[result++] = 0x03;  /* empty metadata, padding */
   1250  while (size > 0) {
   1251    uint32_t nibbles = 0;
   1252    uint32_t chunk_size;
   1253    uint32_t bits;
   1254    chunk_size = (size > (1u << 24)) ? (1u << 24) : (uint32_t)size;
   1255    if (chunk_size > (1u << 16)) nibbles = (chunk_size > (1u << 20)) ? 2 : 1;
   1256    bits =
   1257        (nibbles << 1) | ((chunk_size - 1) << 3) | (1u << (19 + 4 * nibbles));
   1258    output[result++] = (uint8_t)bits;
   1259    output[result++] = (uint8_t)(bits >> 8);
   1260    output[result++] = (uint8_t)(bits >> 16);
   1261    if (nibbles == 2) output[result++] = (uint8_t)(bits >> 24);
   1262    memcpy(&output[result], &input[offset], chunk_size);
   1263    result += chunk_size;
   1264    offset += chunk_size;
   1265    size -= chunk_size;
   1266  }
   1267  output[result++] = 3;
   1268  return result;
   1269 }
   1270 
/* One-shot compression: encodes |input_size| bytes from |input_buffer| into
   |encoded_buffer| using a temporary streaming encoder. On success sets
   |*encoded_size| to the produced length and returns BROTLI_TRUE. If
   compression fails or expands the data past the guaranteed bound, falls
   back to wrapping the input into uncompressed meta-blocks when the output
   buffer is large enough for that. */
BROTLI_BOOL BrotliEncoderCompress(
    int quality, int lgwin, BrotliEncoderMode mode, size_t input_size,
    const uint8_t input_buffer[BROTLI_ARRAY_PARAM(input_size)],
    size_t* encoded_size,
    uint8_t encoded_buffer[BROTLI_ARRAY_PARAM(*encoded_size)]) {
  BrotliEncoderState* s;
  size_t out_size = *encoded_size;
  /* Keep the original pointers for the uncompressed fallback below. */
  const uint8_t* input_start = input_buffer;
  uint8_t* output_start = encoded_buffer;
  size_t max_out_size = BrotliEncoderMaxCompressedSize(input_size);
  if (out_size == 0) {
    /* Output buffer needs at least one byte. */
    return BROTLI_FALSE;
  }
  if (input_size == 0) {
    /* Handle the special case of empty input. */
    *encoded_size = 1;
    *encoded_buffer = 6;  /* Shortest valid stream: empty last meta-block. */
    return BROTLI_TRUE;
  }

  s = BrotliEncoderCreateInstance(0, 0, 0);
  if (!s) {
    return BROTLI_FALSE;
  } else {
    size_t available_in = input_size;
    const uint8_t* next_in = input_buffer;
    size_t available_out = *encoded_size;
    uint8_t* next_out = encoded_buffer;
    size_t total_out = 0;
    BROTLI_BOOL result = BROTLI_FALSE;
    /* TODO(eustas): check that parameters are sane. */
    BrotliEncoderSetParameter(s, BROTLI_PARAM_QUALITY, (uint32_t)quality);
    BrotliEncoderSetParameter(s, BROTLI_PARAM_LGWIN, (uint32_t)lgwin);
    BrotliEncoderSetParameter(s, BROTLI_PARAM_MODE, (uint32_t)mode);
    BrotliEncoderSetParameter(s, BROTLI_PARAM_SIZE_HINT, (uint32_t)input_size);
    if (lgwin > BROTLI_MAX_WINDOW_BITS) {
      BrotliEncoderSetParameter(s, BROTLI_PARAM_LARGE_WINDOW, BROTLI_TRUE);
    }
    /* Single FINISH operation: the whole input is available up front. */
    result = BrotliEncoderCompressStream(s, BROTLI_OPERATION_FINISH,
        &available_in, &next_in, &available_out, &next_out, &total_out);
    if (!BrotliEncoderIsFinished(s)) result = 0;
    *encoded_size = total_out;
    BrotliEncoderDestroyInstance(s);
    if (!result || (max_out_size && *encoded_size > max_out_size)) {
      goto fallback;
    }
    return BROTLI_TRUE;
  }
fallback:
  /* Compression failed or produced more than the documented bound; emit the
     input as uncompressed meta-blocks if the caller's buffer allows it. */
  *encoded_size = 0;
  if (!max_out_size) return BROTLI_FALSE;
  if (out_size >= max_out_size) {
    *encoded_size =
        MakeUncompressedStream(input_start, input_size, output_start);
    return BROTLI_TRUE;
  }
  return BROTLI_FALSE;
}
   1330 
/* Seals the pending partial output byte by appending a 6-bit empty metadata
   meta-block, producing whole bytes that are staged in |next_out_| /
   |available_out_|. */
static void InjectBytePaddingBlock(BrotliEncoderState* s) {
  uint32_t seal = s->last_bytes_;
  size_t seal_bits = s->last_bytes_bits_;
  uint8_t* destination;
  s->last_bytes_ = 0;
  s->last_bytes_bits_ = 0;
  /* is_last = 0, data_nibbles = 11, reserved = 0, meta_nibbles = 00 */
  seal |= 0x6u << seal_bits;
  seal_bits += 6;
  /* If we have already created storage, then append to it.
     Storage is valid until next block is being compressed. */
  if (s->next_out_) {
    destination = s->next_out_ + s->available_out_;
  } else {
    destination = s->tiny_buf_.u8;
    s->next_out_ = destination;
  }
  /* At most 14 pending bits plus 6 seal bits -> up to 3 output bytes. */
  destination[0] = (uint8_t)seal;
  if (seal_bits > 8) destination[1] = (uint8_t)(seal >> 8);
  if (seal_bits > 16) destination[2] = (uint8_t)(seal >> 16);
  s->available_out_ += (seal_bits + 7) >> 3;
}
   1353 
   1354 /* Fills the |total_out|, if it is not NULL. */
   1355 static void SetTotalOut(BrotliEncoderState* s, size_t* total_out) {
   1356  if (total_out) {
   1357    /* Saturating conversion uint64_t -> size_t */
   1358    size_t result = (size_t)-1;
   1359    if (s->total_out_ < result) {
   1360      result = (size_t)s->total_out_;
   1361    }
   1362    *total_out = result;
   1363  }
   1364 }
   1365 
/* Injects padding bits or pushes compressed data to output.
   Returns false if nothing is done. */
static BROTLI_BOOL InjectFlushOrPushOutput(BrotliEncoderState* s,
    size_t* available_out, uint8_t** next_out, size_t* total_out) {
  /* A flush was requested while a partial byte is pending: seal it with a
     byte-padding block first so only whole bytes are pushed out. */
  if (s->stream_state_ == BROTLI_STREAM_FLUSH_REQUESTED &&
      s->last_bytes_bits_ != 0) {
    InjectBytePaddingBlock(s);
    return BROTLI_TRUE;
  }

  /* Copy as much staged output as the caller's buffer can accept. */
  if (s->available_out_ != 0 && *available_out != 0) {
    size_t copy_output_size =
        BROTLI_MIN(size_t, s->available_out_, *available_out);
    memcpy(*next_out, s->next_out_, copy_output_size);
    *next_out += copy_output_size;
    *available_out -= copy_output_size;
    s->next_out_ += copy_output_size;
    s->available_out_ -= copy_output_size;
    s->total_out_ += copy_output_size;
    SetTotalOut(s, total_out);
    return BROTLI_TRUE;
  }

  return BROTLI_FALSE;
}
   1391 
   1392 static void CheckFlushComplete(BrotliEncoderState* s) {
   1393  if (s->stream_state_ == BROTLI_STREAM_FLUSH_REQUESTED &&
   1394      s->available_out_ == 0) {
   1395    s->stream_state_ = BROTLI_STREAM_PROCESSING;
   1396    s->next_out_ = 0;
   1397  }
   1398 }
   1399 
/* Streaming workflow for the two "fast" qualities (FAST_ONE_PASS / FAST_TWO_PASS):
   compresses input fragment-by-fragment with the dedicated fragment
   compressors instead of the regular ring-buffer pipeline. Returns
   BROTLI_FALSE on OOM or when invoked with any other quality.
   NOTE(review): the early OOM returns do not free |tmp_command_buf| /
   |tmp_literal_buf|; presumably the memory manager reclaims them on
   encoder destruction — confirm against memory.c. */
static BROTLI_BOOL BrotliEncoderCompressStreamFast(
   BrotliEncoderState* s, BrotliEncoderOperation op, size_t* available_in,
   const uint8_t** next_in, size_t* available_out, uint8_t** next_out,
   size_t* total_out) {
 const size_t block_size_limit = (size_t)1 << s->params.lgwin;
 const size_t buf_size = BROTLI_MIN(size_t, kCompressFragmentTwoPassBlockSize,
     BROTLI_MIN(size_t, *available_in, block_size_limit));
 uint32_t* tmp_command_buf = NULL;
 uint32_t* command_buf = NULL;
 uint8_t* tmp_literal_buf = NULL;
 uint8_t* literal_buf = NULL;
 MemoryManager* m = &s->memory_manager_;
 if (s->params.quality != FAST_ONE_PASS_COMPRESSION_QUALITY &&
     s->params.quality != FAST_TWO_PASS_COMPRESSION_QUALITY) {
   return BROTLI_FALSE;
 }
 /* Two-pass quality needs command/literal scratch buffers. Full-sized
    buffers are cached on the state; smaller inputs get one-shot temporary
    buffers that are freed before returning. */
 if (s->params.quality == FAST_TWO_PASS_COMPRESSION_QUALITY) {
   if (!s->command_buf_ && buf_size == kCompressFragmentTwoPassBlockSize) {
     s->command_buf_ =
         BROTLI_ALLOC(m, uint32_t, kCompressFragmentTwoPassBlockSize);
     s->literal_buf_ =
         BROTLI_ALLOC(m, uint8_t, kCompressFragmentTwoPassBlockSize);
     if (BROTLI_IS_OOM(m) || BROTLI_IS_NULL(s->command_buf_) ||
         BROTLI_IS_NULL(s->literal_buf_)) {
       return BROTLI_FALSE;
     }
   }
   if (s->command_buf_) {
     command_buf = s->command_buf_;
     literal_buf = s->literal_buf_;
   } else {
     tmp_command_buf = BROTLI_ALLOC(m, uint32_t, buf_size);
     tmp_literal_buf = BROTLI_ALLOC(m, uint8_t, buf_size);
     if (BROTLI_IS_OOM(m) || BROTLI_IS_NULL(tmp_command_buf) ||
         BROTLI_IS_NULL(tmp_literal_buf)) {
       return BROTLI_FALSE;
     }
     command_buf = tmp_command_buf;
     literal_buf = tmp_literal_buf;
   }
 }

 while (BROTLI_TRUE) {
   /* First drain pending output / inject flush padding, if any. */
   if (InjectFlushOrPushOutput(s, available_out, next_out, total_out)) {
     continue;
   }

   /* Compress block only when internal output buffer is empty, stream is not
      finished, there is no pending flush request, and there is either
      additional input or pending operation. */
   if (s->available_out_ == 0 &&
       s->stream_state_ == BROTLI_STREAM_PROCESSING &&
       (*available_in != 0 || op != BROTLI_OPERATION_PROCESS)) {
     size_t block_size = BROTLI_MIN(size_t, block_size_limit, *available_in);
     /* is_last / force_flush only fire on the fragment that consumes the
        remainder of the input. */
     BROTLI_BOOL is_last =
         (*available_in == block_size) && (op == BROTLI_OPERATION_FINISH);
     BROTLI_BOOL force_flush =
         (*available_in == block_size) && (op == BROTLI_OPERATION_FLUSH);
     /* Worst-case output bound for one fragment. */
     size_t max_out_size = 2 * block_size + 503;
     BROTLI_BOOL inplace = BROTLI_TRUE;
     uint8_t* storage = NULL;
     size_t storage_ix = s->last_bytes_bits_;
     size_t table_size;
     int* table;

     if (force_flush && block_size == 0) {
       s->stream_state_ = BROTLI_STREAM_FLUSH_REQUESTED;
       continue;
     }
     /* Write straight into the client buffer when it is provably large
        enough; otherwise go through the internal storage buffer. */
     if (max_out_size <= *available_out) {
       storage = *next_out;
     } else {
       inplace = BROTLI_FALSE;
       storage = GetBrotliStorage(s, max_out_size);
       if (BROTLI_IS_OOM(m)) return BROTLI_FALSE;
     }
     /* Seed storage with the carried-over partial byte(s) of the stream. */
     storage[0] = (uint8_t)s->last_bytes_;
     storage[1] = (uint8_t)(s->last_bytes_ >> 8);
     table = GetHashTable(s, s->params.quality, block_size, &table_size);
     if (BROTLI_IS_OOM(m)) return BROTLI_FALSE;

     if (s->params.quality == FAST_ONE_PASS_COMPRESSION_QUALITY) {
       BrotliCompressFragmentFast(s->one_pass_arena_, *next_in, block_size,
           is_last, table, table_size, &storage_ix, storage);
       if (BROTLI_IS_OOM(m)) return BROTLI_FALSE;
     } else {
       BrotliCompressFragmentTwoPass(s->two_pass_arena_, *next_in, block_size,
           is_last, command_buf, literal_buf, table, table_size,
           &storage_ix, storage);
       if (BROTLI_IS_OOM(m)) return BROTLI_FALSE;
     }
     if (block_size != 0) {
       *next_in += block_size;
       *available_in -= block_size;
       s->total_in_ += block_size;
     }
     if (inplace) {
       /* Output was produced directly in the client buffer; advance it,
          keeping any trailing partial byte in |last_bytes_| below. */
       size_t out_bytes = storage_ix >> 3;
       BROTLI_DCHECK(out_bytes <= *available_out);
       BROTLI_DCHECK((storage_ix & 7) == 0 || out_bytes < *available_out);
       *next_out += out_bytes;
       *available_out -= out_bytes;
       s->total_out_ += out_bytes;
       SetTotalOut(s, total_out);
     } else {
       /* Buffered path: publish via the internal output buffer; the loop's
          InjectFlushOrPushOutput pass will copy it to the client. */
       size_t out_bytes = storage_ix >> 3;
       s->next_out_ = storage;
       s->available_out_ = out_bytes;
     }
     /* Remember the bit-level tail of the stream for the next fragment. */
     s->last_bytes_ = (uint16_t)(storage[storage_ix >> 3]);
     s->last_bytes_bits_ = storage_ix & 7u;

     if (force_flush) s->stream_state_ = BROTLI_STREAM_FLUSH_REQUESTED;
     if (is_last) s->stream_state_ = BROTLI_STREAM_FINISHED;
     continue;
   }
   break;
 }
 /* BROTLI_FREE is a no-op on NULL pointers. */
 BROTLI_FREE(m, tmp_command_buf);
 BROTLI_FREE(m, tmp_literal_buf);
 CheckFlushComplete(s);
 return BROTLI_TRUE;
}
   1523 
/* Emits |*available_in| bytes of input as a raw metadata meta-block.
   Metadata payload is limited to 2^24 bytes. Drives a small state machine:
   PROCESSING -> METADATA_HEAD (emit header) -> METADATA_BODY (copy payload)
   -> PROCESSING. Returns BROTLI_FALSE on contract violation or encode
   failure. */
static BROTLI_BOOL ProcessMetadata(
   BrotliEncoderState* s, size_t* available_in, const uint8_t** next_in,
   size_t* available_out, uint8_t** next_out, size_t* total_out) {
 if (*available_in > (1u << 24)) return BROTLI_FALSE;
 /* Switch to metadata block workflow, if required. */
 if (s->stream_state_ == BROTLI_STREAM_PROCESSING) {
   s->remaining_metadata_bytes_ = (uint32_t)*available_in;
   s->stream_state_ = BROTLI_STREAM_METADATA_HEAD;
 }
 if (s->stream_state_ != BROTLI_STREAM_METADATA_HEAD &&
     s->stream_state_ != BROTLI_STREAM_METADATA_BODY) {
   return BROTLI_FALSE;
 }

 while (BROTLI_TRUE) {
   /* Drain pending output / flush padding before producing more. */
   if (InjectFlushOrPushOutput(s, available_out, next_out, total_out)) {
     continue;
   }
   if (s->available_out_ != 0) break;

   /* Any buffered regular input must be flushed out as a data meta-block
      before the metadata block may start. */
   if (s->input_pos_ != s->last_flush_pos_) {
     BROTLI_BOOL result = EncodeData(s, BROTLI_FALSE, BROTLI_TRUE,
         &s->available_out_, &s->next_out_);
     if (!result) return BROTLI_FALSE;
     continue;
   }

   if (s->stream_state_ == BROTLI_STREAM_METADATA_HEAD) {
     /* Header is small; stage it in the state-owned tiny buffer. */
     s->next_out_ = s->tiny_buf_.u8;
     s->available_out_ =
         WriteMetadataHeader(s, s->remaining_metadata_bytes_, s->next_out_);
     s->stream_state_ = BROTLI_STREAM_METADATA_BODY;
     continue;
   } else {
     /* Exit workflow only when there is no more input and no more output.
        Otherwise client may continue producing empty metadata blocks. */
     if (s->remaining_metadata_bytes_ == 0) {
       s->remaining_metadata_bytes_ = BROTLI_UINT32_MAX;
       s->stream_state_ = BROTLI_STREAM_PROCESSING;
       break;
     }
     if (*available_out) {
       /* Directly copy input to output. */
       uint32_t copy = (uint32_t)BROTLI_MIN(
           size_t, s->remaining_metadata_bytes_, *available_out);
       memcpy(*next_out, *next_in, copy);
       *next_in += copy;
       *available_in -= copy;
       s->total_in_ += copy;  /* not actually data input, though */
       s->remaining_metadata_bytes_ -= copy;
       *next_out += copy;
       *available_out -= copy;
     } else {
       /* This guarantees progress in "TakeOutput" workflow. */
       uint32_t copy = BROTLI_MIN(uint32_t, s->remaining_metadata_bytes_, 16);
       s->next_out_ = s->tiny_buf_.u8;
       memcpy(s->next_out_, *next_in, copy);
       *next_in += copy;
       *available_in -= copy;
       s->total_in_ += copy;  /* not actually data input, though */
       s->remaining_metadata_bytes_ -= copy;
       s->available_out_ = copy;
     }
     continue;
   }
 }

 return BROTLI_TRUE;
}
   1593 
   1594 static void UpdateSizeHint(BrotliEncoderState* s, size_t available_in) {
   1595  if (s->params.size_hint == 0) {
   1596    uint64_t delta = UnprocessedInputSize(s);
   1597    uint64_t tail = available_in;
   1598    uint32_t limit = 1u << 30;
   1599    uint32_t total;
   1600    if ((delta >= limit) || (tail >= limit) || ((delta + tail) >= limit)) {
   1601      total = limit;
   1602    } else {
   1603      total = (uint32_t)(delta + tail);
   1604    }
   1605    s->params.size_hint = total;
   1606  }
   1607 }
   1608 
/* Public streaming entry point: consumes |*available_in| bytes from
   |*next_in| and produces compressed bytes into |*next_out| according to
   |op|. Dispatches to the metadata workflow for EMIT_METADATA and to the
   fragment-based fast path for qualities 0/1; otherwise runs the regular
   ring-buffer pipeline. Returns BROTLI_FALSE on contract violation or
   internal failure. */
BROTLI_BOOL BrotliEncoderCompressStream(
   BrotliEncoderState* s, BrotliEncoderOperation op, size_t* available_in,
   const uint8_t** next_in, size_t* available_out, uint8_t** next_out,
   size_t* total_out) {
 if (!EnsureInitialized(s)) return BROTLI_FALSE;

 /* Unfinished metadata block; check requirements. */
 if (s->remaining_metadata_bytes_ != BROTLI_UINT32_MAX) {
   if (*available_in != s->remaining_metadata_bytes_) return BROTLI_FALSE;
   if (op != BROTLI_OPERATION_EMIT_METADATA) return BROTLI_FALSE;
 }

 if (op == BROTLI_OPERATION_EMIT_METADATA) {
   UpdateSizeHint(s, 0);  /* First data metablock might be emitted here. */
   return ProcessMetadata(
       s, available_in, next_in, available_out, next_out, total_out);
 }

 /* Non-metadata operations are rejected while a metadata block is open. */
 if (s->stream_state_ == BROTLI_STREAM_METADATA_HEAD ||
     s->stream_state_ == BROTLI_STREAM_METADATA_BODY) {
   return BROTLI_FALSE;
 }

 /* New input is only accepted in the normal processing state. */
 if (s->stream_state_ != BROTLI_STREAM_PROCESSING && *available_in != 0) {
   return BROTLI_FALSE;
 }
 if (s->params.quality == FAST_ONE_PASS_COMPRESSION_QUALITY ||
     s->params.quality == FAST_TWO_PASS_COMPRESSION_QUALITY) {
   return BrotliEncoderCompressStreamFast(s, op, available_in, next_in,
       available_out, next_out, total_out);
 }
 while (BROTLI_TRUE) {
   size_t remaining_block_size = RemainingInputBlockSize(s);
   /* Shorten input to flint size. */
   /* NOTE(review): |flint_| appears to bound the first emitted piece
      (e.g. for fast first-byte delivery) — confirm against state.h. */
   if (s->flint_ >= 0 && remaining_block_size > (size_t)s->flint_) {
     remaining_block_size = (size_t)s->flint_;
   }

   /* Stage 1: move client input into the ring buffer. */
   if (remaining_block_size != 0 && *available_in != 0) {
     size_t copy_input_size =
         BROTLI_MIN(size_t, remaining_block_size, *available_in);
     CopyInputToRingBuffer(s, copy_input_size, *next_in);
     *next_in += copy_input_size;
     *available_in -= copy_input_size;
     s->total_in_ += copy_input_size;
     if (s->flint_ > 0) s->flint_ = (int8_t)(s->flint_ - (int)copy_input_size);
     continue;
   }

   /* Stage 2: drain pending output / inject flush padding. */
   if (InjectFlushOrPushOutput(s, available_out, next_out, total_out)) {
     /* Exit the "emit flint" workflow. */
     if (s->flint_ == BROTLI_FLINT_WAITING_FOR_FLUSHING) {
       CheckFlushComplete(s);
       if (s->stream_state_ == BROTLI_STREAM_PROCESSING) {
         s->flint_ = BROTLI_FLINT_DONE;
       }
     }
     continue;
   }

   /* Compress data only when internal output buffer is empty, stream is not
      finished and there is no pending flush request. */
   if (s->available_out_ == 0 &&
       s->stream_state_ == BROTLI_STREAM_PROCESSING) {
     if (remaining_block_size == 0 || op != BROTLI_OPERATION_PROCESS) {
       BROTLI_BOOL is_last = TO_BROTLI_BOOL(
           (*available_in == 0) && op == BROTLI_OPERATION_FINISH);
       BROTLI_BOOL force_flush = TO_BROTLI_BOOL(
           (*available_in == 0) && op == BROTLI_OPERATION_FLUSH);
       BROTLI_BOOL result;
       /* Force emitting (uncompressed) piece containing flint. */
       if (!is_last && s->flint_ == 0) {
         s->flint_ = BROTLI_FLINT_WAITING_FOR_FLUSHING;
         force_flush = BROTLI_TRUE;
       }
       UpdateSizeHint(s, *available_in);
       result = EncodeData(s, is_last, force_flush,
           &s->available_out_, &s->next_out_);
       if (!result) return BROTLI_FALSE;
       if (force_flush) s->stream_state_ = BROTLI_STREAM_FLUSH_REQUESTED;
       if (is_last) s->stream_state_ = BROTLI_STREAM_FINISHED;
       continue;
     }
   }
   /* No progress possible; return control to the client. */
   break;
 }
 CheckFlushComplete(s);
 return BROTLI_TRUE;
}
   1698 
   1699 BROTLI_BOOL BrotliEncoderIsFinished(BrotliEncoderState* s) {
   1700  return TO_BROTLI_BOOL(s->stream_state_ == BROTLI_STREAM_FINISHED &&
   1701      !BrotliEncoderHasMoreOutput(s));
   1702 }
   1703 
   1704 BROTLI_BOOL BrotliEncoderHasMoreOutput(BrotliEncoderState* s) {
   1705  return TO_BROTLI_BOOL(s->available_out_ != 0);
   1706 }
   1707 
   1708 const uint8_t* BrotliEncoderTakeOutput(BrotliEncoderState* s, size_t* size) {
   1709  size_t consumed_size = s->available_out_;
   1710  uint8_t* result = s->next_out_;
   1711  if (*size) {
   1712    consumed_size = BROTLI_MIN(size_t, *size, s->available_out_);
   1713  }
   1714  if (consumed_size) {
   1715    s->next_out_ += consumed_size;
   1716    s->available_out_ -= consumed_size;
   1717    s->total_out_ += consumed_size;
   1718    CheckFlushComplete(s);
   1719    *size = consumed_size;
   1720  } else {
   1721    *size = 0;
   1722    result = 0;
   1723  }
   1724  return result;
   1725 }
   1726 
   1727 uint32_t BrotliEncoderVersion(void) {
   1728  return BROTLI_VERSION;
   1729 }
   1730 
/* Builds a managed prepared dictionary from |data| of |size| bytes.
   Only BROTLI_SHARED_DICTIONARY_RAW is accepted in regular builds;
   SERIALIZED additionally when compiled with BROTLI_EXPERIMENTAL.
   |alloc_func|/|free_func|/|opaque| configure the dictionary's own
   memory manager. Returns NULL on unknown type or allocation failure;
   the result is freed with BrotliEncoderDestroyPreparedDictionary. */
BrotliEncoderPreparedDictionary* BrotliEncoderPrepareDictionary(
   BrotliSharedDictionaryType type, size_t size,
   const uint8_t data[BROTLI_ARRAY_PARAM(size)], int quality,
   brotli_alloc_func alloc_func, brotli_free_func free_func, void* opaque) {
 ManagedDictionary* managed_dictionary = NULL;
 BROTLI_BOOL type_is_known = BROTLI_FALSE;
 type_is_known |= (type == BROTLI_SHARED_DICTIONARY_RAW);
#if defined(BROTLI_EXPERIMENTAL)
 type_is_known |= (type == BROTLI_SHARED_DICTIONARY_SERIALIZED);
#endif  /* BROTLI_EXPERIMENTAL */
 if (!type_is_known) {
   return NULL;
 }
 managed_dictionary =
     BrotliCreateManagedDictionary(alloc_func, free_func, opaque);
 if (managed_dictionary == NULL) {
   return NULL;
 }
 if (type == BROTLI_SHARED_DICTIONARY_RAW) {
   managed_dictionary->dictionary = (uint32_t*)CreatePreparedDictionary(
       &managed_dictionary->memory_manager_, data, size);
 }
#if defined(BROTLI_EXPERIMENTAL)
 if (type == BROTLI_SHARED_DICTIONARY_SERIALIZED) {
   SharedEncoderDictionary* dict = (SharedEncoderDictionary*)BrotliAllocate(
       &managed_dictionary->memory_manager_, sizeof(SharedEncoderDictionary));
   managed_dictionary->dictionary = (uint32_t*)dict;
   if (dict != NULL) {
     BROTLI_BOOL ok = BrotliInitCustomSharedEncoderDictionary(
         &managed_dictionary->memory_manager_, data, size, quality, dict);
     if (!ok) {
       /* Initialization failed; drop the half-built dictionary so the
          NULL check below releases the managed wrapper. */
       BrotliFree(&managed_dictionary->memory_manager_, dict);
       managed_dictionary->dictionary = NULL;
     }
   }
 }
#else  /* BROTLI_EXPERIMENTAL */
 (void)quality;
#endif  /* BROTLI_EXPERIMENTAL */
 if (managed_dictionary->dictionary == NULL) {
   BrotliDestroyManagedDictionary(managed_dictionary);
   return NULL;
 }
 return (BrotliEncoderPreparedDictionary*)managed_dictionary;
}
   1776 
/* Destroys a dictionary created by BrotliEncoderPrepareDictionary.
   Dispatches on the magic stored as the first uint32_t of the inner
   dictionary struct. NULL and non-managed instances are ignored. */
void BROTLI_COLD BrotliEncoderDestroyPreparedDictionary(
   BrotliEncoderPreparedDictionary* dictionary) {
 ManagedDictionary* dict = (ManagedDictionary*)dictionary;
 if (!dictionary) return;
 /* First field of dictionary structs. */
 /* Only managed dictionaries are eligible for destruction by this method. */
 if (dict->magic != kManagedDictionaryMagic) {
   return;
 }
 if (dict->dictionary == NULL) {
   /* This should never ever happen. */
 } else if (*dict->dictionary == kLeanPreparedDictionaryMagic) {
   DestroyPreparedDictionary(
       &dict->memory_manager_, (PreparedDictionary*)dict->dictionary);
 } else if (*dict->dictionary == kSharedDictionaryMagic) {
   BrotliCleanupSharedEncoderDictionary(&dict->memory_manager_,
       (SharedEncoderDictionary*)dict->dictionary);
   BrotliFree(&dict->memory_manager_, dict->dictionary);
 } else {
   /* There is also kPreparedDictionaryMagic, but such instances should be
    * constructed and destroyed by different means. */
 }
 dict->dictionary = NULL;
 BrotliDestroyManagedDictionary(dict);
}
   1802 
   1803 BROTLI_BOOL BROTLI_COLD BrotliEncoderAttachPreparedDictionary(
   1804    BrotliEncoderState* state,
   1805    const BrotliEncoderPreparedDictionary* dictionary) {
   1806  /* First field of dictionary structs */
   1807  const BrotliEncoderPreparedDictionary* dict = dictionary;
   1808  uint32_t magic = *((const uint32_t*)dict);
   1809  SharedEncoderDictionary* current = NULL;
   1810  if (magic == kManagedDictionaryMagic) {
   1811    /* Unwrap managed dictionary. */
   1812    ManagedDictionary* managed_dictionary = (ManagedDictionary*)dict;
   1813    magic = *managed_dictionary->dictionary;
   1814    dict = (BrotliEncoderPreparedDictionary*)managed_dictionary->dictionary;
   1815  }
   1816  current = &state->params.dictionary;
   1817  if (magic == kPreparedDictionaryMagic ||
   1818      magic == kLeanPreparedDictionaryMagic) {
   1819    const PreparedDictionary* prepared = (const PreparedDictionary*)dict;
   1820    if (!AttachPreparedDictionary(&current->compound, prepared)) {
   1821      return BROTLI_FALSE;
   1822    }
   1823  } else if (magic == kSharedDictionaryMagic) {
   1824    const SharedEncoderDictionary* attached =
   1825        (const SharedEncoderDictionary*)dict;
   1826    BROTLI_BOOL was_default = !current->contextual.context_based &&
   1827        current->contextual.num_dictionaries == 1 &&
   1828        current->contextual.dict[0]->hash_table_words ==
   1829        kStaticDictionaryHashWords &&
   1830        current->contextual.dict[0]->hash_table_lengths ==
   1831        kStaticDictionaryHashLengths;
   1832    BROTLI_BOOL new_default = !attached->contextual.context_based &&
   1833        attached->contextual.num_dictionaries == 1 &&
   1834        attached->contextual.dict[0]->hash_table_words ==
   1835        kStaticDictionaryHashWords &&
   1836        attached->contextual.dict[0]->hash_table_lengths ==
   1837        kStaticDictionaryHashLengths;
   1838    size_t i;
   1839    if (state->is_initialized_) return BROTLI_FALSE;
   1840    current->max_quality =
   1841        BROTLI_MIN(int, current->max_quality, attached->max_quality);
   1842    for (i = 0; i < attached->compound.num_chunks; i++) {
   1843      if (!AttachPreparedDictionary(&current->compound,
   1844          attached->compound.chunks[i])) {
   1845        return BROTLI_FALSE;
   1846      }
   1847    }
   1848    if (!new_default) {
   1849      if (!was_default) return BROTLI_FALSE;
   1850      /* Copy by value, but then set num_instances_ to 0 because their memory
   1851      is managed by attached, not by current */
   1852      current->contextual = attached->contextual;
   1853      current->contextual.num_instances_ = 0;
   1854    }
   1855  } else {
   1856    return BROTLI_FALSE;
   1857  }
   1858  return BROTLI_TRUE;
   1859 }
   1860 
/* Estimates peak heap usage (in bytes) of compressing |input_size| bytes
   at the given |quality| and |lgwin|. Fast qualities (0/1) are sized from
   the fragment-compressor buffers; other qualities from ring buffer,
   hashers, command/output buffers and histograms. Values are estimates,
   not guarantees. */
size_t BROTLI_COLD BrotliEncoderEstimatePeakMemoryUsage(int quality, int lgwin,
                                                       size_t input_size) {
 BrotliEncoderParams params;
 size_t memory_manager_slots = BROTLI_ENCODER_MEMORY_MANAGER_SLOTS;
 size_t memory_manager_size = memory_manager_slots * sizeof(void*);
 /* Mirror the parameter sanitation the real encoder would perform. */
 BrotliEncoderInitParams(&params);
 params.quality = quality;
 params.lgwin = lgwin;
 params.size_hint = input_size;
 params.large_window = lgwin > BROTLI_MAX_WINDOW_BITS;
 SanitizeParams(&params);
 params.lgblock = ComputeLgBlock(&params);
 ChooseHasher(&params, &params.hasher);
 if (params.quality == FAST_ONE_PASS_COMPRESSION_QUALITY ||
     params.quality == FAST_TWO_PASS_COMPRESSION_QUALITY) {
   size_t state_size = sizeof(BrotliEncoderState);
   size_t block_size = BROTLI_MIN(size_t, input_size, ((size_t)1ul << params.lgwin));
   size_t hash_table_size =
       HashTableSize(MaxHashTableSize(params.quality), block_size);
   /* Tables up to 1024 entries live in the state's small table, not heap. */
   size_t hash_size =
       (hash_table_size < (1u << 10)) ? 0 : sizeof(int) * hash_table_size;
   /* Two-pass keeps command (uint32) + literal (byte) buffers: 5 bytes per
      input byte, capped at the two-pass block size (1 << 17). */
   size_t cmdbuf_size = params.quality == FAST_TWO_PASS_COMPRESSION_QUALITY ?
       5 * BROTLI_MIN(size_t, block_size, 1ul << 17) : 0;
   if (params.quality == FAST_ONE_PASS_COMPRESSION_QUALITY) {
     state_size += sizeof(BrotliOnePassArena);
   } else {
     state_size += sizeof(BrotliTwoPassArena);
   }
   return hash_size + cmdbuf_size + state_size;
 } else {
   size_t short_ringbuffer_size = (size_t)1 << params.lgblock;
   int ringbuffer_bits = ComputeRbBits(&params);
   size_t ringbuffer_size = input_size < short_ringbuffer_size ?
       input_size : ((size_t)1u << ringbuffer_bits) + short_ringbuffer_size;
   size_t hash_size[4] = {0};
   size_t metablock_size =
       BROTLI_MIN(size_t, input_size, MaxMetablockSize(&params));
   size_t inputblock_size =
       BROTLI_MIN(size_t, input_size, (size_t)1 << params.lgblock);
   size_t cmdbuf_size = metablock_size * 2 + inputblock_size * 6;
   size_t outbuf_size = metablock_size * 2 + 503;
   size_t histogram_size = 0;
   HasherSize(&params, BROTLI_TRUE, input_size, hash_size);
   if (params.quality < MIN_QUALITY_FOR_BLOCK_SPLIT) {
     cmdbuf_size = BROTLI_MIN(size_t, cmdbuf_size,
         MAX_NUM_DELAYED_SYMBOLS * sizeof(Command) + inputblock_size * 12);
   }
   if (params.quality >= MIN_QUALITY_FOR_HQ_BLOCK_SPLITTING) {
     /* Only a very rough estimation, based on enwik8. */
     histogram_size = 200 << 20;
   } else if (params.quality >= MIN_QUALITY_FOR_BLOCK_SPLIT) {
     size_t literal_histograms =
         BROTLI_MIN(size_t, metablock_size / 6144, 256);
     size_t command_histograms =
         BROTLI_MIN(size_t, metablock_size / 6144, 256);
     size_t distance_histograms =
         BROTLI_MIN(size_t, metablock_size / 6144, 256);
     histogram_size = literal_histograms * sizeof(HistogramLiteral) +
                      command_histograms * sizeof(HistogramCommand) +
                      distance_histograms * sizeof(HistogramDistance);
   }
   return (memory_manager_size + ringbuffer_size +
           hash_size[0] + hash_size[1] + hash_size[2] + hash_size[3] +
           cmdbuf_size +
           outbuf_size +
           histogram_size);
 }
}
   1929 size_t BROTLI_COLD BrotliEncoderGetPreparedDictionarySize(
   1930    const BrotliEncoderPreparedDictionary* prepared_dictionary) {
   1931  /* First field of dictionary structs */
   1932  const BrotliEncoderPreparedDictionary* prepared = prepared_dictionary;
   1933  uint32_t magic = *((const uint32_t*)prepared);
   1934  size_t overhead = 0;
   1935  if (magic == kManagedDictionaryMagic) {
   1936    const ManagedDictionary* managed = (const ManagedDictionary*)prepared;
   1937    overhead = sizeof(ManagedDictionary);
   1938    magic = *managed->dictionary;
   1939    prepared = (const BrotliEncoderPreparedDictionary*)managed->dictionary;
   1940  }
   1941 
   1942  if (magic == kPreparedDictionaryMagic) {
   1943    const PreparedDictionary* dictionary =
   1944        (const PreparedDictionary*)prepared;
   1945    /* Keep in sync with step 3 of CreatePreparedDictionary */
   1946    return sizeof(PreparedDictionary) + dictionary->source_size +
   1947        (sizeof(uint32_t) << dictionary->slot_bits) +
   1948        (sizeof(uint16_t) << dictionary->bucket_bits) +
   1949        (sizeof(uint32_t) * dictionary->num_items) + overhead;
   1950  } else if (magic == kLeanPreparedDictionaryMagic) {
   1951    const PreparedDictionary* dictionary =
   1952        (const PreparedDictionary*)prepared;
   1953    /* Keep in sync with step 3 of CreatePreparedDictionary */
   1954    return sizeof(PreparedDictionary) + sizeof(uint8_t*) +
   1955        (sizeof(uint32_t) << dictionary->slot_bits) +
   1956        (sizeof(uint16_t) << dictionary->bucket_bits) +
   1957        (sizeof(uint32_t) * dictionary->num_items) + overhead;
   1958  } else if (magic == kSharedDictionaryMagic) {
   1959    const SharedEncoderDictionary* dictionary =
   1960        (const SharedEncoderDictionary*)prepared;
   1961    const CompoundDictionary* compound = &dictionary->compound;
   1962    const ContextualEncoderDictionary* contextual = &dictionary->contextual;
   1963    size_t result = sizeof(*dictionary);
   1964    size_t i;
   1965    size_t num_instances;
   1966    const BrotliEncoderDictionary* instances;
   1967    for (i = 0; i < compound->num_prepared_instances_; i++) {
   1968      size_t size = BrotliEncoderGetPreparedDictionarySize(
   1969          (const BrotliEncoderPreparedDictionary*)
   1970          compound->prepared_instances_[i]);
   1971      if (!size) return 0;  /* error */
   1972      result += size;
   1973    }
   1974    if (contextual->context_based) {
   1975      num_instances = contextual->num_instances_;
   1976      instances = contextual->instances_;
   1977      result += sizeof(*instances) * num_instances;
   1978    } else {
   1979      num_instances = 1;
   1980      instances = &contextual->instance_;
   1981    }
   1982    for (i = 0; i < num_instances; i++) {
   1983      const BrotliEncoderDictionary* dict = &instances[i];
   1984      result += dict->trie.pool_capacity * sizeof(BrotliTrieNode);
   1985      if (dict->hash_table_data_words_) {
   1986        result += sizeof(kStaticDictionaryHashWords);
   1987      }
   1988      if (dict->hash_table_data_lengths_) {
   1989        result += sizeof(kStaticDictionaryHashLengths);
   1990      }
   1991      if (dict->buckets_data_) {
   1992        result += sizeof(*dict->buckets_data_) * dict->buckets_alloc_size_;
   1993      }
   1994      if (dict->dict_words_data_) {
   1995        result += sizeof(*dict->dict_words) * dict->dict_words_alloc_size_;
   1996      }
   1997      if (dict->words_instance_) {
   1998        result += sizeof(*dict->words_instance_);
   1999        /* data_size not added here: it is never allocated by the
   2000           SharedEncoderDictionary, instead it always points to the file
   2001           already loaded in memory. So if the caller wants to include
   2002           this memory as well, add the size of the loaded dictionary
   2003           file to this. */
   2004      }
   2005    }
   2006    return result + overhead;
   2007  }
   2008  return 0;  /* error */
   2009 }
   2010 
#if defined(BROTLI_TEST)
/* Test-only shim: exposes the internal MakeUncompressedStream helper to the
   unit-test build (guarded by BROTLI_TEST). The extra prototype silences
   missing-declaration warnings for this exported symbol. */
size_t BrotliMakeUncompressedStreamForTest(const uint8_t*, size_t, uint8_t*);
size_t BrotliMakeUncompressedStreamForTest(
   const uint8_t* input, size_t input_size, uint8_t* output) {
 return MakeUncompressedStream(input, input_size, output);
}
#endif
   2018 
   2019 #if defined(__cplusplus) || defined(c_plusplus)
   2020 }  /* extern "C" */
   2021 #endif