tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

compressor.js (46779B)


      1 // The implementation of the [HTTP/2 Header Compression][http2-compression] spec is separated from
      2 // the 'integration' part which handles HEADERS and PUSH_PROMISE frames. The compression itself is
      3 // implemented in the first part of the file, and consists of three classes: `HeaderTable`,
      4 // `HeaderSetDecompressor` and `HeaderSetCompressor`. The two latter classes are
      5 // [Transform Stream][node-transform] subclasses that operate in [object mode][node-objectmode].
      6 // These transform chunks of binary data into `[name, value]` pairs and vice versa, and store their
      7 // state in `HeaderTable` instances.
      8 //
      9 // The 'integration' part is also implemented by two [Transform Stream][node-transform] subclasses
     10 // that operate in [object mode][node-objectmode]: the `Compressor` and the `Decompressor`. These
     11 // provide a layer between the [framer](framer.html) and the
     12 // [connection handling component](connection.html).
     13 //
     14 // [node-transform]: https://nodejs.org/api/stream.html#stream_class_stream_transform
     15 // [node-objectmode]: https://nodejs.org/api/stream.html#stream_new_stream_readable_options
     16 // [http2-compression]: https://tools.ietf.org/html/rfc7541
     17 
     18 exports.HeaderTable = HeaderTable;
     19 exports.HuffmanTable = HuffmanTable;
     20 exports.HeaderSetCompressor = HeaderSetCompressor;
     21 exports.HeaderSetDecompressor = HeaderSetDecompressor;
     22 exports.Compressor = Compressor;
     23 exports.Decompressor = Decompressor;
     24 
     25 var TransformStream = require('stream').Transform;
     26 var assert = require('assert');
     27 var util = require('util');
     28 
     29 // Header compression
     30 // ==================
     31 
     32 // The HeaderTable class
     33 // ---------------------
     34 
     35 // The [Header Table] is a component used to associate headers to index values. It is basically an
     36 // ordered list of `[name, value]` pairs, so it's implemented as a subclass of `Array`.
     37 // In this implementation, the Header Table and the [Static Table] are handled as a single table.
     38 // [Header Table]: https://tools.ietf.org/html/rfc7541#section-2.3.2
     39 // [Static Table]: https://tools.ietf.org/html/rfc7541#section-2.3.1
     40 function HeaderTable(log, limit) {
     41  var self = HeaderTable.staticTable.map(entryFromPair);
     42  self._log = log;
     43  self._limit = limit || DEFAULT_HEADER_TABLE_LIMIT;
     44  self._staticLength = self.length;
     45  self._size = 0;
     46  self._enforceLimit = HeaderTable.prototype._enforceLimit;
     47  self.add = HeaderTable.prototype.add;
     48  self.setSizeLimit = HeaderTable.prototype.setSizeLimit;
     49  return self;
     50 }
     51 
     52 function entryFromPair(pair) {
     53  var entry = pair.slice();
     54  entry._size = size(entry);
     55  return entry;
     56 }
     57 
     58 // The encoder decides how to update the header table and as such can control how much memory is
     59 // used by the header table.  To limit the memory requirements on the decoder side, the header table
     60 // size is bounded.
     61 //
     62 // * The default header table size limit is 4096 bytes.
     63 // * The size of an entry is defined as follows: the size of an entry is the sum of its name's
     64 //   length in bytes, of its value's length in bytes and of 32 bytes.
     65 // * The size of a header table is the sum of the size of its entries.
     66 var DEFAULT_HEADER_TABLE_LIMIT = 4096;
     67 
     68 function size(entry) {
     69  return (Buffer.from(entry[0] + entry[1], 'utf8')).length + 32;
     70 }
     71 
     72 // The `add(index, entry)` can be used to [manage the header table][tablemgmt]:
     73 // [tablemgmt]: https://tools.ietf.org/html/rfc7541#section-4
     74 //
// * it pushes the new `entry` at the beginning of the table
     76 // * before doing such a modification, it has to be ensured that the header table size will stay
     77 //   lower than or equal to the header table size limit. To achieve this, entries are evicted from
     78 //   the end of the header table until the size of the header table is less than or equal to
     79 //   `(this._limit - entry.size)`, or until the table is empty.
     80 //
     81 //              <----------  Index Address Space ---------->
     82 //              <-- Static  Table -->  <-- Header  Table -->
     83 //              +---+-----------+---+  +---+-----------+---+
     84 //              | 0 |    ...    | k |  |k+1|    ...    | n |
     85 //              +---+-----------+---+  +---+-----------+---+
     86 //                                     ^                   |
     87 //                                     |                   V
     88 //                              Insertion Point       Drop Point
     89 
     90 HeaderTable.prototype._enforceLimit = function _enforceLimit(limit) {
     91  var droppedEntries = [];
     92  while ((this._size > 0) && (this._size > limit)) {
     93    var dropped = this.pop();
     94    this._size -= dropped._size;
     95    droppedEntries.unshift(dropped);
     96  }
     97  return droppedEntries;
     98 };
     99 
    100 HeaderTable.prototype.add = function(entry) {
    101  var limit = this._limit - entry._size;
    102  var droppedEntries = this._enforceLimit(limit);
    103 
    104  if (this._size <= limit) {
    105    this.splice(this._staticLength, 0, entry);
    106    this._size += entry._size;
    107  }
    108 
    109  return droppedEntries;
    110 };
    111 
    112 // The table size limit can be changed externally. In this case, the same eviction algorithm is used
    113 HeaderTable.prototype.setSizeLimit = function setSizeLimit(limit) {
    114  this._limit = limit;
    115  this._enforceLimit(this._limit);
    116 };
    117 
    118 // [The Static Table](https://tools.ietf.org/html/rfc7541#section-2.3.1)
    119 // ------------------
    120 
    121 // The table is generated with feeding the table from the spec to the following sed command:
    122 //
    123 //     sed -re "s/\s*\| [0-9]+\s*\| ([^ ]*)/  [ '\1'/g" -e "s/\|\s([^ ]*)/, '\1'/g" -e 's/ \|/],/g'
    124 
// The 61 static entries from RFC 7541 Appendix A. Here they occupy indexes
// 0-60 (the spec numbers them 1-61; see the indexing note above).
HeaderTable.staticTable  = [
 [ ':authority'                  , ''            ],
 [ ':method'                     , 'GET'         ],
 [ ':method'                     , 'POST'        ],
 [ ':path'                       , '/'           ],
 [ ':path'                       , '/index.html' ],
 [ ':scheme'                     , 'http'        ],
 [ ':scheme'                     , 'https'       ],
 [ ':status'                     , '200'         ],
 [ ':status'                     , '204'         ],
 [ ':status'                     , '206'         ],
 [ ':status'                     , '304'         ],
 [ ':status'                     , '400'         ],
 [ ':status'                     , '404'         ],
 [ ':status'                     , '500'         ],
 [ 'accept-charset'              , ''            ],
 [ 'accept-encoding'             , 'gzip, deflate'],
 [ 'accept-language'             , ''            ],
 [ 'accept-ranges'               , ''            ],
 [ 'accept'                      , ''            ],
 [ 'access-control-allow-origin' , ''            ],
 [ 'age'                         , ''            ],
 [ 'allow'                       , ''            ],
 [ 'authorization'               , ''            ],
 [ 'cache-control'               , ''            ],
 [ 'content-disposition'         , ''            ],
 [ 'content-encoding'            , ''            ],
 [ 'content-language'            , ''            ],
 [ 'content-length'              , ''            ],
 [ 'content-location'            , ''            ],
 [ 'content-range'               , ''            ],
 [ 'content-type'                , ''            ],
 [ 'cookie'                      , ''            ],
 [ 'date'                        , ''            ],
 [ 'etag'                        , ''            ],
 [ 'expect'                      , ''            ],
 [ 'expires'                     , ''            ],
 [ 'from'                        , ''            ],
 [ 'host'                        , ''            ],
 [ 'if-match'                    , ''            ],
 [ 'if-modified-since'           , ''            ],
 [ 'if-none-match'               , ''            ],
 [ 'if-range'                    , ''            ],
 [ 'if-unmodified-since'         , ''            ],
 [ 'last-modified'               , ''            ],
 [ 'link'                        , ''            ],
 [ 'location'                    , ''            ],
 [ 'max-forwards'                , ''            ],
 [ 'proxy-authenticate'          , ''            ],
 [ 'proxy-authorization'         , ''            ],
 [ 'range'                       , ''            ],
 [ 'referer'                     , ''            ],
 [ 'refresh'                     , ''            ],
 [ 'retry-after'                 , ''            ],
 [ 'server'                      , ''            ],
 [ 'set-cookie'                  , ''            ],
 [ 'strict-transport-security'   , ''            ],
 [ 'transfer-encoding'           , ''            ],
 [ 'user-agent'                  , ''            ],
 [ 'vary'                        , ''            ],
 [ 'via'                         , ''            ],
 [ 'www-authenticate'            , ''            ]
];
    188 
    189 // The HeaderSetDecompressor class
    190 // -------------------------------
    191 
    192 // A `HeaderSetDecompressor` instance is a transform stream that can be used to *decompress a
    193 // single header set*. Its input is a stream of binary data chunks and its output is a stream of
    194 // `[name, value]` pairs.
    195 //
// Currently, it is not a proper streaming decompressor implementation, since it buffers its input
// until the end of the stream, and then processes the whole header block at once.
    198 
    199 util.inherits(HeaderSetDecompressor, TransformStream);
    200 function HeaderSetDecompressor(log, table) {
    201  TransformStream.call(this, { objectMode: true });
    202 
    203  this._log = log.child({ component: 'compressor' });
    204  this._table = table;
    205  this._chunks = [];
    206 }
    207 
    208 // `_transform` is the implementation of the [corresponding virtual function][_transform] of the
    209 // TransformStream class. It collects the data chunks for later processing.
    210 // [_transform]: https://nodejs.org/api/stream.html#stream_transform_transform_chunk_encoding_callback
    211 HeaderSetDecompressor.prototype._transform = function _transform(chunk, encoding, callback) {
    212  this._chunks.push(chunk);
    213  callback();
    214 };
    215 
    216 // `execute(rep)` executes the given [header representation][representation].
    217 // [representation]: https://tools.ietf.org/html/rfc7541#section-6
    218 
    219 // The *JavaScript object representation* of a header representation:
    220 //
    221 //     {
    222 //       name: String || Integer,  // string literal or index
    223 //       value: String || Integer, // string literal or index
    224 //       index: Boolean            // with or without indexing
    225 //     }
    226 //
    227 // *Important:* to ease the indexing of the header table, indexes start at 0 instead of 1.
    228 //
    229 // Examples:
    230 //
    231 //     Indexed:
    232 //     { name: 2  , value: 2  , index: false }
    233 //     Literal:
    234 //     { name: 2  , value: 'X', index: false } // without indexing
    235 //     { name: 2  , value: 'Y', index: true  } // with indexing
    236 //     { name: 'A', value: 'Z', index: true  } // with indexing, literal name
    237 HeaderSetDecompressor.prototype._execute = function _execute(rep) {
    238  this._log.trace({ key: rep.name, value: rep.value, index: rep.index },
    239                  'Executing header representation');
    240 
    241  var entry, pair;
    242 
    243  if (rep.contextUpdate) {
    244    this._table.setSizeLimit(rep.newMaxSize);
    245  }
    246 
    247  // * An _indexed representation_ entails the following actions:
    248  //   * The header field corresponding to the referenced entry is emitted
    249  else if (typeof rep.value === 'number') {
    250    var index = rep.value;
    251    entry = this._table[index];
    252 
    253    pair = entry.slice();
    254    this.push(pair);
    255  }
    256 
    257  // * A _literal representation_ that is _not added_ to the header table entails the following
    258  //   action:
    259  //   * The header is emitted.
    260  // * A _literal representation_ that is _added_ to the header table entails the following further
    261  //   actions:
    262  //   * The header is added to the header table.
    263  //   * The header is emitted.
    264  else {
    265    if (typeof rep.name === 'number') {
    266      pair = [this._table[rep.name][0], rep.value];
    267    } else {
    268      pair = [rep.name, rep.value];
    269    }
    270 
    271    if (rep.index) {
    272      entry = entryFromPair(pair);
    273      this._table.add(entry);
    274    }
    275 
    276    this.push(pair);
    277  }
    278 };
    279 
    280 // `_flush` is the implementation of the [corresponding virtual function][_flush] of the
    281 // TransformStream class. The whole decompressing process is done in `_flush`. It gets called when
    282 // the input stream is over.
    283 // [_flush]: https://nodejs.org/api/stream.html#stream_transform_flush_callback
    284 HeaderSetDecompressor.prototype._flush = function _flush(callback) {
    285  var buffer = concat(this._chunks);
    286 
    287  // * processes the header representations
    288  buffer.cursor = 0;
    289  while (buffer.cursor < buffer.length) {
    290    this._execute(HeaderSetDecompressor.header(buffer));
    291  }
    292 
    293  callback();
    294 };
    295 
    296 // The HeaderSetCompressor class
    297 // -----------------------------
    298 
    299 // A `HeaderSetCompressor` instance is a transform stream that can be used to *compress a single
    300 // header set*. Its input is a stream of `[name, value]` pairs and its output is a stream of
    301 // binary data chunks.
    302 //
    303 // It is a real streaming compressor, since it does not wait until the header set is complete.
    304 //
    305 // The compression algorithm is (intentionally) not specified by the spec. Therefore, the current
    306 // compression algorithm can probably be improved in the future.
    307 
    308 util.inherits(HeaderSetCompressor, TransformStream);
    309 function HeaderSetCompressor(log, table) {
    310  TransformStream.call(this, { objectMode: true });
    311 
    312  this._log = log.child({ component: 'compressor' });
    313  this._table = table;
    314  this.push = TransformStream.prototype.push.bind(this);
    315 }
    316 
    317 HeaderSetCompressor.prototype.send = function send(rep) {
    318  this._log.trace({ key: rep.name, value: rep.value, index: rep.index },
    319                  'Emitting header representation');
    320 
    321  if (!rep.chunks) {
    322    rep.chunks = HeaderSetCompressor.header(rep);
    323  }
    324  rep.chunks.forEach(this.push);
    325 };
    326 
    327 // `_transform` is the implementation of the [corresponding virtual function][_transform] of the
    328 // TransformStream class. It processes the input headers one by one:
    329 // [_transform]: https://nodejs.org/api/stream.html#stream_transform_transform_chunk_encoding_callback
    330 HeaderSetCompressor.prototype._transform = function _transform(pair, encoding, callback) {
    331  var name = pair[0].toLowerCase();
    332  var value = pair[1];
    333  var entry, rep;
    334 
    335  // * tries to find full (name, value) or name match in the header table
    336  var nameMatch = -1, fullMatch = -1;
    337  for (var droppedIndex = 0; droppedIndex < this._table.length; droppedIndex++) {
    338    entry = this._table[droppedIndex];
    339    if (entry[0] === name) {
    340      if (entry[1] === value) {
    341        fullMatch = droppedIndex;
    342        break;
    343      } else if (nameMatch === -1) {
    344        nameMatch = droppedIndex;
    345      }
    346    }
    347  }
    348 
    349  var mustNeverIndex = ((name === 'cookie' && value.length < 20) ||
    350                        (name === 'set-cookie' && value.length < 20) ||
    351                        name === 'authorization');
    352 
    353  if (fullMatch !== -1 && !mustNeverIndex) {
    354    this.send({ name: fullMatch, value: fullMatch, index: false });
    355  }
    356 
    357  // * otherwise, it will be a literal representation (with a name index if there's a name match)
    358  else {
    359    entry = entryFromPair(pair);
    360 
    361    var indexing = (entry._size < this._table._limit / 2) && !mustNeverIndex;
    362 
    363    if (indexing) {
    364      this._table.add(entry);
    365    }
    366 
    367    this.send({ name: (nameMatch !== -1) ? nameMatch : name, value: value, index: indexing, mustNeverIndex: mustNeverIndex, contextUpdate: false });
    368  }
    369 
    370  callback();
    371 };
    372 
    373 // `_flush` is the implementation of the [corresponding virtual function][_flush] of the
    374 // TransformStream class. It gets called when there's no more header to compress. The final step:
    375 // [_flush]: https://nodejs.org/api/stream.html#stream_transform_flush_callback
HeaderSetCompressor.prototype._flush = function _flush(callback) {
 // Nothing is buffered: every header was compressed and pushed in
 // `_transform`, so ending the stream is all that is left to do.
 callback();
};
    379 
    380 // [Detailed Format](https://tools.ietf.org/html/rfc7541#section-5)
    381 // -----------------
    382 
    383 // ### Integer representation ###
    384 //
    385 // The algorithm to represent an integer I is as follows:
    386 //
    387 // 1. If I < 2^N - 1, encode I on N bits
    388 // 2. Else, encode 2^N - 1 on N bits and do the following steps:
    389 //    1. Set I to (I - (2^N - 1)) and Q to 1
    390 //    2. While Q > 0
    391 //       1. Compute Q and R, quotient and remainder of I divided by 2^7
    392 //       2. If Q is strictly greater than 0, write one 1 bit; otherwise, write one 0 bit
    393 //       3. Encode R on the next 7 bits
    394 //       4. I = Q
    395 
    396 HeaderSetCompressor.integer = function writeInteger(I, N) {
    397  var limit = Math.pow(2,N) - 1;
    398  if (I < limit) {
    399    return [Buffer.from([I])];
    400  }
    401 
    402  var bytes = [];
    403  if (N !== 0) {
    404    bytes.push(limit);
    405  }
    406  I -= limit;
    407 
    408  var Q = 1, R;
    409  while (Q > 0) {
    410    Q = Math.floor(I / 128);
    411    R = I % 128;
    412 
    413    if (Q > 0) {
    414      R += 128;
    415    }
    416    bytes.push(R);
    417 
    418    I = Q;
    419  }
    420 
    421  return [Buffer.from(bytes)];
    422 };
    423 
    424 // The inverse algorithm:
    425 //
    426 // 1. Set I to the number coded on the lower N bits of the first byte
    427 // 2. If I is smaller than 2^N - 1 then return I
    428 // 2. Else the number is encoded on more than one byte, so do the following steps:
    429 //    1. Set M to 0
    430 //    2. While returning with I
    431 //       1. Let B be the next byte (the first byte if N is 0)
    432 //       2. Read out the lower 7 bits of B and multiply it with 2^M
    433 //       3. Increase I with this number
    434 //       4. Increase M by 7
    435 //       5. Return I if the most significant bit of B is 0
    436 
    437 HeaderSetDecompressor.integer = function readInteger(buffer, N) {
    438  var limit = Math.pow(2,N) - 1;
    439 
    440  var I = buffer[buffer.cursor] & limit;
    441  if (N !== 0) {
    442    buffer.cursor += 1;
    443  }
    444 
    445  if (I === limit) {
    446    var M = 0;
    447    do {
    448      I += (buffer[buffer.cursor] & 127) << M;
    449      M += 7;
    450      buffer.cursor += 1;
    451    } while (buffer[buffer.cursor - 1] & 128);
    452  }
    453 
    454  return I;
    455 };
    456 
    457 // ### Huffman Encoding ###
    458 
    459 function HuffmanTable(table) {
    460  function createTree(codes, position) {
    461    if (codes.length === 1) {
    462      return [table.indexOf(codes[0])];
    463    }
    464 
    465    else {
    466      position = position || 0;
    467      var zero = [];
    468      var one = [];
    469      for (var i = 0; i < codes.length; i++) {
    470        var string = codes[i];
    471        if (string[position] === '0') {
    472          zero.push(string);
    473        } else {
    474          one.push(string);
    475        }
    476      }
    477      return [createTree(zero, position + 1), createTree(one, position + 1)];
    478    }
    479  }
    480 
    481  this.tree = createTree(table);
    482 
    483  this.codes = table.map(function(bits) {
    484    return parseInt(bits, 2);
    485  });
    486  this.lengths = table.map(function(bits) {
    487    return bits.length;
    488  });
    489 }
    490 
// Huffman-encodes the octets of `buffer` using this table's codes. Bits are
// packed MSB-first into whole bytes; a final partial byte is padded with the
// most significant bits of the code at index 256 (the EOS symbol in the
// spec's table). Returns the packed bits as a new Buffer.
HuffmanTable.prototype.encode = function encode(buffer) {
 var result = [];
 // Number of bits still free in the byte currently being filled
 // (8 means the next `add` starts a fresh byte).
 var space = 8;

 // Emits bits that are already shifted into their final position: opens a
 // new output byte when the previous one is full, otherwise ORs the bits
 // into the currently open byte.
 function add(data) {
   if (space === 8) {
     result.push(data);
   } else {
     result[result.length - 1] |= data;
   }
 }

 for (var i = 0; i < buffer.length; i++) {
   var byte = buffer[i];
   var code = this.codes[byte];
   var length = this.lengths[byte];

   // Write the `length`-bit code, possibly spanning multiple output bytes.
   while (length !== 0) {
     if (space >= length) {
       // The remaining code bits fit in the open byte.
       add(code << (space - length));
       code = 0;
       space -= length;
       length = 0;
     } else {
       // Only the topmost `space` bits fit: emit them, keep the rest.
       var shift = length - space;
       var msb = code >> shift;
       add(msb);
       code -= msb << shift;
       length -= space;
       space = 0;
     }

     if (space === 0) {
       space = 8;
     }
   }
 }

 // Pad the final byte with an EOS-code prefix if it is not byte-aligned.
 if (space !== 8) {
   add(this.codes[256] >> (this.lengths[256] - space));
 }

 return Buffer.from(result);
};
    535 
    536 HuffmanTable.prototype.decode = function decode(buffer) {
    537  var result = [];
    538  var subtree = this.tree;
    539 
    540  for (var i = 0; i < buffer.length; i++) {
    541    var byte = buffer[i];
    542 
    543    for (var j = 0; j < 8; j++) {
    544      var bit = (byte & 128) ? 1 : 0;
    545      byte = byte << 1;
    546 
    547      subtree = subtree[bit];
    548      if (subtree.length === 1) {
    549        result.push(subtree[0]);
    550        subtree = this.tree;
    551      }
    552    }
    553  }
    554 
    555  return Buffer.from(result);
    556 };
    557 
    558 // The initializer arrays for the Huffman tables are generated with feeding the tables from the
    559 // spec to this sed command:
    560 //
    561 //     sed -e "s/^.* [|]//g" -e "s/|//g" -e "s/ .*//g" -e "s/^/  '/g" -e "s/$/',/g"
    562 
HuffmanTable.huffmanTable = new HuffmanTable([
 // 257 canonical code strings from the spec: one per octet value (0-255),
 // followed by the EOS symbol at index 256 (used for padding in `encode`).
 '1111111111000',
 '11111111111111111011000',
 '1111111111111111111111100010',
 '1111111111111111111111100011',
 '1111111111111111111111100100',
 '1111111111111111111111100101',
 '1111111111111111111111100110',
 '1111111111111111111111100111',
 '1111111111111111111111101000',
 '111111111111111111101010',
 '111111111111111111111111111100',
 '1111111111111111111111101001',
 '1111111111111111111111101010',
 '111111111111111111111111111101',
 '1111111111111111111111101011',
 '1111111111111111111111101100',
 '1111111111111111111111101101',
 '1111111111111111111111101110',
 '1111111111111111111111101111',
 '1111111111111111111111110000',
 '1111111111111111111111110001',
 '1111111111111111111111110010',
 '111111111111111111111111111110',
 '1111111111111111111111110011',
 '1111111111111111111111110100',
 '1111111111111111111111110101',
 '1111111111111111111111110110',
 '1111111111111111111111110111',
 '1111111111111111111111111000',
 '1111111111111111111111111001',
 '1111111111111111111111111010',
 '1111111111111111111111111011',
 '010100',
 '1111111000',
 '1111111001',
 '111111111010',
 '1111111111001',
 '010101',
 '11111000',
 '11111111010',
 '1111111010',
 '1111111011',
 '11111001',
 '11111111011',
 '11111010',
 '010110',
 '010111',
 '011000',
 '00000',
 '00001',
 '00010',
 '011001',
 '011010',
 '011011',
 '011100',
 '011101',
 '011110',
 '011111',
 '1011100',
 '11111011',
 '111111111111100',
 '100000',
 '111111111011',
 '1111111100',
 '1111111111010',
 '100001',
 '1011101',
 '1011110',
 '1011111',
 '1100000',
 '1100001',
 '1100010',
 '1100011',
 '1100100',
 '1100101',
 '1100110',
 '1100111',
 '1101000',
 '1101001',
 '1101010',
 '1101011',
 '1101100',
 '1101101',
 '1101110',
 '1101111',
 '1110000',
 '1110001',
 '1110010',
 '11111100',
 '1110011',
 '11111101',
 '1111111111011',
 '1111111111111110000',
 '1111111111100',
 '11111111111100',
 '100010',
 '111111111111101',
 '00011',
 '100011',
 '00100',
 '100100',
 '00101',
 '100101',
 '100110',
 '100111',
 '00110',
 '1110100',
 '1110101',
 '101000',
 '101001',
 '101010',
 '00111',
 '101011',
 '1110110',
 '101100',
 '01000',
 '01001',
 '101101',
 '1110111',
 '1111000',
 '1111001',
 '1111010',
 '1111011',
 '111111111111110',
 '11111111100',
 '11111111111101',
 '1111111111101',
 '1111111111111111111111111100',
 '11111111111111100110',
 '1111111111111111010010',
 '11111111111111100111',
 '11111111111111101000',
 '1111111111111111010011',
 '1111111111111111010100',
 '1111111111111111010101',
 '11111111111111111011001',
 '1111111111111111010110',
 '11111111111111111011010',
 '11111111111111111011011',
 '11111111111111111011100',
 '11111111111111111011101',
 '11111111111111111011110',
 '111111111111111111101011',
 '11111111111111111011111',
 '111111111111111111101100',
 '111111111111111111101101',
 '1111111111111111010111',
 '11111111111111111100000',
 '111111111111111111101110',
 '11111111111111111100001',
 '11111111111111111100010',
 '11111111111111111100011',
 '11111111111111111100100',
 '111111111111111011100',
 '1111111111111111011000',
 '11111111111111111100101',
 '1111111111111111011001',
 '11111111111111111100110',
 '11111111111111111100111',
 '111111111111111111101111',
 '1111111111111111011010',
 '111111111111111011101',
 '11111111111111101001',
 '1111111111111111011011',
 '1111111111111111011100',
 '11111111111111111101000',
 '11111111111111111101001',
 '111111111111111011110',
 '11111111111111111101010',
 '1111111111111111011101',
 '1111111111111111011110',
 '111111111111111111110000',
 '111111111111111011111',
 '1111111111111111011111',
 '11111111111111111101011',
 '11111111111111111101100',
 '111111111111111100000',
 '111111111111111100001',
 '1111111111111111100000',
 '111111111111111100010',
 '11111111111111111101101',
 '1111111111111111100001',
 '11111111111111111101110',
 '11111111111111111101111',
 '11111111111111101010',
 '1111111111111111100010',
 '1111111111111111100011',
 '1111111111111111100100',
 '11111111111111111110000',
 '1111111111111111100101',
 '1111111111111111100110',
 '11111111111111111110001',
 '11111111111111111111100000',
 '11111111111111111111100001',
 '11111111111111101011',
 '1111111111111110001',
 '1111111111111111100111',
 '11111111111111111110010',
 '1111111111111111101000',
 '1111111111111111111101100',
 '11111111111111111111100010',
 '11111111111111111111100011',
 '11111111111111111111100100',
 '111111111111111111111011110',
 '111111111111111111111011111',
 '11111111111111111111100101',
 '111111111111111111110001',
 '1111111111111111111101101',
 '1111111111111110010',
 '111111111111111100011',
 '11111111111111111111100110',
 '111111111111111111111100000',
 '111111111111111111111100001',
 '11111111111111111111100111',
 '111111111111111111111100010',
 '111111111111111111110010',
 '111111111111111100100',
 '111111111111111100101',
 '11111111111111111111101000',
 '11111111111111111111101001',
 '1111111111111111111111111101',
 '111111111111111111111100011',
 '111111111111111111111100100',
 '111111111111111111111100101',
 '11111111111111101100',
 '111111111111111111110011',
 '11111111111111101101',
 '111111111111111100110',
 '1111111111111111101001',
 '111111111111111100111',
 '111111111111111101000',
 '11111111111111111110011',
 '1111111111111111101010',
 '1111111111111111101011',
 '1111111111111111111101110',
 '1111111111111111111101111',
 '111111111111111111110100',
 '111111111111111111110101',
 '11111111111111111111101010',
 '11111111111111111110100',
 '11111111111111111111101011',
 '111111111111111111111100110',
 '11111111111111111111101100',
 '11111111111111111111101101',
 '111111111111111111111100111',
 '111111111111111111111101000',
 '111111111111111111111101001',
 '111111111111111111111101010',
 '111111111111111111111101011',
 '1111111111111111111111111110',
 '111111111111111111111101100',
 '111111111111111111111101101',
 '111111111111111111111101110',
 '111111111111111111111101111',
 '111111111111111111111110000',
 '11111111111111111111101110',
 '111111111111111111111111111111'
]);
    822 
    823 // ### String literal representation ###
    824 //
// Literal **strings** can represent header names or header values. There are two variants of the
// string encoding:
    827 //
    828 // String literal with Huffman encoding:
    829 //
    830 //       0   1   2   3   4   5   6   7
    831 //     +---+---+---+---+---+---+---+---+
    832 //     | 1 |  Value Length Prefix (7)  |
    833 //     +---+---+---+---+---+---+---+---+
    834 //     |   Value Length (0-N bytes)    |
    835 //     +---+---+---+---+---+---+---+---+
    836 //     ...
    837 //     +---+---+---+---+---+---+---+---+
    838 //     | Huffman Encoded Data  |Padding|
    839 //     +---+---+---+---+---+---+---+---+
    840 //
    841 // String literal without Huffman encoding:
    842 //
    843 //       0   1   2   3   4   5   6   7
    844 //     +---+---+---+---+---+---+---+---+
    845 //     | 0 |  Value Length Prefix (7)  |
    846 //     +---+---+---+---+---+---+---+---+
    847 //     |   Value Length (0-N bytes)    |
    848 //     +---+---+---+---+---+---+---+---+
    849 //     ...
    850 //     +---+---+---+---+---+---+---+---+
    851 //     |  Field Bytes Without Encoding |
    852 //     +---+---+---+---+---+---+---+---+
    853 
    854 HeaderSetCompressor.string = function writeString(str) {
    855  str = Buffer.from(str, 'utf8');
    856 
    857  var huffman = HuffmanTable.huffmanTable.encode(str);
    858  if (huffman.length < str.length) {
    859    var length = HeaderSetCompressor.integer(huffman.length, 7);
    860    length[0][0] |= 128;
    861    return length.concat(huffman);
    862  }
    863 
    864  else {
    865    length = HeaderSetCompressor.integer(str.length, 7);
    866    return length.concat(str);
    867  }
    868 };
    869 
    870 HeaderSetDecompressor.string = function readString(buffer) {
    871  var huffman = buffer[buffer.cursor] & 128;
    872  var length = HeaderSetDecompressor.integer(buffer, 7);
    873  var encoded = buffer.slice(buffer.cursor, buffer.cursor + length);
    874  buffer.cursor += length;
    875  return (huffman ? HuffmanTable.huffmanTable.decode(encoded) : encoded).toString('utf8');
    876 };
    877 
// ### Header representations ###
    879 
    880 // The JavaScript object representation is described near the
    881 // `HeaderSetDecompressor.prototype._execute()` method definition.
    882 //
    883 // **All binary header representations** start with a prefix signaling the representation type and
    884 // an index represented using prefix coded integers:
    885 //
    886 //       0   1   2   3   4   5   6   7
    887 //     +---+---+---+---+---+---+---+---+
    888 //     | 1 |        Index (7+)         |  Indexed Representation
    889 //     +---+---------------------------+
    890 //
    891 //       0   1   2   3   4   5   6   7
    892 //     +---+---+---+---+---+---+---+---+
    893 //     | 0 | 1 |      Index (6+)       |
    894 //     +---+---+---+-------------------+  Literal w/ Indexing
    895 //     |       Value Length (8+)       |
    896 //     +-------------------------------+  w/ Indexed Name
    897 //     | Value String (Length octets)  |
    898 //     +-------------------------------+
    899 //
    900 //       0   1   2   3   4   5   6   7
    901 //     +---+---+---+---+---+---+---+---+
    902 //     | 0 | 1 |           0           |
    903 //     +---+---+---+-------------------+
    904 //     |       Name Length (8+)        |
    905 //     +-------------------------------+  Literal w/ Indexing
    906 //     |  Name String (Length octets)  |
    907 //     +-------------------------------+  w/ New Name
    908 //     |       Value Length (8+)       |
    909 //     +-------------------------------+
    910 //     | Value String (Length octets)  |
    911 //     +-------------------------------+
    912 //
    913 //       0   1   2   3   4   5   6   7
    914 //     +---+---+---+---+---+---+---+---+
    915 //     | 0 | 0 | 0 | 0 |  Index (4+)   |
    916 //     +---+---+---+-------------------+  Literal w/o Incremental Indexing
    917 //     |       Value Length (8+)       |
    918 //     +-------------------------------+  w/ Indexed Name
    919 //     | Value String (Length octets)  |
    920 //     +-------------------------------+
    921 //
    922 //       0   1   2   3   4   5   6   7
    923 //     +---+---+---+---+---+---+---+---+
    924 //     | 0 | 0 | 0 | 0 |       0       |
    925 //     +---+---+---+-------------------+
    926 //     |       Name Length (8+)        |
    927 //     +-------------------------------+  Literal w/o Incremental Indexing
    928 //     |  Name String (Length octets)  |
    929 //     +-------------------------------+  w/ New Name
    930 //     |       Value Length (8+)       |
    931 //     +-------------------------------+
    932 //     | Value String (Length octets)  |
    933 //     +-------------------------------+
    934 //
    935 //       0   1   2   3   4   5   6   7
    936 //     +---+---+---+---+---+---+---+---+
    937 //     | 0 | 0 | 0 | 1 |  Index (4+)   |
    938 //     +---+---+---+-------------------+  Literal never indexed
    939 //     |       Value Length (8+)       |
    940 //     +-------------------------------+  w/ Indexed Name
    941 //     | Value String (Length octets)  |
    942 //     +-------------------------------+
    943 //
    944 //       0   1   2   3   4   5   6   7
    945 //     +---+---+---+---+---+---+---+---+
    946 //     | 0 | 0 | 0 | 1 |       0       |
    947 //     +---+---+---+-------------------+
    948 //     |       Name Length (8+)        |
    949 //     +-------------------------------+  Literal never indexed
    950 //     |  Name String (Length octets)  |
    951 //     +-------------------------------+  w/ New Name
    952 //     |       Value Length (8+)       |
    953 //     +-------------------------------+
    954 //     | Value String (Length octets)  |
    955 //     +-------------------------------+
    956 //
    957 // The **Indexed Representation** consists of the 1-bit prefix and the Index that is represented as
    958 // a 7-bit prefix coded integer and nothing else.
    959 //
    960 // After the first bits, **all literal representations** specify the header name, either as a
    961 // pointer to the Header Table (Index) or a string literal. When the string literal representation
    962 // is used, the Index is set to 0 and the string literal starts at the second byte.
    963 //
    964 // For **all literal representations**, the specification of the header value comes next. It is
    965 // always represented as a string.
    966 
// The binary representation types defined by RFC 7541, section 6. `prefix` is
// the bit width of the prefix coded integer that follows the pattern bits, and
// `pattern` is the value of the fixed leading bits in the first octet.
var representations = {
  indexed             : { prefix: 7, pattern: 0x80 }, // 6.1  Indexed Header Field
  literalIncremental  : { prefix: 6, pattern: 0x40 }, // 6.2.1 Literal with Incremental Indexing
  contextUpdate       : { prefix: 0, pattern: 0x20 }, // 6.3  Dynamic Table Size Update (prefix unused; special-cased with 5 bits below)
  literalNeverIndexed : { prefix: 4, pattern: 0x10 }, // 6.2.3 Literal Never Indexed
  literal             : { prefix: 4, pattern: 0x00 }  // 6.2.2 Literal without Indexing
};
    974 
    975 HeaderSetCompressor.header = function writeHeader(header) {
    976  var representation, buffers = [];
    977 
    978  if (header.contextUpdate) {
    979    representation = representations.contextUpdate;
    980  } else if (typeof header.value === 'number') {
    981    representation = representations.indexed;
    982  } else if (header.index) {
    983    representation = representations.literalIncremental;
    984  } else if (header.mustNeverIndex) {
    985    representation = representations.literalNeverIndexed;
    986  } else {
    987    representation = representations.literal;
    988  }
    989 
    990  if (representation === representations.contextUpdate) {
    991    buffers.push(HeaderSetCompressor.integer(header.newMaxSize, 5));
    992  }
    993 
    994  else if (representation === representations.indexed) {
    995    buffers.push(HeaderSetCompressor.integer(header.value + 1, representation.prefix));
    996  }
    997 
    998  else {
    999    if (typeof header.name === 'number') {
   1000      buffers.push(HeaderSetCompressor.integer(header.name + 1, representation.prefix));
   1001    } else {
   1002      buffers.push(HeaderSetCompressor.integer(0, representation.prefix));
   1003      buffers.push(HeaderSetCompressor.string(header.name));
   1004    }
   1005    buffers.push(HeaderSetCompressor.string(header.value));
   1006  }
   1007 
   1008  buffers[0][0][0] |= representation.pattern;
   1009 
   1010  return Array.prototype.concat.apply([], buffers); // array of arrays of buffers -> array of buffers
   1011 };
   1012 
   1013 HeaderSetDecompressor.header = function readHeader(buffer) {
   1014  var representation, header = {};
   1015 
   1016  var firstByte = buffer[buffer.cursor];
   1017  if (firstByte & 0x80) {
   1018    representation = representations.indexed;
   1019  } else if (firstByte & 0x40) {
   1020    representation = representations.literalIncremental;
   1021  } else if (firstByte & 0x20) {
   1022    representation = representations.contextUpdate;
   1023  } else if (firstByte & 0x10) {
   1024    representation = representations.literalNeverIndexed;
   1025  } else {
   1026    representation = representations.literal;
   1027  }
   1028 
   1029  header.value = header.name = -1;
   1030  header.index = false;
   1031  header.contextUpdate = false;
   1032  header.newMaxSize = 0;
   1033  header.mustNeverIndex = false;
   1034 
   1035  if (representation === representations.contextUpdate) {
   1036    header.contextUpdate = true;
   1037    header.newMaxSize = HeaderSetDecompressor.integer(buffer, 5);
   1038  }
   1039 
   1040  else if (representation === representations.indexed) {
   1041    header.value = header.name = HeaderSetDecompressor.integer(buffer, representation.prefix) - 1;
   1042  }
   1043 
   1044  else {
   1045    header.name = HeaderSetDecompressor.integer(buffer, representation.prefix) - 1;
   1046    if (header.name === -1) {
   1047      header.name = HeaderSetDecompressor.string(buffer);
   1048    }
   1049    header.value = HeaderSetDecompressor.string(buffer);
   1050    header.index = (representation === representations.literalIncremental);
   1051    header.mustNeverIndex = (representation === representations.literalNeverIndexed);
   1052  }
   1053 
   1054  return header;
   1055 };
   1056 
   1057 // Integration with HTTP/2
   1058 // =======================
   1059 
   1060 // This section describes the interaction between the compressor/decompressor and the rest of the
   1061 // HTTP/2 implementation. The `Compressor` and the `Decompressor` makes up a layer between the
   1062 // [framer](framer.html) and the [connection handling component](connection.html). They let most
   1063 // frames pass through, except HEADERS and PUSH_PROMISE frames. They convert the frames between
   1064 // these two representations:
   1065 //
   1066 //     {                                   {
   1067 //      type: 'HEADERS',                    type: 'HEADERS',
   1068 //      flags: {},                          flags: {},
   1069 //      stream: 1,               <===>      stream: 1,
   1070 //      headers: {                          data: Buffer
   1071 //       N1: 'V1',                         }
   1072 //       N2: ['V1', 'V2', ...],
   1073 //       // ...
   1074 //      }
   1075 //     }
   1076 //
// There are possibly several binary frames that belong to a single non-binary frame.
   1078 
   1079 var MAX_HTTP_PAYLOAD_SIZE = 16384;
   1080 
   1081 // The Compressor class
   1082 // --------------------
   1083 
// The Compressor transform stream itself is basically stateless; the compression state lives in
// its HeaderTable and the pending table size fields.
util.inherits(Compressor, TransformStream);
// Constructs a Compressor operating in object mode. `log` is a logger with a
// `child` factory; `type` selects the role and must be 'REQUEST' or
// 'RESPONSE'.
function Compressor(log, type) {
  TransformStream.call(this, { objectMode: true });

  this._log = log.child({ component: 'compressor' });

  assert((type === 'REQUEST') || (type === 'RESPONSE'));
  // Compression state: the HPACK header table used by every header block this
  // compressor produces.
  this._table = new HeaderTable(this._log);

  // Bookkeeping for dynamic table size changes that still have to be
  // announced to the peer at the start of the next header block (see
  // `compress`).
  this.tableSizeChangePending = false;
  this.lowestTableSizePending = 0;
  this.tableSizeSetting = DEFAULT_HEADER_TABLE_LIMIT;
}
   1098 
   1099 // Changing the header table size
   1100 Compressor.prototype.setTableSizeLimit = function setTableSizeLimit(size) {
   1101  this._table.setSizeLimit(size);
   1102  if (!this.tableSizeChangePending || size < this.lowestTableSizePending) {
   1103    this.lowestTableSizePending = size;
   1104  }
   1105  this.tableSizeSetting = size;
   1106  this.tableSizeChangePending = true;
   1107 };
   1108 
   1109 // `compress` takes a header set, and compresses it using a new `HeaderSetCompressor` stream
   1110 // instance. This means that from now on, the advantages of streaming header encoding are lost,
   1111 // but the API becomes simpler.
// Compresses one header set into a single Buffer holding the complete HPACK
// header block. Pending table size updates are emitted first, then the
// headers themselves (pseudo-headers before regular headers, Cookie split
// into individual cookie-pairs).
Compressor.prototype.compress = function compress(headers) {
  var compressor = new HeaderSetCompressor(this._log, this._table);

  // If the table size limit changed since the last header block, announce it
  // now. When the limit dipped below the final setting at some point, the
  // lowest intermediate value is announced first (see RFC 7541, section 4.2).
  if (this.tableSizeChangePending) {
    if (this.lowestTableSizePending < this.tableSizeSetting) {
      compressor.send({contextUpdate: true, newMaxSize: this.lowestTableSizePending,
                       name: "", value: "", index: 0});
    }
    compressor.send({contextUpdate: true, newMaxSize: this.tableSizeSetting,
                     name: "", value: "", index: 0});
    this.tableSizeChangePending = false;
  }
  var colonHeaders = [];
  var nonColonHeaders = [];

  // To ensure we send colon (pseudo-) headers first, partition the names
  // before encoding anything.
  for (var name in headers) {
    if (name.trim()[0] === ':') {
      colonHeaders.push(name);
    } else {
      nonColonHeaders.push(name);
    }
  }

  // Writes one (possibly multi-valued) header into the compressor stream.
  function compressHeader(name) {
    var value = headers[name];
    name = String(name).toLowerCase();

    // * To allow for better compression efficiency, the Cookie header field MAY be split into
    //   separate header fields, each with one or more cookie-pairs.
    if (name == 'cookie') {
      if (!(value instanceof Array)) {
        value = [value];
      }
      value = Array.prototype.concat.apply([], value.map(function(cookie) {
        return String(cookie).split(';').map(trim);
      }));
    }

    if (value instanceof Array) {
      for (var i = 0; i < value.length; i++) {
        compressor.write([name, String(value[i])]);
      }
    } else {
      compressor.write([name, String(value)]);
    }
  }

  colonHeaders.forEach(compressHeader);
  nonColonHeaders.forEach(compressHeader);

  compressor.end();

  // Drain every chunk the HeaderSetCompressor produced.
  var chunk, chunks = [];
  while (chunk = compressor.read()) {
    chunks.push(chunk);
  }

  // NOTE(review): the two helpers below deliberately append invalid HPACK to
  // the encoded block when magic header names are present — presumably
  // test-only hooks; confirm they cannot be triggered by content-controlled
  // header names.

  // Appends a literal header field with the name ":illegal" (colon-prefixed
  // names are reserved for pseudo-headers) and the value "REALLY NOT LEGAL".
  function insertSoftIllegalHpack(originalCompressed) {
    var illegalLiteral = Buffer.from([
      0x00, // Literal, no index
      0x08, // Name: not huffman encoded, 8 bytes long
      0x3a,
      0x69,
      0x6c,
      0x6c,
      0x65,
      0x67,
      0x61,
      0x6c, // :illegal
      0x10, // Value: not huffman encoded, 16 bytes long
      // REALLY NOT LEGAL
      0x52,
      0x45,
      0x41,
      0x4c,
      0x4c,
      0x59,
      0x20,
      0x4e,
      0x4f,
      0x54,
      0x20,
      0x4c,
      0x45,
      0x47,
      0x41,
      0x4c,
    ]);
    var newBufferLength = originalCompressed.length + illegalLiteral.length;
    var concatenated = Buffer.alloc(newBufferLength);
    originalCompressed.copy(concatenated, 0);
    illegalLiteral.copy(concatenated, originalCompressed.length);
    return concatenated;
  }

  // Appends an indexed representation referencing index 5000, far outside any
  // reasonably sized header table, so decoding it should fail.
  function insertHardIllegalHpack(originalCompressed) {
    // Now we have to add an invalid header
    var illegalIndexed = HeaderSetCompressor.integer(5000, 7);
    // The above returns an array of buffers, but there's only one buffer, so
    // get rid of the array.
    illegalIndexed = illegalIndexed[0];
    // Set the first bit to 1 to signal this is an indexed representation
    illegalIndexed[0] |= 0x80;
    var newBufferLength = originalCompressed.length + illegalIndexed.length;
    var concatenated = Buffer.alloc(newBufferLength);
    originalCompressed.copy(concatenated, 0);
    illegalIndexed.copy(concatenated, originalCompressed.length);
    return concatenated;
  }

  if ("x-softillegalhpack" in headers) {
    return insertSoftIllegalHpack(concat(chunks));
  }

  if ("x-hardillegalhpack" in headers) {
    return insertHardIllegalHpack(concat(chunks));
  }

  return concat(chunks);
};
   1233 
   1234 // When a `frame` arrives
   1235 Compressor.prototype._transform = function _transform(frame, encoding, done) {
   1236  // * and it is a HEADERS or PUSH_PROMISE frame
   1237  //   * it generates a header block using the compress method
   1238  //   * cuts the header block into `chunks` that are not larger than `MAX_HTTP_PAYLOAD_SIZE`
   1239  //   * for each chunk, it pushes out a chunk frame that is identical to the original, except
   1240  //     the `data` property which holds the given chunk, the type of the frame which is always
   1241  //     CONTINUATION except for the first frame, and the END_HEADERS/END_PUSH_STREAM flag that
   1242  //     marks the last frame and the END_STREAM flag which is always false before the end
   1243  if (frame.type === 'HEADERS' || frame.type === 'PUSH_PROMISE') {
   1244    var buffer = this.compress(frame.headers);
   1245 
   1246    // This will result in CONTINUATIONs from a PUSH_PROMISE being 4 bytes shorter than they could
   1247    // be, but that's not the end of the world, and it prevents us from going over MAX_HTTP_PAYLOAD_SIZE
   1248    // on the initial PUSH_PROMISE frame.
   1249    var adjustment = frame.type === 'PUSH_PROMISE' ? 4 : 0;
   1250    var chunks = cut(buffer, MAX_HTTP_PAYLOAD_SIZE - adjustment);
   1251 
   1252    for (var i = 0; i < chunks.length; i++) {
   1253      var chunkFrame;
   1254      var first = (i === 0);
   1255      var last = (i === chunks.length - 1);
   1256 
   1257      if (first) {
   1258        chunkFrame = util._extend({}, frame);
   1259        chunkFrame.flags = util._extend({}, frame.flags);
   1260        chunkFrame.flags['END_' + frame.type] = last;
   1261      } else {
   1262        chunkFrame = {
   1263          type: 'CONTINUATION',
   1264          flags: { END_HEADERS: last },
   1265          stream: frame.stream
   1266        };
   1267      }
   1268      chunkFrame.data = chunks[i];
   1269 
   1270      this.push(chunkFrame);
   1271    }
   1272  }
   1273 
   1274  // * otherwise, the frame is forwarded without taking any action
   1275  else {
   1276    this.push(frame);
   1277  }
   1278 
   1279  done();
   1280 };
   1281 
   1282 // The Decompressor class
   1283 // ----------------------
   1284 
   1285 // The Decompressor is a stateful transform stream, since it has to collect multiple frames first,
   1286 // and the decoding comes after unifying the payload of those frames.
   1287 //
   1288 // If there's a frame in progress, `this._inProgress` is `true`. The frames are collected in
   1289 // `this._frames`, and the type of the frame and the stream identifier is stored in `this._type`
   1290 // and `this._stream` respectively.
util.inherits(Decompressor, TransformStream);
// Constructs a Decompressor operating in object mode. `log` is a logger with
// a `child` factory; `type` selects the role and must be 'REQUEST' or
// 'RESPONSE'.
function Decompressor(log, type) {
  TransformStream.call(this, { objectMode: true });

  this._log = log.child({ component: 'compressor' });

  assert((type === 'REQUEST') || (type === 'RESPONSE'));
  // Decompression state: the HPACK header table shared by all header blocks.
  this._table = new HeaderTable(this._log);

  // No multi-frame header block is being collected yet; `_base` will hold a
  // copy of the first frame of a series while one is in progress.
  this._inProgress = false;
  this._base = undefined;
}
   1303 
   1304 // Changing the header table size
// Changes the header table size limit by delegating straight to the
// underlying HeaderTable.
Decompressor.prototype.setTableSizeLimit = function setTableSizeLimit(size) {
  this._table.setSizeLimit(size);
};
   1308 
   1309 // `decompress` takes a full header block, and decompresses it using a new `HeaderSetDecompressor`
   1310 // stream instance. This means that from now on, the advantages of streaming header decoding are
   1311 // lost, but the API becomes simpler.
   1312 Decompressor.prototype.decompress = function decompress(block) {
   1313  var decompressor = new HeaderSetDecompressor(this._log, this._table);
   1314  decompressor.end(block);
   1315 
   1316  var seenNonColonHeader = false;
   1317  var headers = {};
   1318  var pair;
   1319  while (pair = decompressor.read()) {
   1320    var name = pair[0];
   1321    var value = pair[1];
   1322    var isColonHeader = (name.trim()[0] === ':');
   1323    if (seenNonColonHeader && isColonHeader) {
   1324        this.emit('error', 'PROTOCOL_ERROR');
   1325        return headers;
   1326    }
   1327    seenNonColonHeader = !isColonHeader;
   1328    if (name in headers) {
   1329      if (headers[name] instanceof Array) {
   1330        headers[name].push(value);
   1331      } else {
   1332        headers[name] = [headers[name], value];
   1333      }
   1334    } else {
   1335      headers[name] = value;
   1336    }
   1337  }
   1338 
   1339  // * If there are multiple Cookie header fields after decompression, these MUST be concatenated
   1340  //   into a single octet string using the two octet delimiter of 0x3B, 0x20 (the ASCII
   1341  //   string "; ").
   1342  if (('cookie' in headers) && (headers['cookie'] instanceof Array)) {
   1343    headers['cookie'] = headers['cookie'].join('; ');
   1344  }
   1345 
   1346  return headers;
   1347 };
   1348 
   1349 // When a `frame` arrives
// Collects HEADERS/PUSH_PROMISE frames and their CONTINUATIONs, and once the
// series ends, decompresses the concatenated payload and pushes out a single
// frame carrying the decoded `headers` object. Other frames pass through.
Decompressor.prototype._transform = function _transform(frame, encoding, done) {
  // * and the collection process is already `_inProgress`, the frame is simply stored, except if
  //   it's an illegal frame (anything but a CONTINUATION on the same stream)
  if (this._inProgress) {
    if ((frame.type !== 'CONTINUATION') || (frame.stream !== this._base.stream)) {
      this._log.error('A series of HEADER frames were not continuous');
      this.emit('error', 'PROTOCOL_ERROR');
      // NOTE(review): `done()` is never called on this path, leaving the
      // transform stalled after the error — confirm this is intentional.
      return;
    }
    this._frames.push(frame);
  }

  // * and the collection process is not `_inProgress`, but the new frame's type is HEADERS or
  //   PUSH_PROMISE, a new collection process begins
  else if ((frame.type === 'HEADERS') || (frame.type === 'PUSH_PROMISE')) {
    this._inProgress = true;
    this._base = util._extend({}, frame);
    this._frames = [frame];
  }

  // * otherwise, the frame is forwarded without taking any action
  else {
    this.push(frame);
  }

  // * When the frame signals that it's the last in the series, the header block chunks are
  //   concatenated, the headers are decompressed, and a new frame gets pushed out with the
  //   decompressed headers.
  if (this._inProgress && (frame.flags.END_HEADERS || frame.flags.END_PUSH_PROMISE)) {
    var buffer = concat(this._frames.map(function(frame) {
      return frame.data;
    }));
    try {
      var headers = this.decompress(buffer);
    } catch(error) {
      this._log.error({ err: error }, 'Header decompression error');
      this.emit('error', 'COMPRESSION_ERROR');
      // NOTE(review): `done()` is skipped on this error path too.
      return;
    }
    this.push(util._extend(this._base, { headers: headers }));
    this._inProgress = false;
  }

  done();
};
   1395 
   1396 // Helper functions
   1397 // ================
   1398 
   1399 // Concatenate an array of buffers into a new buffer
   1400 function concat(buffers) {
   1401  var size = 0;
   1402  for (var i = 0; i < buffers.length; i++) {
   1403    size += buffers[i].length;
   1404  }
   1405 
   1406  var concatenated = Buffer.alloc(size);
   1407  for (var cursor = 0, j = 0; j < buffers.length; cursor += buffers[j].length, j++) {
   1408    buffers[j].copy(concatenated, cursor);
   1409  }
   1410 
   1411  return concatenated;
   1412 }
   1413 
   1414 // Cut `buffer` into chunks not larger than `size`
   1415 function cut(buffer, size) {
   1416  var chunks = [];
   1417  var cursor = 0;
   1418  do {
   1419    var chunkSize = Math.min(size, buffer.length - cursor);
   1420    chunks.push(buffer.slice(cursor, cursor + chunkSize));
   1421    cursor += chunkSize;
   1422  } while(cursor < buffer.length);
   1423  return chunks;
   1424 }
   1425 
   1426 function trim(string) {
   1427  return string.trim();
   1428 }