compression-large-flush-output.any.js (1247B)
// META: global=window,worker,shadowrealm
// META: script=third_party/pako/pako_inflate.min.js
// META: script=resources/concatenate-stream.js
// META: script=resources/decompress.js
// META: script=resources/formats.js
// META: timeout=long

'use strict';

// This test verifies that a large flush output will not truncate the
// final results.

async function compressData(chunk, format) {
  const cs = new CompressionStream(format);
  const writer = cs.writable.getWriter();
  writer.write(chunk);
  writer.close();
  return await concatenateStream(cs.readable);
}

// JSON-encoded array of 10,000 numbers ("[0,1,2,...]"). This produces 48_891 bytes of data.
const fullData = new TextEncoder().encode(JSON.stringify(Array.from({ length: 10_000 }, (_, i) => i)));
const data = fullData.subarray(0, 35_579);
const expectedValue = data;

for (const format of formats) {
  promise_test(async t => {
    const compressedData = await compressData(data, format);
    const decompressedData = await decompressDataOrPako(compressedData, format);
    // Check that we got the same result as our original input.
    assert_array_equals(decompressedData, expectedValue, 'value should match');
  }, `${format} compression with large flush output`);
}
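For context, the test depends on two helpers loaded via the META script includes above (resources/concatenate-stream.js and resources/decompress.js) whose bodies are not shown here. Below is a minimal sketch of what they might look like, assuming concatenateStream drains a ReadableStream into a single Uint8Array and decompressDataOrPako uses the native DecompressionStream where available, falling back to pako otherwise. The names match the includes; the implementations are assumptions, not the actual WPT code.

// Sketch only: the real implementations live in resources/concatenate-stream.js
// and resources/decompress.js; these bodies are assumptions.

// Read every chunk from a ReadableStream and join them into one Uint8Array.
async function concatenateStream(readableStream) {
  const chunks = [];
  let totalLength = 0;
  const reader = readableStream.getReader();
  while (true) {
    const { value, done } = await reader.read();
    if (done) break;
    chunks.push(value);
    totalLength += value.byteLength;
  }
  const concatenated = new Uint8Array(totalLength);
  let offset = 0;
  for (const chunk of chunks) {
    concatenated.set(chunk, offset);
    offset += chunk.byteLength;
  }
  return concatenated;
}

// Decompress with the native DecompressionStream when available; otherwise
// fall back to pako (loaded via the pako_inflate META script above).
async function decompressDataOrPako(compressedData, format) {
  if (typeof DecompressionStream === 'function') {
    const ds = new DecompressionStream(format);
    const writer = ds.writable.getWriter();
    // Unawaited write/close mirrors the style of compressData() in the test.
    writer.write(compressedData);
    writer.close();
    return await concatenateStream(ds.readable);
  }
  // pako.inflate() autodetects zlib ("deflate") and gzip wrappers by header;
  // raw deflate streams need the dedicated inflateRaw() entry point.
  return format === 'deflate-raw' ? pako.inflateRaw(compressedData)
                                  : pako.inflate(compressedData);
}

A pako fallback matters for this test because it runs in scopes (see the global= META line) where DecompressionStream may be unavailable even though CompressionStream output still needs to be verified byte-for-byte.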