commit 2e28285b9aa340ac2b89088e13dbc50220920917
parent 67add2edd1f5791adec1ea8f576dbf8c183755fd
Author: Kagami Sascha Rosylight <krosylight@proton.me>
Date: Mon, 17 Nov 2025 13:11:08 +0000
Bug 1921583 - Part 5: Refactor compression tests to cover brotli r=smaug
Differential Revision: https://phabricator.services.mozilla.com/D269961
Diffstat:
5 files changed, 83 insertions(+), 108 deletions(-)
diff --git a/testing/web-platform/tests/compression/compression-including-empty-chunk.any.js b/testing/web-platform/tests/compression/compression-including-empty-chunk.any.js
@@ -1,5 +1,7 @@
// META: global=window,worker,shadowrealm
// META: script=third_party/pako/pako_inflate.min.js
+// META: script=resources/decompress.js
+// META: script=resources/formats.js
// META: timeout=long
'use strict';
@@ -42,22 +44,13 @@ const chunkLists = [
];
const expectedValue = new TextEncoder().encode('HelloHello');
-for (const chunkList of chunkLists) {
- promise_test(async t => {
- const compressedData = await compressChunkList(chunkList, 'deflate');
- // decompress with pako, and check that we got the same result as our original string
- assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
- }, `the result of compressing [${chunkList}] with deflate should be 'HelloHello'`);
-
- promise_test(async t => {
- const compressedData = await compressChunkList(chunkList, 'gzip');
- // decompress with pako, and check that we got the same result as our original string
- assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
- }, `the result of compressing [${chunkList}] with gzip should be 'HelloHello'`);
-
- promise_test(async t => {
- const compressedData = await compressChunkList(chunkList, 'deflate-raw');
- // decompress with pako, and check that we got the same result as our original string
- assert_array_equals(expectedValue, pako.inflateRaw(compressedData), 'value should match');
- }, `the result of compressing [${chunkList}] with deflate-raw should be 'HelloHello'`);
+for (const format of formats) {
+ for (const chunkList of chunkLists) {
+ promise_test(async t => {
+ const compressedData = await compressChunkList(chunkList, format);
+ const decompressedData = await decompressDataOrPako(compressedData, format);
+ // check that we got the same result as our original string
+ assert_array_equals(expectedValue, decompressedData, 'value should match');
+ }, `the result of compressing [${chunkList}] with ${format} should be 'HelloHello'`);
+ }
}
diff --git a/testing/web-platform/tests/compression/compression-large-flush-output.any.js b/testing/web-platform/tests/compression/compression-large-flush-output.any.js
@@ -1,6 +1,8 @@
// META: global=window,worker,shadowrealm
// META: script=third_party/pako/pako_inflate.min.js
// META: script=resources/concatenate-stream.js
+// META: script=resources/decompress.js
+// META: script=resources/formats.js
// META: timeout=long
'use strict';
@@ -21,21 +23,11 @@ const fullData = new TextEncoder().encode(JSON.stringify(Array.from({ length: 10
const data = fullData.subarray(0, 35_579);
const expectedValue = data;
-promise_test(async t => {
- const compressedData = await compressData(data, 'deflate');
- // decompress with pako, and check that we got the same result as our original string
- assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
-}, `deflate compression with large flush output`);
-
-promise_test(async t => {
- const compressedData = await compressData(data, 'gzip');
- // decompress with pako, and check that we got the same result as our original string
- assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
-}, `gzip compression with large flush output`);
-
-promise_test(async t => {
- const compressedData = await compressData(data, 'deflate-raw');
- // decompress with pako, and check that we got the same result as our original string
- assert_array_equals(expectedValue, pako.inflateRaw(compressedData), 'value should match');
-}, `deflate-raw compression with large flush output`);
-
+for (const format of formats) {
+ promise_test(async t => {
+ const compressedData = await compressData(data, format);
+ const decompressedData = await decompressDataOrPako(compressedData, format);
+ // check that we got the same result as our original string
+ assert_array_equals(decompressedData, expectedValue, 'value should match');
+ }, `${format} compression with large flush output`);
+}
diff --git a/testing/web-platform/tests/compression/compression-multiple-chunks.any.js b/testing/web-platform/tests/compression/compression-multiple-chunks.any.js
@@ -1,5 +1,7 @@
// META: global=window,worker,shadowrealm
// META: script=third_party/pako/pako_inflate.min.js
+// META: script=resources/decompress.js
+// META: script=resources/formats.js
// META: timeout=long
'use strict';
@@ -43,25 +45,14 @@ async function compressMultipleChunks(input, numberOfChunks, format) {
const hello = 'Hello';
-for (let numberOfChunks = 2; numberOfChunks <= 16; ++numberOfChunks) {
- promise_test(async t => {
- const compressedData = await compressMultipleChunks(hello, numberOfChunks, 'deflate');
- const expectedValue = makeExpectedChunk(hello, numberOfChunks);
- // decompress with pako, and check that we got the same result as our original string
- assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
- }, `compressing ${numberOfChunks} chunks with deflate should work`);
-
- promise_test(async t => {
- const compressedData = await compressMultipleChunks(hello, numberOfChunks, 'gzip');
- const expectedValue = makeExpectedChunk(hello, numberOfChunks);
- // decompress with pako, and check that we got the same result as our original string
- assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
- }, `compressing ${numberOfChunks} chunks with gzip should work`);
-
- promise_test(async t => {
- const compressedData = await compressMultipleChunks(hello, numberOfChunks, 'deflate-raw');
- const expectedValue = makeExpectedChunk(hello, numberOfChunks);
- // decompress with pako, and check that we got the same result as our original string
- assert_array_equals(expectedValue, pako.inflateRaw(compressedData), 'value should match');
- }, `compressing ${numberOfChunks} chunks with deflate-raw should work`);
+for (const format of formats) {
+ for (let numberOfChunks = 2; numberOfChunks <= 16; ++numberOfChunks) {
+ promise_test(async t => {
+ const compressedData = await compressMultipleChunks(hello, numberOfChunks, format);
+ const decompressedData = await decompressDataOrPako(compressedData, format);
+ const expectedValue = makeExpectedChunk(hello, numberOfChunks);
+ // check that we got the same result as our original string
+ assert_array_equals(decompressedData, expectedValue, 'value should match');
+ }, `compressing ${numberOfChunks} chunks with ${format} should work`);
+ }
}
diff --git a/testing/web-platform/tests/compression/compression-stream.any.js b/testing/web-platform/tests/compression/compression-stream.any.js
@@ -1,5 +1,7 @@
// META: global=window,worker,shadowrealm
// META: script=third_party/pako/pako_inflate.min.js
+// META: script=resources/decompress.js
+// META: script=resources/formats.js
// META: timeout=long
'use strict';
@@ -7,6 +9,12 @@
const SMALL_FILE = "/media/foo.vtt";
const LARGE_FILE = "/media/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.webm";
+let dataPromiseList = [
+ ["empty data", Promise.resolve(new Uint8Array(0))],
+ ["small amount data", fetch(SMALL_FILE).then(response => response.bytes())],
+ ["large amount data", fetch(LARGE_FILE).then(response => response.bytes())],
+];
+
async function compressArrayBuffer(input, format) {
const cs = new CompressionStream(format);
const writer = cs.writable.getWriter();
@@ -38,54 +46,14 @@ test(() => {
}, "non supported format should throw");
}, "CompressionStream constructor should throw on invalid format");
-promise_test(async () => {
- const buffer = new ArrayBuffer(0);
- const bufferView = new Uint8Array(buffer);
- const compressedData = await compressArrayBuffer(bufferView, "deflate");
- // decompress with pako, and check that we got the same result as our original string
- assert_array_equals(bufferView, pako.inflate(compressedData));
-}, "deflated empty data should be reinflated back to its origin");
-
-promise_test(async () => {
- const response = await fetch(SMALL_FILE)
- const buffer = await response.arrayBuffer();
- const bufferView = new Uint8Array(buffer);
- const compressedData = await compressArrayBuffer(bufferView, "deflate");
- // decompress with pako, and check that we got the same result as our original string
- assert_array_equals(bufferView, pako.inflate(compressedData));
-}, "deflated small amount data should be reinflated back to its origin");
-
-promise_test(async () => {
- const response = await fetch(LARGE_FILE)
- const buffer = await response.arrayBuffer();
- const bufferView = new Uint8Array(buffer);
- const compressedData = await compressArrayBuffer(bufferView, "deflate");
- // decompress with pako, and check that we got the same result as our original string
- assert_array_equals(bufferView, pako.inflate(compressedData));
-}, "deflated large amount data should be reinflated back to its origin");
-
-promise_test(async () => {
- const buffer = new ArrayBuffer(0);
- const bufferView = new Uint8Array(buffer);
- const compressedData = await compressArrayBuffer(bufferView, "gzip");
- // decompress with pako, and check that we got the same result as our original string
- assert_array_equals(bufferView, pako.inflate(compressedData));
-}, "gzipped empty data should be reinflated back to its origin");
-
-promise_test(async () => {
- const response = await fetch(SMALL_FILE)
- const buffer = await response.arrayBuffer();
- const bufferView = new Uint8Array(buffer);
- const compressedData = await compressArrayBuffer(bufferView, "gzip");
- // decompress with pako, and check that we got the same result as our original string
- assert_array_equals(bufferView, pako.inflate(compressedData));
-}, "gzipped small amount data should be reinflated back to its origin");
-
-promise_test(async () => {
- const response = await fetch(LARGE_FILE)
- const buffer = await response.arrayBuffer();
- const bufferView = new Uint8Array(buffer);
- const compressedData = await compressArrayBuffer(bufferView, "gzip");
- // decompress with pako, and check that we got the same result as our original string
- assert_array_equals(bufferView, pako.inflate(compressedData));
-}, "gzipped large amount data should be reinflated back to its origin");
+for (const format of formats) {
+ for (const [label, dataPromise] of dataPromiseList) {
+ promise_test(async () => {
+ const bufferView = await dataPromise;
+ const compressedData = await compressArrayBuffer(bufferView, format);
+ const decompressedData = await decompressDataOrPako(compressedData, format);
+      // check that we got the same result as our original data
+ assert_array_equals(decompressedData, bufferView, 'value should match');
+ }, `${format} ${label} should be reinflated back to its origin`);
+ }
+}
diff --git a/testing/web-platform/tests/compression/resources/decompress.js b/testing/web-platform/tests/compression/resources/decompress.js
@@ -0,0 +1,31 @@
+/**
+ * @param {Uint8Array} chunk
+ * @param {string} format
+ */
+async function decompressData(chunk, format) {
+ const ds = new DecompressionStream(format);
+ const writer = ds.writable.getWriter();
+ writer.write(chunk);
+ writer.close();
+ const decompressedChunkList = await Array.fromAsync(ds.readable);
+ const mergedBlob = new Blob(decompressedChunkList);
+ return await mergedBlob.bytes();
+}
+
+/**
+ * @param {Uint8Array} chunk
+ * @param {string} format
+ */
+async function decompressDataOrPako(chunk, format) {
+ // Keep using pako for zlib to preserve existing test behavior
+ if (["deflate", "gzip"].includes(format)) {
+ return pako.inflate(chunk);
+ }
+ if (format === "deflate-raw") {
+ return pako.inflateRaw(chunk);
+ }
+
+ // Use DecompressionStream for any newer formats, assuming implementations
+ // always implement decompression if they implement compression.
+ return decompressData(chunk, format);
+}