
Commit da9aafd

test,stream: add compression tests to the WPT test suite
1 parent: 9fbd7f8

32 files changed: +1433 −1 lines

test/common/wpt.js (+2 −1)

@@ -210,6 +210,7 @@ class ResourceLoader {
       const data = await fsPromises.readFile(file);
       return {
         ok: true,
+        arrayBuffer() { return data.buffer; },
         json() { return JSON.parse(data.toString()); },
         text() { return data.toString(); },
       };
@@ -440,7 +441,7 @@ class StatusLoader {
         const list = this.grep(filepath);
         result = result.concat(list);
       } else {
-        if (!(/\.\w+\.js$/.test(filepath)) || filepath.endsWith('.helper.js')) {
+        if (!(/\.\w+\.js$/.test(filepath))) {
           continue;
         }
         result.push(filepath);
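Two things change here: the StatusLoader no longer skips files ending in `.helper.js` when collecting tests, and the `ResourceLoader` response object gains `arrayBuffer()`. The latter is what lets the new compression tests read binary fixtures, since under Node's WPT harness `fetch()` resolves through this shim. A minimal sketch of the consuming side (the fixture path is hypothetical, not from this commit):

// Illustrative only: how a test consumes the shimmed response.
async function fetchFixtureBytes() {
  const response = await fetch('/media/example.webm'); // hypothetical path
  const buffer = await response.arrayBuffer();         // provided by the shim above
  return new Uint8Array(buffer);
}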

test/fixtures/wpt/README.md (+1)

@@ -11,6 +11,7 @@ See [test/wpt](../../wpt/README.md) for information on how these tests are run.
 Last update:
 
 - common: https://github.com/web-platform-tests/wpt/tree/dbd648158d/common
+- compression: https://github.com/web-platform-tests/wpt/tree/c82521cfa5/compression
 - console: https://github.com/web-platform-tests/wpt/tree/767ae35464/console
 - dom/abort: https://github.com/web-platform-tests/wpt/tree/d1f1ecbd52/dom/abort
 - dom/events: https://github.com/web-platform-tests/wpt/tree/ab8999891c/dom/events
test/fixtures/wpt/compression/META.yml (+3, new file)

@@ -0,0 +1,3 @@
+spec: https://wicg.github.io/compression/
+suggested_reviewers:
+  - ricea
test/fixtures/wpt/compression/compression-bad-chunks.tentative.any.js (+74, new file)

@@ -0,0 +1,74 @@
+// META: global=window,worker,shadowrealm
+
+'use strict';
+
+const badChunks = [
+  {
+    name: 'undefined',
+    value: undefined
+  },
+  {
+    name: 'null',
+    value: null
+  },
+  {
+    name: 'numeric',
+    value: 3.14
+  },
+  {
+    name: 'object, not BufferSource',
+    value: {}
+  },
+  {
+    name: 'array',
+    value: [65]
+  },
+  {
+    name: 'SharedArrayBuffer',
+    // Use a getter to postpone construction so that all tests don't fail where
+    // SharedArrayBuffer is not yet implemented.
+    get value() {
+      // See https://github.com/whatwg/html/issues/5380 for why not `new SharedArrayBuffer()`
+      return new WebAssembly.Memory({ shared:true, initial:1, maximum:1 }).buffer;
+    }
+  },
+  {
+    name: 'shared Uint8Array',
+    get value() {
+      // See https://github.com/whatwg/html/issues/5380 for why not `new SharedArrayBuffer()`
+      return new Uint8Array(new WebAssembly.Memory({ shared:true, initial:1, maximum:1 }).buffer)
+    }
+  },
+];
+
+for (const chunk of badChunks) {
+  promise_test(async t => {
+    const cs = new CompressionStream('gzip');
+    const reader = cs.readable.getReader();
+    const writer = cs.writable.getWriter();
+    const writePromise = writer.write(chunk.value);
+    const readPromise = reader.read();
+    await promise_rejects_js(t, TypeError, writePromise, 'write should reject');
+    await promise_rejects_js(t, TypeError, readPromise, 'read should reject');
+  }, `chunk of type ${chunk.name} should error the stream for gzip`);
+
+  promise_test(async t => {
+    const cs = new CompressionStream('deflate');
+    const reader = cs.readable.getReader();
+    const writer = cs.writable.getWriter();
+    const writePromise = writer.write(chunk.value);
+    const readPromise = reader.read();
+    await promise_rejects_js(t, TypeError, writePromise, 'write should reject');
+    await promise_rejects_js(t, TypeError, readPromise, 'read should reject');
+  }, `chunk of type ${chunk.name} should error the stream for deflate`);
+
+  promise_test(async t => {
+    const cs = new CompressionStream('deflate-raw');
+    const reader = cs.readable.getReader();
+    const writer = cs.writable.getWriter();
+    const writePromise = writer.write(chunk.value);
+    const readPromise = reader.read();
+    await promise_rejects_js(t, TypeError, writePromise, 'write should reject');
+    await promise_rejects_js(t, TypeError, readPromise, 'read should reject');
+  }, `chunk of type ${chunk.name} should error the stream for deflate-raw`);
+}
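Per the Compression spec, only non-shared BufferSource chunks (an ArrayBuffer or a view on one) are accepted, which is what every case above violates. For contrast, a minimal sketch, not part of the commit, of a write that succeeds:

// A Uint8Array is a valid BufferSource, so the same write/read pair succeeds:
async function acceptsBufferSource() {
  const cs = new CompressionStream('gzip');
  const writer = cs.writable.getWriter();
  writer.write(new TextEncoder().encode('Hello')); // accepted
  writer.close();
  const { value, done } = await cs.readable.getReader().read();
  // value is a Uint8Array holding the beginning of the gzip stream
  return { value, done };
}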
test/fixtures/wpt/compression/compression-constructor-error.tentative.any.js (+15, new file)

@@ -0,0 +1,15 @@
+// META: global=window,worker,shadowrealm
+
+'use strict';
+
+test(t => {
+  assert_throws_js(TypeError, () => new CompressionStream('a'), 'constructor should throw');
+}, '"a" should cause the constructor to throw');
+
+test(t => {
+  assert_throws_js(TypeError, () => new CompressionStream(), 'constructor should throw');
+}, 'no input should cause the constructor to throw');
+
+test(t => {
+  assert_throws_js(Error, () => new CompressionStream({ toString() { throw Error(); } }), 'constructor should throw');
+}, 'non-string input should cause the constructor to throw');
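The spec defines exactly three accepted format strings; anything else throws a TypeError after the argument is converted to a string, which is why the third test can observe an Error thrown from toString() itself. For contrast, the accepted constructors:

// All three spec-defined formats construct without throwing:
new CompressionStream('gzip');
new CompressionStream('deflate');
new CompressionStream('deflate-raw');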
test/fixtures/wpt/compression/compression-including-empty-chunk.tentative.any.js (+63, new file)

@@ -0,0 +1,63 @@
+// META: global=window,worker,shadowrealm
+// META: script=third_party/pako/pako_inflate.min.js
+// META: timeout=long
+
+'use strict';
+
+// This test asserts that compressing '' doesn't affect the compressed data.
+// Example: compressing ['Hello', '', 'Hello'] results in 'HelloHello'
+
+async function compressChunkList(chunkList, format) {
+  const cs = new CompressionStream(format);
+  const writer = cs.writable.getWriter();
+  for (const chunk of chunkList) {
+    const chunkByte = new TextEncoder().encode(chunk);
+    writer.write(chunkByte);
+  }
+  const closePromise = writer.close();
+  const out = [];
+  const reader = cs.readable.getReader();
+  let totalSize = 0;
+  while (true) {
+    const { value, done } = await reader.read();
+    if (done)
+      break;
+    out.push(value);
+    totalSize += value.byteLength;
+  }
+  await closePromise;
+  const concatenated = new Uint8Array(totalSize);
+  let offset = 0;
+  for (const array of out) {
+    concatenated.set(array, offset);
+    offset += array.byteLength;
+  }
+  return concatenated;
+}
+
+const chunkLists = [
+  ['', 'Hello', 'Hello'],
+  ['Hello', '', 'Hello'],
+  ['Hello', 'Hello', '']
+];
+const expectedValue = new TextEncoder().encode('HelloHello');
+
+for (const chunkList of chunkLists) {
+  promise_test(async t => {
+    const compressedData = await compressChunkList(chunkList, 'deflate');
+    // decompress with pako, and check that we got the same result as our original string
+    assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
+  }, `the result of compressing [${chunkList}] with deflate should be 'HelloHello'`);
+
+  promise_test(async t => {
+    const compressedData = await compressChunkList(chunkList, 'gzip');
+    // decompress with pako, and check that we got the same result as our original string
+    assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
+  }, `the result of compressing [${chunkList}] with gzip should be 'HelloHello'`);
+
+  promise_test(async t => {
+    const compressedData = await compressChunkList(chunkList, 'deflate-raw');
+    // decompress with pako, and check that we got the same result as our original string
+    assert_array_equals(expectedValue, pako.inflateRaw(compressedData), 'value should match');
+  }, `the result of compressing [${chunkList}] with deflate-raw should be 'HelloHello'`);
+}
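These tests decompress with pako rather than the platform's own DecompressionStream, so a compression bug cannot be masked by a symmetric decompression bug. For reference, a sketch of the same check done through DecompressionStream instead — an alternative, not what the test does:

// Sketch only: drain a DecompressionStream into a single Uint8Array.
async function decompress(compressedBytes, format) {
  const ds = new DecompressionStream(format);
  const writer = ds.writable.getWriter();
  writer.write(compressedBytes);
  writer.close();
  const chunks = [];
  let totalSize = 0;
  const reader = ds.readable.getReader();
  while (true) {
    const { value, done } = await reader.read();
    if (done) break;
    chunks.push(value);
    totalSize += value.byteLength;
  }
  const result = new Uint8Array(totalSize);
  let offset = 0;
  for (const chunk of chunks) {
    result.set(chunk, offset);
    offset += chunk.byteLength;
  }
  return result;
}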
test/fixtures/wpt/compression/compression-large-flush-output.any.js (+41, new file)

@@ -0,0 +1,41 @@
+// META: global=window,worker,shadowrealm
+// META: script=third_party/pako/pako_inflate.min.js
+// META: script=resources/concatenate-stream.js
+// META: timeout=long
+
+'use strict';
+
+// This test verifies that a large flush output will not truncate the
+// final results.
+
+async function compressData(chunk, format) {
+  const cs = new CompressionStream(format);
+  const writer = cs.writable.getWriter();
+  writer.write(chunk);
+  writer.close();
+  return await concatenateStream(cs.readable);
+}
+
+// JSON-encoded array of 10 thousand numbers ("[0,1,2,...]"). This produces 48_891 bytes of data.
+const fullData = new TextEncoder().encode(JSON.stringify(Array.from({ length: 10_000 }, (_, i) => i)));
+const data = fullData.subarray(0, 35_579);
+const expectedValue = data;
+
+promise_test(async t => {
+  const compressedData = await compressData(data, 'deflate');
+  // decompress with pako, and check that we got the same result as our original string
+  assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
+}, `deflate compression with large flush output`);
+
+promise_test(async t => {
+  const compressedData = await compressData(data, 'gzip');
+  // decompress with pako, and check that we got the same result as our original string
+  assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
+}, `gzip compression with large flush output`);
+
+promise_test(async t => {
+  const compressedData = await compressData(data, 'deflate-raw');
+  // decompress with pako, and check that we got the same result as our original string
+  assert_array_equals(expectedValue, pako.inflateRaw(compressedData), 'value should match');
+}, `deflate-raw compression with large flush output`);
+
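`concatenateStream` comes from resources/concatenate-stream.js, which is not shown in this excerpt. Judging by how it is used here, it reads a ReadableStream to completion and joins the chunks, roughly like the sketch below — an assumption about the helper's shape, not its verbatim source:

// Assumed shape of the helper from resources/concatenate-stream.js:
// read every chunk of a ReadableStream and join them into one Uint8Array.
async function concatenateStream(readableStream) {
  const reader = readableStream.getReader();
  const chunks = [];
  let totalSize = 0;
  while (true) {
    const { value, done } = await reader.read();
    if (done) break;
    chunks.push(value);
    totalSize += value.byteLength;
  }
  const concatenated = new Uint8Array(totalSize);
  let offset = 0;
  for (const chunk of chunks) {
    concatenated.set(chunk, offset);
    offset += chunk.byteLength;
  }
  return concatenated;
}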
test/fixtures/wpt/compression/compression-multiple-chunks.tentative.any.js (+67, new file)

@@ -0,0 +1,67 @@
+// META: global=window,worker,shadowrealm
+// META: script=third_party/pako/pako_inflate.min.js
+// META: timeout=long
+
+'use strict';
+
+// This test asserts that compressing multiple chunks should work.
+
+// Example: ('Hello', 3) => TextEncoder().encode('HelloHelloHello')
+function makeExpectedChunk(input, numberOfChunks) {
+  const expectedChunk = input.repeat(numberOfChunks);
+  return new TextEncoder().encode(expectedChunk);
+}
+
+// Example: ('Hello', 3, 'deflate') => compress ['Hello', 'Hello', 'Hello']
+async function compressMultipleChunks(input, numberOfChunks, format) {
+  const cs = new CompressionStream(format);
+  const writer = cs.writable.getWriter();
+  const chunk = new TextEncoder().encode(input);
+  for (let i = 0; i < numberOfChunks; ++i) {
+    writer.write(chunk);
+  }
+  const closePromise = writer.close();
+  const out = [];
+  const reader = cs.readable.getReader();
+  let totalSize = 0;
+  while (true) {
+    const { value, done } = await reader.read();
+    if (done)
+      break;
+    out.push(value);
+    totalSize += value.byteLength;
+  }
+  await closePromise;
+  const concatenated = new Uint8Array(totalSize);
+  let offset = 0;
+  for (const array of out) {
+    concatenated.set(array, offset);
+    offset += array.byteLength;
+  }
+  return concatenated;
+}
+
+const hello = 'Hello';
+
+for (let numberOfChunks = 2; numberOfChunks <= 16; ++numberOfChunks) {
+  promise_test(async t => {
+    const compressedData = await compressMultipleChunks(hello, numberOfChunks, 'deflate');
+    const expectedValue = makeExpectedChunk(hello, numberOfChunks);
+    // decompress with pako, and check that we got the same result as our original string
+    assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
+  }, `compressing ${numberOfChunks} chunks with deflate should work`);
+
+  promise_test(async t => {
+    const compressedData = await compressMultipleChunks(hello, numberOfChunks, 'gzip');
+    const expectedValue = makeExpectedChunk(hello, numberOfChunks);
+    // decompress with pako, and check that we got the same result as our original string
+    assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
+  }, `compressing ${numberOfChunks} chunks with gzip should work`);
+
+  promise_test(async t => {
+    const compressedData = await compressMultipleChunks(hello, numberOfChunks, 'deflate-raw');
+    const expectedValue = makeExpectedChunk(hello, numberOfChunks);
+    // decompress with pako, and check that we got the same result as our original string
+    assert_array_equals(expectedValue, pako.inflateRaw(compressedData), 'value should match');
+  }, `compressing ${numberOfChunks} chunks with deflate-raw should work`);
+}
test/fixtures/wpt/compression/compression-output-length.tentative.any.js (+64, new file)

@@ -0,0 +1,64 @@
+// META: global=window,worker,shadowrealm
+
+'use strict';
+
+// This test asserts that compressed data length is shorter than the original
+// data length. If the input is extremely small, the compressed data may be
+// larger than the original data.
+
+const LARGE_FILE = '/media/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.webm';
+
+async function compressArrayBuffer(input, format) {
+  const cs = new CompressionStream(format);
+  const writer = cs.writable.getWriter();
+  writer.write(input);
+  const closePromise = writer.close();
+  const out = [];
+  const reader = cs.readable.getReader();
+  let totalSize = 0;
+  while (true) {
+    const { value, done } = await reader.read();
+    if (done)
+      break;
+    out.push(value);
+    totalSize += value.byteLength;
+  }
+  await closePromise;
+  const concatenated = new Uint8Array(totalSize);
+  let offset = 0;
+  for (const array of out) {
+    concatenated.set(array, offset);
+    offset += array.byteLength;
+  }
+  return concatenated;
+}
+
+promise_test(async () => {
+  const response = await fetch(LARGE_FILE);
+  const buffer = await response.arrayBuffer();
+  const bufferView = new Uint8Array(buffer);
+  const originalLength = bufferView.length;
+  const compressedData = await compressArrayBuffer(bufferView, 'deflate');
+  const compressedLength = compressedData.length;
+  assert_less_than(compressedLength, originalLength, 'output should be smaller');
+}, 'the length of deflated data should be shorter than that of the original data');
+
+promise_test(async () => {
+  const response = await fetch(LARGE_FILE);
+  const buffer = await response.arrayBuffer();
+  const bufferView = new Uint8Array(buffer);
+  const originalLength = bufferView.length;
+  const compressedData = await compressArrayBuffer(bufferView, 'gzip');
+  const compressedLength = compressedData.length;
+  assert_less_than(compressedLength, originalLength, 'output should be smaller');
+}, 'the length of gzipped data should be shorter than that of the original data');
+
+promise_test(async () => {
+  const response = await fetch(LARGE_FILE);
+  const buffer = await response.arrayBuffer();
+  const bufferView = new Uint8Array(buffer);
+  const originalLength = bufferView.length;
+  const compressedData = await compressArrayBuffer(bufferView, 'deflate-raw');
+  const compressedLength = compressedData.length;
+  assert_less_than(compressedLength, originalLength, 'output should be smaller');
+}, 'the length of deflated (with -raw) data should be shorter than that of the original data');
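These fetch() calls are plausibly the reason for the test/common/wpt.js change at the top of this commit: under Node's harness the response comes from the ResourceLoader shim, and response.arrayBuffer() had to exist before these tests could read the media fixture's bytes.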
