// META: global=window,worker,shadowrealm
// META: script=third_party/pako/pako_inflate.min.js
// META: timeout=long

'use strict';

// This test asserts that compressing multiple chunks should work.

// Example: ('Hello', 3) => TextEncoder().encode('HelloHelloHello')
//
// Builds the expected plaintext for a multi-chunk compression test:
// `input` repeated `numberOfChunks` times, encoded as UTF-8 bytes.
function makeExpectedChunk(input, numberOfChunks) {
  return new TextEncoder().encode(input.repeat(numberOfChunks));
}
| 14 | + |
| 15 | +// Example: ('Hello', 3, 'deflate') => compress ['Hello', 'Hello', Hello'] |
| 16 | +async function compressMultipleChunks(input, numberOfChunks, format) { |
| 17 | + const cs = new CompressionStream(format); |
| 18 | + const writer = cs.writable.getWriter(); |
| 19 | + const chunk = new TextEncoder().encode(input); |
| 20 | + for (let i = 0; i < numberOfChunks; ++i) { |
| 21 | + writer.write(chunk); |
| 22 | + } |
| 23 | + const closePromise = writer.close(); |
| 24 | + const out = []; |
| 25 | + const reader = cs.readable.getReader(); |
| 26 | + let totalSize = 0; |
| 27 | + while (true) { |
| 28 | + const { value, done } = await reader.read(); |
| 29 | + if (done) |
| 30 | + break; |
| 31 | + out.push(value); |
| 32 | + totalSize += value.byteLength; |
| 33 | + } |
| 34 | + await closePromise; |
| 35 | + const concatenated = new Uint8Array(totalSize); |
| 36 | + let offset = 0; |
| 37 | + for (const array of out) { |
| 38 | + concatenated.set(array, offset); |
| 39 | + offset += array.byteLength; |
| 40 | + } |
| 41 | + return concatenated; |
| 42 | +} |
| 43 | + |
const hello = 'Hello';

// Each supported format paired with the pako routine that decompresses it.
// (pako.inflate auto-detects both zlib-wrapped deflate and gzip framing.)
const formatDecompressors = [
  ['deflate', (data) => pako.inflate(data)],
  ['gzip', (data) => pako.inflate(data)],
  ['deflate-raw', (data) => pako.inflateRaw(data)],
];

for (let numberOfChunks = 2; numberOfChunks <= 16; ++numberOfChunks) {
  for (const [format, decompress] of formatDecompressors) {
    promise_test(async t => {
      const compressedData =
          await compressMultipleChunks(hello, numberOfChunks, format);
      const expectedValue = makeExpectedChunk(hello, numberOfChunks);
      // decompress with pako, and check that we got the same result as our original string
      assert_array_equals(expectedValue, decompress(compressedData), 'value should match');
    }, `compressing ${numberOfChunks} chunks with ${format} should work`);
  }
}