test,stream: add compression tests to the WPT test suite
Showing 32 changed files with 1,433 additions and 1 deletion.
@@ -0,0 +1,3 @@
spec: https://wicg.github.io/compression/
suggested_reviewers:
  - ricea
74 changes: 74 additions & 0 deletions
test/fixtures/wpt/compression/compression-bad-chunks.tentative.any.js
@@ -0,0 +1,74 @@
// META: global=window,worker,shadowrealm

'use strict';

const badChunks = [
  {
    name: 'undefined',
    value: undefined
  },
  {
    name: 'null',
    value: null
  },
  {
    name: 'numeric',
    value: 3.14
  },
  {
    name: 'object, not BufferSource',
    value: {}
  },
  {
    name: 'array',
    value: [65]
  },
  {
    name: 'SharedArrayBuffer',
    // Use a getter to postpone construction so that all tests don't fail where
    // SharedArrayBuffer is not yet implemented.
    get value() {
      // See https://github.com/whatwg/html/issues/5380 for why not `new SharedArrayBuffer()`
      return new WebAssembly.Memory({ shared: true, initial: 1, maximum: 1 }).buffer;
    }
  },
  {
    name: 'shared Uint8Array',
    get value() {
      // See https://github.com/whatwg/html/issues/5380 for why not `new SharedArrayBuffer()`
      return new Uint8Array(new WebAssembly.Memory({ shared: true, initial: 1, maximum: 1 }).buffer);
    }
  },
];

for (const chunk of badChunks) {
  promise_test(async t => {
    const cs = new CompressionStream('gzip');
    const reader = cs.readable.getReader();
    const writer = cs.writable.getWriter();
    const writePromise = writer.write(chunk.value);
    const readPromise = reader.read();
    await promise_rejects_js(t, TypeError, writePromise, 'write should reject');
    await promise_rejects_js(t, TypeError, readPromise, 'read should reject');
  }, `chunk of type ${chunk.name} should error the stream for gzip`);

  promise_test(async t => {
    const cs = new CompressionStream('deflate');
    const reader = cs.readable.getReader();
    const writer = cs.writable.getWriter();
    const writePromise = writer.write(chunk.value);
    const readPromise = reader.read();
    await promise_rejects_js(t, TypeError, writePromise, 'write should reject');
    await promise_rejects_js(t, TypeError, readPromise, 'read should reject');
  }, `chunk of type ${chunk.name} should error the stream for deflate`);

  promise_test(async t => {
    const cs = new CompressionStream('deflate-raw');
    const reader = cs.readable.getReader();
    const writer = cs.writable.getWriter();
    const writePromise = writer.write(chunk.value);
    const readPromise = reader.read();
    await promise_rejects_js(t, TypeError, writePromise, 'write should reject');
    await promise_rejects_js(t, TypeError, readPromise, 'read should reject');
  }, `chunk of type ${chunk.name} should error the stream for deflate-raw`);
}
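Note: for contrast with the bad chunks above, any BufferSource (an ArrayBuffer or a non-shared ArrayBuffer view) is a valid chunk. A minimal sketch, not part of this commit:

// Illustrative only (not part of the diff): a Uint8Array is a BufferSource,
// so writing it does not error the stream.
async function compressValidChunk() {
  const cs = new CompressionStream('gzip');
  const writer = cs.writable.getWriter();
  writer.write(new TextEncoder().encode('Hello')); // resolves normally
  writer.close();
  const { value } = await cs.readable.getReader().read();
  return value; // first chunk of gzip-compressed bytes
}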
15 changes: 15 additions & 0 deletions
test/fixtures/wpt/compression/compression-constructor-error.tentative.any.js
@@ -0,0 +1,15 @@
// META: global=window,worker,shadowrealm

'use strict';

test(t => {
  assert_throws_js(TypeError, () => new CompressionStream('a'), 'constructor should throw');
}, '"a" should cause the constructor to throw');

test(t => {
  assert_throws_js(TypeError, () => new CompressionStream(), 'constructor should throw');
}, 'no input should cause the constructor to throw');

test(t => {
  assert_throws_js(Error, () => new CompressionStream({ toString() { throw Error(); } }), 'constructor should throw');
}, 'non-string input should cause the constructor to throw');
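Note: the format strings the constructor does accept are the three exercised throughout this commit. A quick sketch, not part of the diff:

// These are the formats defined by the Compression Streams spec; each of
// these constructs successfully rather than throwing.
new CompressionStream('gzip');
new CompressionStream('deflate');
new CompressionStream('deflate-raw');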
63 changes: 63 additions & 0 deletions
test/fixtures/wpt/compression/compression-including-empty-chunk.tentative.any.js
@@ -0,0 +1,63 @@
// META: global=window,worker,shadowrealm
// META: script=third_party/pako/pako_inflate.min.js
// META: timeout=long

'use strict';

// This test asserts that compressing '' doesn't affect the compressed data.
// Example: compressing ['Hello', '', 'Hello'] results in 'HelloHello'

async function compressChunkList(chunkList, format) {
  const cs = new CompressionStream(format);
  const writer = cs.writable.getWriter();
  for (const chunk of chunkList) {
    const chunkByte = new TextEncoder().encode(chunk);
    writer.write(chunkByte);
  }
  const closePromise = writer.close();
  const out = [];
  const reader = cs.readable.getReader();
  let totalSize = 0;
  while (true) {
    const { value, done } = await reader.read();
    if (done)
      break;
    out.push(value);
    totalSize += value.byteLength;
  }
  await closePromise;
  const concatenated = new Uint8Array(totalSize);
  let offset = 0;
  for (const array of out) {
    concatenated.set(array, offset);
    offset += array.byteLength;
  }
  return concatenated;
}

const chunkLists = [
  ['', 'Hello', 'Hello'],
  ['Hello', '', 'Hello'],
  ['Hello', 'Hello', '']
];
const expectedValue = new TextEncoder().encode('HelloHello');

for (const chunkList of chunkLists) {
  promise_test(async t => {
    const compressedData = await compressChunkList(chunkList, 'deflate');
    // decompress with pako, and check that we got the same result as our original string
    assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
  }, `the result of compressing [${chunkList}] with deflate should be 'HelloHello'`);

  promise_test(async t => {
    const compressedData = await compressChunkList(chunkList, 'gzip');
    // decompress with pako, and check that we got the same result as our original string
    assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
  }, `the result of compressing [${chunkList}] with gzip should be 'HelloHello'`);

  promise_test(async t => {
    const compressedData = await compressChunkList(chunkList, 'deflate-raw');
    // decompress with pako, and check that we got the same result as our original string
    assert_array_equals(expectedValue, pako.inflateRaw(compressedData), 'value should match');
  }, `the result of compressing [${chunkList}] with deflate-raw should be 'HelloHello'`);
}
41 changes: 41 additions & 0 deletions
test/fixtures/wpt/compression/compression-large-flush-output.any.js
@@ -0,0 +1,41 @@
// META: global=window,worker,shadowrealm
// META: script=third_party/pako/pako_inflate.min.js
// META: script=resources/concatenate-stream.js
// META: timeout=long

'use strict';

// This test verifies that a large flush output will not truncate the
// final results.

async function compressData(chunk, format) {
  const cs = new CompressionStream(format);
  const writer = cs.writable.getWriter();
  writer.write(chunk);
  writer.close();
  return await concatenateStream(cs.readable);
}

// JSON-encoded array of 10 thousand numbers ("[0,1,2,...]"). This produces 48_891 bytes of data.
const fullData = new TextEncoder().encode(JSON.stringify(Array.from({ length: 10_000 }, (_, i) => i)));
const data = fullData.subarray(0, 35_579);
const expectedValue = data;

promise_test(async t => {
  const compressedData = await compressData(data, 'deflate');
  // decompress with pako, and check that we got the same result as our original string
  assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
}, `deflate compression with large flush output`);

promise_test(async t => {
  const compressedData = await compressData(data, 'gzip');
  // decompress with pako, and check that we got the same result as our original string
  assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
}, `gzip compression with large flush output`);

promise_test(async t => {
  const compressedData = await compressData(data, 'deflate-raw');
  // decompress with pako, and check that we got the same result as our original string
  assert_array_equals(expectedValue, pako.inflateRaw(compressedData), 'value should match');
}, `deflate-raw compression with large flush output`);
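Note: the `concatenateStream` helper used above comes from `resources/concatenate-stream.js`, which is not shown in this diff. A plausible sketch, modeled on the inline read-and-concatenate loops in the other tests in this commit (the real helper may differ):

// Hypothetical reconstruction: drain a ReadableStream of Uint8Array chunks
// and join them into a single Uint8Array.
async function concatenateStream(readableStream) {
  const reader = readableStream.getReader();
  const chunks = [];
  let totalSize = 0;
  while (true) {
    const { value, done } = await reader.read();
    if (done)
      break;
    chunks.push(value);
    totalSize += value.byteLength;
  }
  const concatenated = new Uint8Array(totalSize);
  let offset = 0;
  for (const chunk of chunks) {
    concatenated.set(chunk, offset);
    offset += chunk.byteLength;
  }
  return concatenated;
}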
67 changes: 67 additions & 0 deletions
test/fixtures/wpt/compression/compression-multiple-chunks.tentative.any.js
@@ -0,0 +1,67 @@
// META: global=window,worker,shadowrealm
// META: script=third_party/pako/pako_inflate.min.js
// META: timeout=long

'use strict';

// This test asserts that compressing multiple chunks should work.

// Example: ('Hello', 3) => TextEncoder().encode('HelloHelloHello')
function makeExpectedChunk(input, numberOfChunks) {
  const expectedChunk = input.repeat(numberOfChunks);
  return new TextEncoder().encode(expectedChunk);
}

// Example: ('Hello', 3, 'deflate') => compress ['Hello', 'Hello', 'Hello']
async function compressMultipleChunks(input, numberOfChunks, format) {
  const cs = new CompressionStream(format);
  const writer = cs.writable.getWriter();
  const chunk = new TextEncoder().encode(input);
  for (let i = 0; i < numberOfChunks; ++i) {
    writer.write(chunk);
  }
  const closePromise = writer.close();
  const out = [];
  const reader = cs.readable.getReader();
  let totalSize = 0;
  while (true) {
    const { value, done } = await reader.read();
    if (done)
      break;
    out.push(value);
    totalSize += value.byteLength;
  }
  await closePromise;
  const concatenated = new Uint8Array(totalSize);
  let offset = 0;
  for (const array of out) {
    concatenated.set(array, offset);
    offset += array.byteLength;
  }
  return concatenated;
}

const hello = 'Hello';

for (let numberOfChunks = 2; numberOfChunks <= 16; ++numberOfChunks) {
  promise_test(async t => {
    const compressedData = await compressMultipleChunks(hello, numberOfChunks, 'deflate');
    const expectedValue = makeExpectedChunk(hello, numberOfChunks);
    // decompress with pako, and check that we got the same result as our original string
    assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
  }, `compressing ${numberOfChunks} chunks with deflate should work`);

  promise_test(async t => {
    const compressedData = await compressMultipleChunks(hello, numberOfChunks, 'gzip');
    const expectedValue = makeExpectedChunk(hello, numberOfChunks);
    // decompress with pako, and check that we got the same result as our original string
    assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
  }, `compressing ${numberOfChunks} chunks with gzip should work`);

  promise_test(async t => {
    const compressedData = await compressMultipleChunks(hello, numberOfChunks, 'deflate-raw');
    const expectedValue = makeExpectedChunk(hello, numberOfChunks);
    // decompress with pako, and check that we got the same result as our original string
    assert_array_equals(expectedValue, pako.inflateRaw(compressedData), 'value should match');
  }, `compressing ${numberOfChunks} chunks with deflate-raw should work`);
}
64 changes: 64 additions & 0 deletions
test/fixtures/wpt/compression/compression-output-length.tentative.any.js
@@ -0,0 +1,64 @@
// META: global=window,worker,shadowrealm

'use strict';

// This test asserts that compressed data length is shorter than the original
// data length. If the input is extremely small, the compressed data may be
// larger than the original data.

const LARGE_FILE = '/media/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.webm';

async function compressArrayBuffer(input, format) {
  const cs = new CompressionStream(format);
  const writer = cs.writable.getWriter();
  writer.write(input);
  const closePromise = writer.close();
  const out = [];
  const reader = cs.readable.getReader();
  let totalSize = 0;
  while (true) {
    const { value, done } = await reader.read();
    if (done)
      break;
    out.push(value);
    totalSize += value.byteLength;
  }
  await closePromise;
  const concatenated = new Uint8Array(totalSize);
  let offset = 0;
  for (const array of out) {
    concatenated.set(array, offset);
    offset += array.byteLength;
  }
  return concatenated;
}

promise_test(async () => {
  const response = await fetch(LARGE_FILE);
  const buffer = await response.arrayBuffer();
  const bufferView = new Uint8Array(buffer);
  const originalLength = bufferView.length;
  const compressedData = await compressArrayBuffer(bufferView, 'deflate');
  const compressedLength = compressedData.length;
  assert_less_than(compressedLength, originalLength, 'output should be smaller');
}, 'the length of deflated data should be shorter than that of the original data');

promise_test(async () => {
  const response = await fetch(LARGE_FILE);
  const buffer = await response.arrayBuffer();
  const bufferView = new Uint8Array(buffer);
  const originalLength = bufferView.length;
  const compressedData = await compressArrayBuffer(bufferView, 'gzip');
  const compressedLength = compressedData.length;
  assert_less_than(compressedLength, originalLength, 'output should be smaller');
}, 'the length of gzipped data should be shorter than that of the original data');

promise_test(async () => {
  const response = await fetch(LARGE_FILE);
  const buffer = await response.arrayBuffer();
  const bufferView = new Uint8Array(buffer);
  const originalLength = bufferView.length;
  const compressedData = await compressArrayBuffer(bufferView, 'deflate-raw');
  const compressedLength = compressedData.length;
  assert_less_than(compressedLength, originalLength, 'output should be smaller');
}, 'the length of deflated (with -raw) data should be shorter than that of the original data');