test,stream: enable compression WPTs
PR-URL: #50631
Reviewed-By: James M Snell <jasnell@gmail.com>
Reviewed-By: Matteo Collina <matteo.collina@gmail.com>
panva authored and targos committed Nov 23, 2023
1 parent 0bd694a commit 7ebc8c2
Showing 33 changed files with 1,448 additions and 7 deletions.
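The tests exercise the WHATWG Compression Streams API, which Node.js exposes through the CompressionStream and DecompressionStream globals. As a minimal sketch of the API under test (assuming Node.js 18+; this snippet is illustrative and not part of the diff):

'use strict';
// Round-trip 'hello' through the compression globals the WPTs exercise.
const { arrayBuffer } = require('node:stream/consumers');

async function roundTrip(text) {
  // Compress: queue the UTF-8 bytes, close the writable side, drain the readable.
  const cs = new CompressionStream('gzip');
  const writer = cs.writable.getWriter();
  writer.write(new TextEncoder().encode(text));
  writer.close();
  const compressed = new Uint8Array(await arrayBuffer(cs.readable));

  // Decompress the same bytes back to a string.
  const ds = new DecompressionStream('gzip');
  const writer2 = ds.writable.getWriter();
  writer2.write(compressed);
  writer2.close();
  return new TextDecoder().decode(await arrayBuffer(ds.readable));
}

roundTrip('hello').then(console.log); // 'hello'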
19 changes: 13 additions & 6 deletions test/common/wpt.js
@@ -210,6 +210,7 @@ class ResourceLoader {
      const data = await fsPromises.readFile(file);
      return {
        ok: true,
+        arrayBuffer() { return data.buffer; },
        json() { return JSON.parse(data.toString()); },
        text() { return data.toString(); },
      };
@@ -382,7 +383,7 @@ const kIntlRequirement = {
  // TODO(joyeecheung): we may need to deal with --with-intl=system-icu
};

-class IntlRequirement {
+class BuildRequirement {
  constructor() {
    this.currentIntl = kIntlRequirement.none;
    if (process.config.variables.v8_enable_i18n_support === 0) {
@@ -395,6 +396,9 @@
    } else {
      this.currentIntl = kIntlRequirement.full;
    }
+    // Not using common.hasCrypto because of the global leak checks
+    this.hasCrypto = Boolean(process.versions.openssl) &&
+      !process.env.NODE_SKIP_CRYPTO;
  }

  /**
@@ -409,11 +413,14 @@
    if (requires.has('small-icu') && current < kIntlRequirement.small) {
      return 'small-icu';
    }
+    if (requires.has('crypto') && !this.hasCrypto) {
+      return 'crypto';
+    }
    return false;
  }
}

-const intlRequirements = new IntlRequirement();
+const buildRequirements = new BuildRequirement();

class StatusLoader {
  /**
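With this hook in place, a status file can declare a crypto requirement the same way small-icu is declared; a hypothetical entry (illustrative test name, following the `requires` convention the runner checks against `spec.requires`):

{
  "example-test.tentative.any.js": {
    "requires": ["crypto"]
  }
}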
@@ -440,7 +447,7 @@ class StatusLoader {
        const list = this.grep(filepath);
        result = result.concat(list);
      } else {
-        if (!(/\.\w+\.js$/.test(filepath)) || filepath.endsWith('.helper.js')) {
+        if (!(/\.\w+\.js$/.test(filepath))) {
          continue;
        }
        result.push(filepath);
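The simplified filter keeps any file with a WPT-style double extension; a quick illustration of what /\.\w+\.js$/ accepts (illustrative paths):

'use strict';
// Same pattern as StatusLoader.grep() above.
const isCandidate = (p) => /\.\w+\.js$/.test(p);
console.log(isCandidate('compression-bad-chunks.tentative.any.js')); // true
console.log(isCandidate('idlharness.https.any.js')); // true
console.log(isCandidate('util.js')); // false (single extension)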
@@ -945,9 +952,9 @@ class WPTRunner {
        continue;
      }

-      const lackingIntl = intlRequirements.isLacking(spec.requires);
-      if (lackingIntl) {
-        this.skip(spec, [ `requires ${lackingIntl}` ]);
+      const lackingSupport = buildRequirements.isLacking(spec.requires);
+      if (lackingSupport) {
+        this.skip(spec, [ `requires ${lackingSupport}` ]);
        continue;
      }

1 change: 1 addition & 0 deletions test/fixtures/wpt/README.md
@@ -11,6 +11,7 @@ See [test/wpt](../../wpt/README.md) for information on how these tests are run.
Last update:

- common: https://github.com/web-platform-tests/wpt/tree/dbd648158d/common
+- compression: https://github.com/web-platform-tests/wpt/tree/c82521cfa5/compression
- console: https://github.com/web-platform-tests/wpt/tree/767ae35464/console
- dom/abort: https://github.com/web-platform-tests/wpt/tree/d1f1ecbd52/dom/abort
- dom/events: https://github.com/web-platform-tests/wpt/tree/ab8999891c/dom/events
3 changes: 3 additions & 0 deletions test/fixtures/wpt/compression/META.yml
@@ -0,0 +1,3 @@
spec: https://wicg.github.io/compression/
suggested_reviewers:
  - ricea
74 changes: 74 additions & 0 deletions test/fixtures/wpt/compression/compression-bad-chunks.tentative.any.js
@@ -0,0 +1,74 @@
// META: global=window,worker,shadowrealm

'use strict';

const badChunks = [
  {
    name: 'undefined',
    value: undefined
  },
  {
    name: 'null',
    value: null
  },
  {
    name: 'numeric',
    value: 3.14
  },
  {
    name: 'object, not BufferSource',
    value: {}
  },
  {
    name: 'array',
    value: [65]
  },
  {
    name: 'SharedArrayBuffer',
    // Use a getter to postpone construction so that all tests don't fail where
    // SharedArrayBuffer is not yet implemented.
    get value() {
      // See https://github.com/whatwg/html/issues/5380 for why not `new SharedArrayBuffer()`
      return new WebAssembly.Memory({ shared:true, initial:1, maximum:1 }).buffer;
    }
  },
  {
    name: 'shared Uint8Array',
    get value() {
      // See https://github.com/whatwg/html/issues/5380 for why not `new SharedArrayBuffer()`
      return new Uint8Array(new WebAssembly.Memory({ shared:true, initial:1, maximum:1 }).buffer)
    }
  },
];

for (const chunk of badChunks) {
  promise_test(async t => {
    const cs = new CompressionStream('gzip');
    const reader = cs.readable.getReader();
    const writer = cs.writable.getWriter();
    const writePromise = writer.write(chunk.value);
    const readPromise = reader.read();
    await promise_rejects_js(t, TypeError, writePromise, 'write should reject');
    await promise_rejects_js(t, TypeError, readPromise, 'read should reject');
  }, `chunk of type ${chunk.name} should error the stream for gzip`);

  promise_test(async t => {
    const cs = new CompressionStream('deflate');
    const reader = cs.readable.getReader();
    const writer = cs.writable.getWriter();
    const writePromise = writer.write(chunk.value);
    const readPromise = reader.read();
    await promise_rejects_js(t, TypeError, writePromise, 'write should reject');
    await promise_rejects_js(t, TypeError, readPromise, 'read should reject');
  }, `chunk of type ${chunk.name} should error the stream for deflate`);

  promise_test(async t => {
    const cs = new CompressionStream('deflate-raw');
    const reader = cs.readable.getReader();
    const writer = cs.writable.getWriter();
    const writePromise = writer.write(chunk.value);
    const readPromise = reader.read();
    await promise_rejects_js(t, TypeError, writePromise, 'write should reject');
    await promise_rejects_js(t, TypeError, readPromise, 'read should reject');
  }, `chunk of type ${chunk.name} should error the stream for deflate-raw`);
}
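Each case asserts both promises because one invalid chunk errors the whole transform: the write rejects and the error propagates to the readable side. A standalone sketch of the same behaviour (assuming Node.js 18+):

'use strict';
// A chunk that is not a BufferSource moves the stream to an errored state,
// so the pending read rejects with the same TypeError as the write.
const cs = new CompressionStream('gzip');
const writer = cs.writable.getWriter();
writer.write(null).catch((e) => console.log('write:', e instanceof TypeError)); // true
cs.readable.getReader().read().catch((e) => console.log('read:', e instanceof TypeError)); // true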
15 changes: 15 additions & 0 deletions test/fixtures/wpt/compression/compression-constructor-error.tentative.any.js
@@ -0,0 +1,15 @@
// META: global=window,worker,shadowrealm

'use strict';

test(t => {
  assert_throws_js(TypeError, () => new CompressionStream('a'), 'constructor should throw');
}, '"a" should cause the constructor to throw');

test(t => {
  assert_throws_js(TypeError, () => new CompressionStream(), 'constructor should throw');
}, 'no input should cause the constructor to throw');

test(t => {
  assert_throws_js(Error, () => new CompressionStream({ toString() { throw Error(); } }), 'constructor should throw');
}, 'non-string input should cause the constructor to throw');
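The last case pins down ordering: the format argument is stringified before it is validated, so a throwing toString() surfaces as-is rather than as a TypeError. Only the three formats named in the spec construct successfully (a sketch, assuming Node.js 18+):

'use strict';
// The three formats accepted by the Compression Streams spec.
for (const format of ['gzip', 'deflate', 'deflate-raw']) {
  new CompressionStream(format); // does not throw
}
try {
  new CompressionStream('br'); // brotli is not in the spec
} catch (e) {
  console.log(e instanceof TypeError); // true
}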
63 changes: 63 additions & 0 deletions test/fixtures/wpt/compression/compression-including-empty-chunk.any.js
@@ -0,0 +1,63 @@
// META: global=window,worker,shadowrealm
// META: script=third_party/pako/pako_inflate.min.js
// META: timeout=long

'use strict';

// This test asserts that compressing '' doesn't affect the compressed data.
// Example: compressing ['Hello', '', 'Hello'] results in 'HelloHello'

async function compressChunkList(chunkList, format) {
  const cs = new CompressionStream(format);
  const writer = cs.writable.getWriter();
  for (const chunk of chunkList) {
    const chunkByte = new TextEncoder().encode(chunk);
    writer.write(chunkByte);
  }
  const closePromise = writer.close();
  const out = [];
  const reader = cs.readable.getReader();
  let totalSize = 0;
  while (true) {
    const { value, done } = await reader.read();
    if (done)
      break;
    out.push(value);
    totalSize += value.byteLength;
  }
  await closePromise;
  const concatenated = new Uint8Array(totalSize);
  let offset = 0;
  for (const array of out) {
    concatenated.set(array, offset);
    offset += array.byteLength;
  }
  return concatenated;
}

const chunkLists = [
  ['', 'Hello', 'Hello'],
  ['Hello', '', 'Hello'],
  ['Hello', 'Hello', '']
];
const expectedValue = new TextEncoder().encode('HelloHello');

for (const chunkList of chunkLists) {
  promise_test(async t => {
    const compressedData = await compressChunkList(chunkList, 'deflate');
    // decompress with pako, and check that we got the same result as our original string
    assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
  }, `the result of compressing [${chunkList}] with deflate should be 'HelloHello'`);

  promise_test(async t => {
    const compressedData = await compressChunkList(chunkList, 'gzip');
    // decompress with pako, and check that we got the same result as our original string
    assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
  }, `the result of compressing [${chunkList}] with gzip should be 'HelloHello'`);

  promise_test(async t => {
    const compressedData = await compressChunkList(chunkList, 'deflate-raw');
    // decompress with pako, and check that we got the same result as our original string
    assert_array_equals(expectedValue, pako.inflateRaw(compressedData), 'value should match');
  }, `the result of compressing [${chunkList}] with deflate-raw should be 'HelloHello'`);
}
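The tests inflate with pako because they also run in browsers; under Node.js alone, the same check could lean on the built-in zlib instead (a sketch reusing compressChunkList from above; the inflate helper here is illustrative):

'use strict';
// Pick the zlib inflater matching each web compression format.
const zlib = require('node:zlib');

function inflate(compressed, format) {
  const inflaters = {
    'deflate': zlib.inflateSync,
    'gzip': zlib.gunzipSync,
    'deflate-raw': zlib.inflateRawSync,
  };
  return new Uint8Array(inflaters[format](compressed));
}
// e.g. inflate(await compressChunkList(['Hello', '', 'Hello'], 'gzip'), 'gzip')
// yields the bytes of 'HelloHello'.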
41 changes: 41 additions & 0 deletions test/fixtures/wpt/compression/compression-large-flush-output.any.js
@@ -0,0 +1,41 @@
// META: global=window,worker,shadowrealm
// META: script=third_party/pako/pako_inflate.min.js
// META: script=resources/concatenate-stream.js
// META: timeout=long

'use strict';

// This test verifies that a large flush output will not truncate the
// final results.

async function compressData(chunk, format) {
  const cs = new CompressionStream(format);
  const writer = cs.writable.getWriter();
  writer.write(chunk);
  writer.close();
  return await concatenateStream(cs.readable);
}

// JSON-encoded array of 10 thousand numbers ("[0,1,2,...]"). This produces 48_891 bytes of data.
const fullData = new TextEncoder().encode(JSON.stringify(Array.from({ length: 10_000 }, (_, i) => i)));
const data = fullData.subarray(0, 35_579);
const expectedValue = data;

promise_test(async t => {
  const compressedData = await compressData(data, 'deflate');
  // decompress with pako, and check that we got the same result as our original string
  assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
}, `deflate compression with large flush output`);

promise_test(async t => {
  const compressedData = await compressData(data, 'gzip');
  // decompress with pako, and check that we got the same result as our original string
  assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
}, `gzip compression with large flush output`);

promise_test(async t => {
  const compressedData = await compressData(data, 'deflate-raw');
  // decompress with pako, and check that we got the same result as our original string
  assert_array_equals(expectedValue, pako.inflateRaw(compressedData), 'value should match');
}, `deflate-raw compression with large flush output`);
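The concatenateStream helper is loaded from resources/concatenate-stream.js, which is not shown in this excerpt. Judging from the inline loops in the sibling tests, it plausibly drains the readable and joins the chunks, along these lines (an assumption, not the actual file):

'use strict';
// Plausible shape of the helper: read a ReadableStream of Uint8Array chunks
// to completion and concatenate them into a single Uint8Array.
async function concatenateStream(readableStream) {
  const reader = readableStream.getReader();
  const chunks = [];
  let totalSize = 0;
  while (true) {
    const { value, done } = await reader.read();
    if (done) break;
    chunks.push(value);
    totalSize += value.byteLength;
  }
  const concatenated = new Uint8Array(totalSize);
  let offset = 0;
  for (const chunk of chunks) {
    concatenated.set(chunk, offset);
    offset += chunk.byteLength;
  }
  return concatenated;
}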

67 changes: 67 additions & 0 deletions test/fixtures/wpt/compression/compression-multiple-chunks.tentative.any.js
@@ -0,0 +1,67 @@
// META: global=window,worker,shadowrealm
// META: script=third_party/pako/pako_inflate.min.js
// META: timeout=long

'use strict';

// This test asserts that compressing multiple chunks should work.

// Example: ('Hello', 3) => TextEncoder().encode('HelloHelloHello')
function makeExpectedChunk(input, numberOfChunks) {
  const expectedChunk = input.repeat(numberOfChunks);
  return new TextEncoder().encode(expectedChunk);
}

// Example: ('Hello', 3, 'deflate') => compress ['Hello', 'Hello', 'Hello']
async function compressMultipleChunks(input, numberOfChunks, format) {
  const cs = new CompressionStream(format);
  const writer = cs.writable.getWriter();
  const chunk = new TextEncoder().encode(input);
  for (let i = 0; i < numberOfChunks; ++i) {
    writer.write(chunk);
  }
  const closePromise = writer.close();
  const out = [];
  const reader = cs.readable.getReader();
  let totalSize = 0;
  while (true) {
    const { value, done } = await reader.read();
    if (done)
      break;
    out.push(value);
    totalSize += value.byteLength;
  }
  await closePromise;
  const concatenated = new Uint8Array(totalSize);
  let offset = 0;
  for (const array of out) {
    concatenated.set(array, offset);
    offset += array.byteLength;
  }
  return concatenated;
}

const hello = 'Hello';

for (let numberOfChunks = 2; numberOfChunks <= 16; ++numberOfChunks) {
  promise_test(async t => {
    const compressedData = await compressMultipleChunks(hello, numberOfChunks, 'deflate');
    const expectedValue = makeExpectedChunk(hello, numberOfChunks);
    // decompress with pako, and check that we got the same result as our original string
    assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
  }, `compressing ${numberOfChunks} chunks with deflate should work`);

  promise_test(async t => {
    const compressedData = await compressMultipleChunks(hello, numberOfChunks, 'gzip');
    const expectedValue = makeExpectedChunk(hello, numberOfChunks);
    // decompress with pako, and check that we got the same result as our original string
    assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
  }, `compressing ${numberOfChunks} chunks with gzip should work`);

  promise_test(async t => {
    const compressedData = await compressMultipleChunks(hello, numberOfChunks, 'deflate-raw');
    const expectedValue = makeExpectedChunk(hello, numberOfChunks);
    // decompress with pako, and check that we got the same result as our original string
    assert_array_equals(expectedValue, pako.inflateRaw(compressedData), 'value should match');
  }, `compressing ${numberOfChunks} chunks with deflate-raw should work`);
}
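The explicit writer/reader loop keeps the test close to the stream primitives under test; outside WPT, the same compression can be written more compactly with pipeThrough (a sketch, assuming the Blob and Response globals of Node.js 18+):

'use strict';
// Pipe a byte stream through the transform and let Response collect it.
async function compress(input, format) {
  const body = new Blob([input]).stream().pipeThrough(new CompressionStream(format));
  return new Uint8Array(await new Response(body).arrayBuffer());
}
// e.g. await compress('Hello'.repeat(3), 'gzip')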