forked from nodejs/node
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
test,stream: enable compression WPTs
PR-URL: nodejs#50631 Reviewed-By: James M Snell <[email protected]> Reviewed-By: Matteo Collina <[email protected]>
- Loading branch information
1 parent
314c8f9
commit 635a5c8
Showing
33 changed files
with
1,448 additions
and
7 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,3 @@ | ||
spec: https://wicg.github.io/compression/ | ||
suggested_reviewers: | ||
- ricea |
74 changes: 74 additions & 0 deletions
74
test/fixtures/wpt/compression/compression-bad-chunks.tentative.any.js
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,74 @@ | ||
// META: global=window,worker,shadowrealm

'use strict';

// Chunk values that CompressionStream must reject: anything that is not a
// BufferSource, plus BufferSources backed by shared memory.
const badChunks = [
  {
    name: 'undefined',
    value: undefined
  },
  {
    name: 'null',
    value: null
  },
  {
    name: 'numeric',
    value: 3.14
  },
  {
    name: 'object, not BufferSource',
    value: {}
  },
  {
    name: 'array',
    value: [65]
  },
  {
    name: 'SharedArrayBuffer',
    // Use a getter to postpone construction so that all tests don't fail where
    // SharedArrayBuffer is not yet implemented.
    get value() {
      // See https://github.com/whatwg/html/issues/5380 for why not `new SharedArrayBuffer()`
      return new WebAssembly.Memory({ shared:true, initial:1, maximum:1 }).buffer;
    }
  },
  {
    name: 'shared Uint8Array',
    get value() {
      // See https://github.com/whatwg/html/issues/5380 for why not `new SharedArrayBuffer()`
      return new Uint8Array(new WebAssembly.Memory({ shared:true, initial:1, maximum:1 }).buffer)
    }
  },
];

// All three supported formats behave identically for bad chunks, so one
// parameterized loop replaces three near-identical test bodies. The
// registered test names and order are unchanged.
for (const chunk of badChunks) {
  for (const format of ['gzip', 'deflate', 'deflate-raw']) {
    promise_test(async t => {
      const cs = new CompressionStream(format);
      const reader = cs.readable.getReader();
      const writer = cs.writable.getWriter();
      const writePromise = writer.write(chunk.value);
      const readPromise = reader.read();
      // A bad chunk errors the whole stream: both pending promises reject.
      await promise_rejects_js(t, TypeError, writePromise, 'write should reject');
      await promise_rejects_js(t, TypeError, readPromise, 'read should reject');
    }, `chunk of type ${chunk.name} should error the stream for ${format}`);
  }
}
15 changes: 15 additions & 0 deletions
15
test/fixtures/wpt/compression/compression-constructor-error.tentative.any.js
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,15 @@ | ||
// META: global=window,worker,shadowrealm

'use strict';

// Invalid arguments for the CompressionStream constructor, together with
// the error type each one must produce and the name the test registers under.
const badConstructorArgs = [
  {
    description: '"a"',
    errorType: TypeError,
    construct: () => new CompressionStream('a'),
  },
  {
    description: 'no input',
    errorType: TypeError,
    construct: () => new CompressionStream(),
  },
  {
    description: 'non-string input',
    errorType: Error,
    construct: () => new CompressionStream({ toString() { throw Error(); } }),
  },
];

for (const { description, errorType, construct } of badConstructorArgs) {
  test(t => {
    assert_throws_js(errorType, construct, 'constructor should throw');
  }, `${description} should cause the constructor to throw`);
}
63 changes: 63 additions & 0 deletions
63
test/fixtures/wpt/compression/compression-including-empty-chunk.tentative.any.js
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,63 @@ | ||
// META: global=window,worker,shadowrealm
// META: script=third_party/pako/pako_inflate.min.js
// META: timeout=long

'use strict';

// This test asserts that compressing '' doesn't affect the compressed data.
// Example: compressing ['Hello', '', 'Hello'] results in 'HelloHello'

// Compresses each string in |chunkList| as a separate chunk through a
// CompressionStream of the given |format| and returns the concatenated
// compressed output as a single Uint8Array.
async function compressChunkList(chunkList, format) {
  const cs = new CompressionStream(format);
  const writer = cs.writable.getWriter();
  // One encoder serves every chunk; no need to construct it per iteration.
  const encoder = new TextEncoder();
  const writePromises = [];
  for (const chunk of chunkList) {
    writePromises.push(writer.write(encoder.encode(chunk)));
  }
  const closePromise = writer.close();
  const out = [];
  const reader = cs.readable.getReader();
  let totalSize = 0;
  while (true) {
    const { value, done } = await reader.read();
    if (done)
      break;
    out.push(value);
    totalSize += value.byteLength;
  }
  // Await the write promises (previously left floating) together with close,
  // so any failure surfaces here instead of as an unhandled rejection.
  await Promise.all([...writePromises, closePromise]);
  const concatenated = new Uint8Array(totalSize);
  let offset = 0;
  for (const array of out) {
    concatenated.set(array, offset);
    offset += array.byteLength;
  }
  return concatenated;
}
|
||
// Each list carries an empty chunk at a different position; the empty chunk
// must not change the compressed result.
const chunkLists = [
  ['', 'Hello', 'Hello'],
  ['Hello', '', 'Hello'],
  ['Hello', 'Hello', '']
];
const expectedValue = new TextEncoder().encode('HelloHello');

// pako needs inflateRaw for deflate-raw output (it has no zlib/gzip header);
// plain inflate handles both deflate and gzip.
const formatInflaters = [
  ['deflate', (data) => pako.inflate(data)],
  ['gzip', (data) => pako.inflate(data)],
  ['deflate-raw', (data) => pako.inflateRaw(data)],
];

// One parameterized loop replaces three near-identical test bodies; the
// registered test names and order are unchanged.
for (const chunkList of chunkLists) {
  for (const [format, inflate] of formatInflaters) {
    promise_test(async t => {
      const compressedData = await compressChunkList(chunkList, format);
      // decompress with pako, and check that we got the same result as our original string
      assert_array_equals(expectedValue, inflate(compressedData), 'value should match');
    }, `the result of compressing [${chunkList}] with ${format} should be 'HelloHello'`);
  }
}
41 changes: 41 additions & 0 deletions
41
test/fixtures/wpt/compression/compression-large-flush-output.any.js
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,41 @@ | ||
// META: global=window,worker,shadowrealm
// META: script=third_party/pako/pako_inflate.min.js
// META: script=resources/concatenate-stream.js
// META: timeout=long

'use strict';

// This test verifies that a large flush output will not truncate the
// final results.

// Compresses |chunk| through a CompressionStream of the given |format| and
// returns the whole compressed output as a single Uint8Array (via the
// concatenate-stream.js helper).
async function compressData(chunk, format) {
  const cs = new CompressionStream(format);
  const writer = cs.writable.getWriter();
  const writePromise = writer.write(chunk);
  const closePromise = writer.close();
  const result = await concatenateStream(cs.readable);
  // Await the write/close promises (previously left floating) so a failure
  // surfaces here instead of as an unhandled rejection.
  await Promise.all([writePromise, closePromise]);
  return result;
}
|
||
// JSON-encoded array of 10 thousand numbers ("[0,1,2,...]"). This produces 48_891 bytes of data.
const fullData = new TextEncoder().encode(JSON.stringify(Array.from({ length: 10_000 }, (_, i) => i)));
// Truncated so the final flush must emit a large burst of buffered output.
const data = fullData.subarray(0, 35_579);
const expectedValue = data;

// pako needs inflateRaw for deflate-raw output (it has no zlib/gzip header);
// plain inflate handles both deflate and gzip.
const testCases = [
  ['deflate', (input) => pako.inflate(input)],
  ['gzip', (input) => pako.inflate(input)],
  ['deflate-raw', (input) => pako.inflateRaw(input)],
];

// One parameterized loop replaces three near-identical test bodies; the
// registered test names and order are unchanged.
for (const [format, inflate] of testCases) {
  promise_test(async t => {
    const compressedData = await compressData(data, format);
    // decompress with pako, and check that we got the same result as our original data
    assert_array_equals(expectedValue, inflate(compressedData), 'value should match');
  }, `${format} compression with large flush output`);
}
|
67 changes: 67 additions & 0 deletions
67
test/fixtures/wpt/compression/compression-multiple-chunks.tentative.any.js
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,67 @@ | ||
// META: global=window,worker,shadowrealm
// META: script=third_party/pako/pako_inflate.min.js
// META: timeout=long

'use strict';

// This test asserts that compressing multiple chunks should work.

// Returns the UTF-8 encoding of |input| repeated |numberOfChunks| times.
// Example: ('Hello', 3) => TextEncoder().encode('HelloHelloHello')
function makeExpectedChunk(input, numberOfChunks) {
  return new TextEncoder().encode(input.repeat(numberOfChunks));
}
|
||
// Compresses |numberOfChunks| copies of |input| as separate chunks through a
// CompressionStream of the given |format| and returns the concatenated
// compressed output as a single Uint8Array.
// Example: ('Hello', 3, 'deflate') => compress ['Hello', 'Hello', 'Hello']
async function compressMultipleChunks(input, numberOfChunks, format) {
  const cs = new CompressionStream(format);
  const writer = cs.writable.getWriter();
  const chunk = new TextEncoder().encode(input);
  const writePromises = [];
  for (let i = 0; i < numberOfChunks; ++i) {
    writePromises.push(writer.write(chunk));
  }
  const closePromise = writer.close();
  const out = [];
  const reader = cs.readable.getReader();
  let totalSize = 0;
  while (true) {
    const { value, done } = await reader.read();
    if (done)
      break;
    out.push(value);
    totalSize += value.byteLength;
  }
  // Await the write promises (previously left floating) together with close,
  // so any failure surfaces here instead of as an unhandled rejection.
  await Promise.all([...writePromises, closePromise]);
  const concatenated = new Uint8Array(totalSize);
  let offset = 0;
  for (const array of out) {
    concatenated.set(array, offset);
    offset += array.byteLength;
  }
  return concatenated;
}
|
||
const hello = 'Hello';

// pako needs inflateRaw for deflate-raw output (it has no zlib/gzip header);
// plain inflate handles both deflate and gzip.
const formatCases = [
  ['deflate', (data) => pako.inflate(data)],
  ['gzip', (data) => pako.inflate(data)],
  ['deflate-raw', (data) => pako.inflateRaw(data)],
];

// One parameterized loop replaces three near-identical test bodies; the
// registered test names and order are unchanged.
for (let numberOfChunks = 2; numberOfChunks <= 16; ++numberOfChunks) {
  // The expected output depends only on the chunk count, so build it once
  // per count instead of once per (count, format) pair.
  const expectedValue = makeExpectedChunk(hello, numberOfChunks);
  for (const [format, inflate] of formatCases) {
    promise_test(async t => {
      const compressedData = await compressMultipleChunks(hello, numberOfChunks, format);
      // decompress with pako, and check that we got the same result as our original string
      assert_array_equals(expectedValue, inflate(compressedData), 'value should match');
    }, `compressing ${numberOfChunks} chunks with ${format} should work`);
  }
}
Oops, something went wrong.