diff --git a/test/common/wpt.js b/test/common/wpt.js index 7d0b030614b003..a65a0740d155d8 100644 --- a/test/common/wpt.js +++ b/test/common/wpt.js @@ -210,6 +210,7 @@ class ResourceLoader { const data = await fsPromises.readFile(file); return { ok: true, + arrayBuffer() { return data.buffer; }, json() { return JSON.parse(data.toString()); }, text() { return data.toString(); }, }; @@ -382,7 +383,7 @@ const kIntlRequirement = { // TODO(joyeecheung): we may need to deal with --with-intl=system-icu }; -class IntlRequirement { +class BuildRequirement { constructor() { this.currentIntl = kIntlRequirement.none; if (process.config.variables.v8_enable_i18n_support === 0) { @@ -395,6 +396,9 @@ class IntlRequirement { } else { this.currentIntl = kIntlRequirement.full; } + // Not using common.hasCrypto because of the global leak checks + this.hasCrypto = Boolean(process.versions.openssl) && + !process.env.NODE_SKIP_CRYPTO; } /** @@ -409,11 +413,14 @@ class IntlRequirement { if (requires.has('small-icu') && current < kIntlRequirement.small) { return 'small-icu'; } + if (requires.has('crypto') && !this.hasCrypto) { + return 'crypto'; + } return false; } } -const intlRequirements = new IntlRequirement(); +const buildRequirements = new BuildRequirement(); class StatusLoader { /** @@ -440,7 +447,7 @@ class StatusLoader { const list = this.grep(filepath); result = result.concat(list); } else { - if (!(/\.\w+\.js$/.test(filepath)) || filepath.endsWith('.helper.js')) { + if (!(/\.\w+\.js$/.test(filepath))) { continue; } result.push(filepath); @@ -945,9 +952,9 @@ class WPTRunner { continue; } - const lackingIntl = intlRequirements.isLacking(spec.requires); - if (lackingIntl) { - this.skip(spec, [ `requires ${lackingIntl}` ]); + const lackingSupport = buildRequirements.isLacking(spec.requires); + if (lackingSupport) { + this.skip(spec, [ `requires ${lackingSupport}` ]); continue; } diff --git a/test/fixtures/wpt/README.md b/test/fixtures/wpt/README.md index 6404571a498200..1c7121feb30c9e 100644 --- a/test/fixtures/wpt/README.md +++ b/test/fixtures/wpt/README.md @@ -11,6 +11,7 @@ See [test/wpt](../../wpt/README.md) for information on how these tests are run. 
Last update: - common: https://github.com/web-platform-tests/wpt/tree/dbd648158d/common +- compression: https://github.com/web-platform-tests/wpt/tree/c82521cfa5/compression - console: https://github.com/web-platform-tests/wpt/tree/767ae35464/console - dom/abort: https://github.com/web-platform-tests/wpt/tree/d1f1ecbd52/dom/abort - dom/events: https://github.com/web-platform-tests/wpt/tree/ab8999891c/dom/events diff --git a/test/fixtures/wpt/compression/META.yml b/test/fixtures/wpt/compression/META.yml new file mode 100644 index 00000000000000..0afbe29a53e807 --- /dev/null +++ b/test/fixtures/wpt/compression/META.yml @@ -0,0 +1,3 @@ +spec: https://wicg.github.io/compression/ +suggested_reviewers: + - ricea diff --git a/test/fixtures/wpt/compression/compression-bad-chunks.tentative.any.js b/test/fixtures/wpt/compression/compression-bad-chunks.tentative.any.js new file mode 100644 index 00000000000000..2d0b5684733930 --- /dev/null +++ b/test/fixtures/wpt/compression/compression-bad-chunks.tentative.any.js @@ -0,0 +1,74 @@ +// META: global=window,worker,shadowrealm + +'use strict'; + +const badChunks = [ + { + name: 'undefined', + value: undefined + }, + { + name: 'null', + value: null + }, + { + name: 'numeric', + value: 3.14 + }, + { + name: 'object, not BufferSource', + value: {} + }, + { + name: 'array', + value: [65] + }, + { + name: 'SharedArrayBuffer', + // Use a getter to postpone construction so that all tests don't fail where + // SharedArrayBuffer is not yet implemented. + get value() { + // See https://github.com/whatwg/html/issues/5380 for why not `new SharedArrayBuffer()` + return new WebAssembly.Memory({ shared:true, initial:1, maximum:1 }).buffer; + } + }, + { + name: 'shared Uint8Array', + get value() { + // See https://github.com/whatwg/html/issues/5380 for why not `new SharedArrayBuffer()` + return new Uint8Array(new WebAssembly.Memory({ shared:true, initial:1, maximum:1 }).buffer) + } + }, +]; + +for (const chunk of badChunks) { + promise_test(async t => { + const cs = new CompressionStream('gzip'); + const reader = cs.readable.getReader(); + const writer = cs.writable.getWriter(); + const writePromise = writer.write(chunk.value); + const readPromise = reader.read(); + await promise_rejects_js(t, TypeError, writePromise, 'write should reject'); + await promise_rejects_js(t, TypeError, readPromise, 'read should reject'); + }, `chunk of type ${chunk.name} should error the stream for gzip`); + + promise_test(async t => { + const cs = new CompressionStream('deflate'); + const reader = cs.readable.getReader(); + const writer = cs.writable.getWriter(); + const writePromise = writer.write(chunk.value); + const readPromise = reader.read(); + await promise_rejects_js(t, TypeError, writePromise, 'write should reject'); + await promise_rejects_js(t, TypeError, readPromise, 'read should reject'); + }, `chunk of type ${chunk.name} should error the stream for deflate`); + + promise_test(async t => { + const cs = new CompressionStream('deflate-raw'); + const reader = cs.readable.getReader(); + const writer = cs.writable.getWriter(); + const writePromise = writer.write(chunk.value); + const readPromise = reader.read(); + await promise_rejects_js(t, TypeError, writePromise, 'write should reject'); + await promise_rejects_js(t, TypeError, readPromise, 'read should reject'); + }, `chunk of type ${chunk.name} should error the stream for deflate-raw`); +} diff --git a/test/fixtures/wpt/compression/compression-constructor-error.tentative.any.js 
b/test/fixtures/wpt/compression/compression-constructor-error.tentative.any.js new file mode 100644 index 00000000000000..b39ab93bd02aba --- /dev/null +++ b/test/fixtures/wpt/compression/compression-constructor-error.tentative.any.js @@ -0,0 +1,15 @@ +// META: global=window,worker,shadowrealm + +'use strict'; + +test(t => { + assert_throws_js(TypeError, () => new CompressionStream('a'), 'constructor should throw'); +}, '"a" should cause the constructor to throw'); + +test(t => { + assert_throws_js(TypeError, () => new CompressionStream(), 'constructor should throw'); +}, 'no input should cause the constructor to throw'); + +test(t => { + assert_throws_js(Error, () => new CompressionStream({ toString() { throw Error(); } }), 'constructor should throw'); +}, 'non-string input should cause the constructor to throw'); diff --git a/test/fixtures/wpt/compression/compression-including-empty-chunk.tentative.any.js b/test/fixtures/wpt/compression/compression-including-empty-chunk.tentative.any.js new file mode 100644 index 00000000000000..a7fd1ceb24f086 --- /dev/null +++ b/test/fixtures/wpt/compression/compression-including-empty-chunk.tentative.any.js @@ -0,0 +1,63 @@ +// META: global=window,worker,shadowrealm +// META: script=third_party/pako/pako_inflate.min.js +// META: timeout=long + +'use strict'; + +// This test asserts that compressing '' doesn't affect the compressed data. +// Example: compressing ['Hello', '', 'Hello'] results in 'HelloHello' + +async function compressChunkList(chunkList, format) { + const cs = new CompressionStream(format); + const writer = cs.writable.getWriter(); + for (const chunk of chunkList) { + const chunkByte = new TextEncoder().encode(chunk); + writer.write(chunkByte); + } + const closePromise = writer.close(); + const out = []; + const reader = cs.readable.getReader(); + let totalSize = 0; + while (true) { + const { value, done } = await reader.read(); + if (done) + break; + out.push(value); + totalSize += value.byteLength; + } + await closePromise; + const concatenated = new Uint8Array(totalSize); + let offset = 0; + for (const array of out) { + concatenated.set(array, offset); + offset += array.byteLength; + } + return concatenated; +} + +const chunkLists = [ + ['', 'Hello', 'Hello'], + ['Hello', '', 'Hello'], + ['Hello', 'Hello', ''] +]; +const expectedValue = new TextEncoder().encode('HelloHello'); + +for (const chunkList of chunkLists) { + promise_test(async t => { + const compressedData = await compressChunkList(chunkList, 'deflate'); + // decompress with pako, and check that we got the same result as our original string + assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match'); + }, `the result of compressing [${chunkList}] with deflate should be 'HelloHello'`); + + promise_test(async t => { + const compressedData = await compressChunkList(chunkList, 'gzip'); + // decompress with pako, and check that we got the same result as our original string + assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match'); + }, `the result of compressing [${chunkList}] with gzip should be 'HelloHello'`); + + promise_test(async t => { + const compressedData = await compressChunkList(chunkList, 'deflate-raw'); + // decompress with pako, and check that we got the same result as our original string + assert_array_equals(expectedValue, pako.inflateRaw(compressedData), 'value should match'); + }, `the result of compressing [${chunkList}] with deflate-raw should be 'HelloHello'`); +} diff --git 
a/test/fixtures/wpt/compression/compression-large-flush-output.any.js b/test/fixtures/wpt/compression/compression-large-flush-output.any.js new file mode 100644 index 00000000000000..6afcb4d52875b9 --- /dev/null +++ b/test/fixtures/wpt/compression/compression-large-flush-output.any.js @@ -0,0 +1,41 @@ +// META: global=window,worker,shadowrealm +// META: script=third_party/pako/pako_inflate.min.js +// META: script=resources/concatenate-stream.js +// META: timeout=long + +'use strict'; + +// This test verifies that a large flush output will not truncate the +// final results. + +async function compressData(chunk, format) { + const cs = new CompressionStream(format); + const writer = cs.writable.getWriter(); + writer.write(chunk); + writer.close(); + return await concatenateStream(cs.readable); +} + +// JSON-encoded array of 10 thousand numbers ("[0,1,2,...]"). This produces 48_891 bytes of data. +const fullData = new TextEncoder().encode(JSON.stringify(Array.from({ length: 10_000 }, (_, i) => i))); +const data = fullData.subarray(0, 35_579); +const expectedValue = data; + +promise_test(async t => { + const compressedData = await compressData(data, 'deflate'); + // decompress with pako, and check that we got the same result as our original string + assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match'); +}, `deflate compression with large flush output`); + +promise_test(async t => { + const compressedData = await compressData(data, 'gzip'); + // decompress with pako, and check that we got the same result as our original string + assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match'); +}, `gzip compression with large flush output`); + +promise_test(async t => { + const compressedData = await compressData(data, 'deflate-raw'); + // decompress with pako, and check that we got the same result as our original string + assert_array_equals(expectedValue, pako.inflateRaw(compressedData), 'value should match'); +}, `deflate-raw compression with large flush output`); + diff --git a/test/fixtures/wpt/compression/compression-multiple-chunks.tentative.any.js b/test/fixtures/wpt/compression/compression-multiple-chunks.tentative.any.js new file mode 100644 index 00000000000000..28a90e5ca53902 --- /dev/null +++ b/test/fixtures/wpt/compression/compression-multiple-chunks.tentative.any.js @@ -0,0 +1,67 @@ +// META: global=window,worker,shadowrealm +// META: script=third_party/pako/pako_inflate.min.js +// META: timeout=long + +'use strict'; + +// This test asserts that compressing multiple chunks should work.
+ +// Example: ('Hello', 3) => TextEncoder().encode('HelloHelloHello') +function makeExpectedChunk(input, numberOfChunks) { + const expectedChunk = input.repeat(numberOfChunks); + return new TextEncoder().encode(expectedChunk); +} + +// Example: ('Hello', 3, 'deflate') => compress ['Hello', 'Hello', 'Hello'] +async function compressMultipleChunks(input, numberOfChunks, format) { + const cs = new CompressionStream(format); + const writer = cs.writable.getWriter(); + const chunk = new TextEncoder().encode(input); + for (let i = 0; i < numberOfChunks; ++i) { + writer.write(chunk); + } + const closePromise = writer.close(); + const out = []; + const reader = cs.readable.getReader(); + let totalSize = 0; + while (true) { + const { value, done } = await reader.read(); + if (done) + break; + out.push(value); + totalSize += value.byteLength; + } + await closePromise; + const concatenated = new Uint8Array(totalSize); + let offset = 0; + for (const array of out) { + concatenated.set(array, offset); + offset += array.byteLength; + } + return concatenated; +} + +const hello = 'Hello'; + +for (let numberOfChunks = 2; numberOfChunks <= 16; ++numberOfChunks) { + promise_test(async t => { + const compressedData = await compressMultipleChunks(hello, numberOfChunks, 'deflate'); + const expectedValue = makeExpectedChunk(hello, numberOfChunks); + // decompress with pako, and check that we got the same result as our original string + assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match'); + }, `compressing ${numberOfChunks} chunks with deflate should work`); + + promise_test(async t => { + const compressedData = await compressMultipleChunks(hello, numberOfChunks, 'gzip'); + const expectedValue = makeExpectedChunk(hello, numberOfChunks); + // decompress with pako, and check that we got the same result as our original string + assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match'); + }, `compressing ${numberOfChunks} chunks with gzip should work`); + + promise_test(async t => { + const compressedData = await compressMultipleChunks(hello, numberOfChunks, 'deflate-raw'); + const expectedValue = makeExpectedChunk(hello, numberOfChunks); + // decompress with pako, and check that we got the same result as our original string + assert_array_equals(expectedValue, pako.inflateRaw(compressedData), 'value should match'); + }, `compressing ${numberOfChunks} chunks with deflate-raw should work`); +} diff --git a/test/fixtures/wpt/compression/compression-output-length.tentative.any.js b/test/fixtures/wpt/compression/compression-output-length.tentative.any.js new file mode 100644 index 00000000000000..7aa13734500d26 --- /dev/null +++ b/test/fixtures/wpt/compression/compression-output-length.tentative.any.js @@ -0,0 +1,64 @@ +// META: global=window,worker,shadowrealm + +'use strict'; + +// This test asserts that compressed data length is shorter than the original +// data length. If the input is extremely small, the compressed data may be +// larger than the original data.
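+// (For instance, the gzip framing alone adds 18 bytes: a 10-byte header plus
+// an 8-byte trailer carrying the CRC-32 and the input size.)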
+ +const LARGE_FILE = '/media/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.webm'; + +async function compressArrayBuffer(input, format) { + const cs = new CompressionStream(format); + const writer = cs.writable.getWriter(); + writer.write(input); + const closePromise = writer.close(); + const out = []; + const reader = cs.readable.getReader(); + let totalSize = 0; + while (true) { + const { value, done } = await reader.read(); + if (done) + break; + out.push(value); + totalSize += value.byteLength; + } + await closePromise; + const concatenated = new Uint8Array(totalSize); + let offset = 0; + for (const array of out) { + concatenated.set(array, offset); + offset += array.byteLength; + } + return concatenated; +} + +promise_test(async () => { + const response = await fetch(LARGE_FILE); + const buffer = await response.arrayBuffer(); + const bufferView = new Uint8Array(buffer); + const originalLength = bufferView.length; + const compressedData = await compressArrayBuffer(bufferView, 'deflate'); + const compressedLength = compressedData.length; + assert_less_than(compressedLength, originalLength, 'output should be smaller'); +}, 'the length of deflated data should be shorter than that of the original data'); + +promise_test(async () => { + const response = await fetch(LARGE_FILE); + const buffer = await response.arrayBuffer(); + const bufferView = new Uint8Array(buffer); + const originalLength = bufferView.length; + const compressedData = await compressArrayBuffer(bufferView, 'gzip'); + const compressedLength = compressedData.length; + assert_less_than(compressedLength, originalLength, 'output should be smaller'); +}, 'the length of gzipped data should be shorter than that of the original data'); + +promise_test(async () => { + const response = await fetch(LARGE_FILE); + const buffer = await response.arrayBuffer(); + const bufferView = new Uint8Array(buffer); + const originalLength = bufferView.length; + const compressedData = await compressArrayBuffer(bufferView, 'deflate-raw'); + const compressedLength = compressedData.length; + assert_less_than(compressedLength, originalLength, 'output should be smaller'); +}, 'the length of deflated (with -raw) data should be shorter than that of the original data'); diff --git a/test/fixtures/wpt/compression/compression-stream.tentative.any.js b/test/fixtures/wpt/compression/compression-stream.tentative.any.js new file mode 100644 index 00000000000000..a7ea0cb908402f --- /dev/null +++ b/test/fixtures/wpt/compression/compression-stream.tentative.any.js @@ -0,0 +1,91 @@ +// META: global=window,worker,shadowrealm +// META: script=third_party/pako/pako_inflate.min.js +// META: timeout=long + +'use strict'; + +const SMALL_FILE = "/media/foo.vtt"; +const LARGE_FILE = "/media/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.webm"; + +async function compressArrayBuffer(input, format) { + const cs = new CompressionStream(format); + const writer = cs.writable.getWriter(); + writer.write(input); + const closePromise = writer.close(); + const out = []; + const reader = cs.readable.getReader(); + let totalSize = 0; + while (true) { + const { value, done } = await reader.read(); + if (done) + break; + out.push(value); + totalSize += value.byteLength; + } + await closePromise; + const concatenated = new Uint8Array(totalSize); + let offset = 0; + for (const array of out) { + concatenated.set(array, offset); + offset += array.byteLength; + } + return concatenated; +} + +test(() => { + assert_throws_js(TypeError, () => { + const transformer = new CompressionStream("nonvalid"); 
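+ // The result is deliberately unused; the assertion is only about the throw.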
+ }, "non supported format should throw"); +}, "CompressionStream constructor should throw on invalid format"); + +promise_test(async () => { + const buffer = new ArrayBuffer(0); + const bufferView = new Uint8Array(buffer); + const compressedData = await compressArrayBuffer(bufferView, "deflate"); + // decompress with pako, and check that we got the same result as our original string + assert_array_equals(bufferView, pako.inflate(compressedData)); +}, "deflated empty data should be reinflated back to its origin"); + +promise_test(async () => { + const response = await fetch(SMALL_FILE) + const buffer = await response.arrayBuffer(); + const bufferView = new Uint8Array(buffer); + const compressedData = await compressArrayBuffer(bufferView, "deflate"); + // decompress with pako, and check that we got the same result as our original string + assert_array_equals(bufferView, pako.inflate(compressedData)); +}, "deflated small amount data should be reinflated back to its origin"); + +promise_test(async () => { + const response = await fetch(LARGE_FILE) + const buffer = await response.arrayBuffer(); + const bufferView = new Uint8Array(buffer); + const compressedData = await compressArrayBuffer(bufferView, "deflate"); + // decompress with pako, and check that we got the same result as our original string + assert_array_equals(bufferView, pako.inflate(compressedData)); +}, "deflated large amount data should be reinflated back to its origin"); + +promise_test(async () => { + const buffer = new ArrayBuffer(0); + const bufferView = new Uint8Array(buffer); + const compressedData = await compressArrayBuffer(bufferView, "gzip"); + // decompress with pako, and check that we got the same result as our original string + assert_array_equals(bufferView, pako.inflate(compressedData)); +}, "gzipped empty data should be reinflated back to its origin"); + +promise_test(async () => { + const response = await fetch(SMALL_FILE) + const buffer = await response.arrayBuffer(); + const bufferView = new Uint8Array(buffer); + const compressedData = await compressArrayBuffer(bufferView, "gzip"); + // decompress with pako, and check that we got the same result as our original string + assert_array_equals(bufferView, pako.inflate(compressedData)); +}, "gzipped small amount data should be reinflated back to its origin"); + +promise_test(async () => { + const response = await fetch(LARGE_FILE) + const buffer = await response.arrayBuffer(); + const bufferView = new Uint8Array(buffer); + const compressedData = await compressArrayBuffer(bufferView, "gzip"); + // decompress with pako, and check that we got the same result as our original string + assert_array_equals(bufferView, pako.inflate(compressedData)); +}, "gzipped large amount data should be reinflated back to its origin"); diff --git a/test/fixtures/wpt/compression/compression-with-detach.tentative.window.js b/test/fixtures/wpt/compression/compression-with-detach.tentative.window.js new file mode 100644 index 00000000000000..465feaa47d4e9a --- /dev/null +++ b/test/fixtures/wpt/compression/compression-with-detach.tentative.window.js @@ -0,0 +1,55 @@ +// META: global=window,worker,shadowrealm +// META: script=resources/concatenate-stream.js + +'use strict'; + +const kInputLength = 500000; + +function createLargeRandomInput() { + const buffer = new ArrayBuffer(kInputLength); + // The getRandomValues API will only let us get 65536 bytes at a time, so call + // it multiple times. 
+ const kChunkSize = 65536; + for (let offset = 0; offset < kInputLength; offset += kChunkSize) { + const length = + offset + kChunkSize > kInputLength ? kInputLength - offset : kChunkSize; + const view = new Uint8Array(buffer, offset, length); + crypto.getRandomValues(view); + } + return new Uint8Array(buffer); +} + +function decompress(view) { + const ds = new DecompressionStream('deflate'); + const writer = ds.writable.getWriter(); + writer.write(view); + writer.close(); + return concatenateStream(ds.readable); +} + +promise_test(async () => { + const input = createLargeRandomInput(); + const inputCopy = input.slice(0, input.byteLength); + const cs = new CompressionStream('deflate'); + const writer = cs.writable.getWriter(); + writer.write(input); + writer.close(); + // Object.prototype.then will be looked up synchronously when the promise + // returned by read() is resolved. + Object.defineProperty(Object.prototype, 'then', { + get() { + // Cause input to become detached and unreferenced. + try { + postMessage(undefined, 'nowhere', [input.buffer]); + } catch (e) { + // It's already detached. + } + } + }); + const output = await concatenateStream(cs.readable); + // Perform the comparison as strings since this is reasonably fast even when + // JITted JavaScript is running under an emulator. + assert_equals( + inputCopy.toString(), (await decompress(output)).toString(), + 'decompressing the output should return the input'); +}, 'data should be correctly compressed even if input is detached partway'); diff --git a/test/fixtures/wpt/compression/decompression-bad-chunks.tentative.any.js b/test/fixtures/wpt/compression/decompression-bad-chunks.tentative.any.js new file mode 100644 index 00000000000000..f450b0c4cb2553 --- /dev/null +++ b/test/fixtures/wpt/compression/decompression-bad-chunks.tentative.any.js @@ -0,0 +1,85 @@ +// META: global=window,worker,shadowrealm + +'use strict'; + +const badChunks = [ + { + name: 'undefined', + value: undefined + }, + { + name: 'null', + value: null + }, + { + name: 'numeric', + value: 3.14 + }, + { + name: 'object, not BufferSource', + value: {} + }, + { + name: 'array', + value: [65] + }, + { + name: 'SharedArrayBuffer', + // Use a getter to postpone construction so that all tests don't fail where + // SharedArrayBuffer is not yet implemented. + get value() { + // See https://github.com/whatwg/html/issues/5380 for why not `new SharedArrayBuffer()` + return new WebAssembly.Memory({ shared:true, initial:1, maximum:1 }).buffer; + } + }, + { + name: 'shared Uint8Array', + get value() { + // See https://github.com/whatwg/html/issues/5380 for why not `new SharedArrayBuffer()` + return new Uint8Array(new WebAssembly.Memory({ shared:true, initial:1, maximum:1 }).buffer) + } + }, + { + name: 'invalid deflate bytes', + value: new Uint8Array([0, 156, 75, 173, 40, 72, 77, 46, 73, 77, 81, 200, 47, 45, 41, 40, 45, 1, 0, 48, 173, 6, 36]) + }, + { + name: 'invalid gzip bytes', + value: new Uint8Array([0, 139, 8, 0, 0, 0, 0, 0, 0, 3, 75, 173, 40, 72, 77, 46, 73, 77, 81, 200, 47, 45, 41, 40, 45, 1, 0, 176, 1, 57, 179, 15, 0, 0, 0]) + }, +]; + +// Test Case Design +// We need to wait until after we close the writable stream to check if the decoded stream is valid. +// We can end up in a state where all reads/writes are valid, but upon closing the writable stream an error is detected. +// (Example: A zlib encoded chunk w/o the checksum). 
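+// For that reason the helper below ignores rejections of the eager write() and
+// read() calls, and instead asserts that close(), closed, and a follow-up
+// read() all reject.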
+async function decompress(chunk, format, t) +{ + const ds = new DecompressionStream(format); + const reader = ds.readable.getReader(); + const writer = ds.writable.getWriter(); + + writer.write(chunk.value).then(() => {}, () => {}); + reader.read().then(() => {}, () => {}); + + await promise_rejects_js(t, TypeError, writer.close(), 'writer.close() should reject'); + await promise_rejects_js(t, TypeError, writer.closed, 'writer.closed should reject'); + + await promise_rejects_js(t, TypeError, reader.read(), 'reader.read() should reject'); + await promise_rejects_js(t, TypeError, reader.closed, 'reader.closed should reject'); +} + +for (const chunk of badChunks) { + promise_test(async t => { + await decompress(chunk, 'gzip', t); + }, `chunk of type ${chunk.name} should error the stream for gzip`); + + promise_test(async t => { + await decompress(chunk, 'deflate', t); + }, `chunk of type ${chunk.name} should error the stream for deflate`); + + promise_test(async t => { + await decompress(chunk, 'deflate-raw', t); + }, `chunk of type ${chunk.name} should error the stream for deflate-raw`); +} diff --git a/test/fixtures/wpt/compression/decompression-buffersource.tentative.any.js b/test/fixtures/wpt/compression/decompression-buffersource.tentative.any.js new file mode 100644 index 00000000000000..e81fc566779800 --- /dev/null +++ b/test/fixtures/wpt/compression/decompression-buffersource.tentative.any.js @@ -0,0 +1,192 @@ +// META: global=window,worker,shadowrealm + +'use strict'; + +const compressedBytesWithDeflate = [120, 156, 75, 52, 48, 52, 50, 54, 49, 53, 3, 0, 8, 136, 1, 199]; +const compressedBytesWithGzip = [31, 139, 8, 0, 0, 0, 0, 0, 0, 3, 75, 52, 48, 52, 2, 0, 216, 252, 63, 136, 4, 0, 0, 0]; +const compressedBytesWithDeflateRaw = [ + 0x00, 0x06, 0x00, 0xf9, 0xff, 0x41, 0x42, 0x43, + 0x44, 0x45, 0x46, 0x01, 0x00, 0x00, 0xff, 0xff, +]; +// These chunk values below were chosen to make the length of the compressed +// output be a multiple of 8 bytes.
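+// (8 is the element size of Float64Array, the widest view constructed below;
+// a typed-array constructor throws a RangeError when the buffer length is not
+// a multiple of its BYTES_PER_ELEMENT.)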
+const deflateExpectedChunkValue = new TextEncoder().encode('a0123456'); +const gzipExpectedChunkValue = new TextEncoder().encode('a012'); +const deflateRawExpectedChunkValue = new TextEncoder().encode('ABCDEF'); + +const bufferSourceChunksForDeflate = [ + { + name: 'ArrayBuffer', + value: new Uint8Array(compressedBytesWithDeflate).buffer + }, + { + name: 'Int8Array', + value: new Int8Array(new Uint8Array(compressedBytesWithDeflate).buffer) + }, + { + name: 'Uint8Array', + value: new Uint8Array(new Uint8Array(compressedBytesWithDeflate).buffer) + }, + { + name: 'Uint8ClampedArray', + value: new Uint8ClampedArray(new Uint8Array(compressedBytesWithDeflate).buffer) + }, + { + name: 'Int16Array', + value: new Int16Array(new Uint8Array(compressedBytesWithDeflate).buffer) + }, + { + name: 'Uint16Array', + value: new Uint16Array(new Uint8Array(compressedBytesWithDeflate).buffer) + }, + { + name: 'Int32Array', + value: new Int32Array(new Uint8Array(compressedBytesWithDeflate).buffer) + }, + { + name: 'Uint32Array', + value: new Uint32Array(new Uint8Array(compressedBytesWithDeflate).buffer) + }, + { + name: 'Float32Array', + value: new Float32Array(new Uint8Array(compressedBytesWithDeflate).buffer) + }, + { + name: 'Float64Array', + value: new Float64Array(new Uint8Array(compressedBytesWithDeflate).buffer) + }, + { + name: 'DataView', + value: new DataView(new Uint8Array(compressedBytesWithDeflate).buffer) + }, +]; + +const bufferSourceChunksForGzip = [ + { + name: 'ArrayBuffer', + value: new Uint8Array(compressedBytesWithGzip).buffer + }, + { + name: 'Int8Array', + value: new Int8Array(new Uint8Array(compressedBytesWithGzip).buffer) + }, + { + name: 'Uint8Array', + value: new Uint8Array(new Uint8Array(compressedBytesWithGzip).buffer) + }, + { + name: 'Uint8ClampedArray', + value: new Uint8ClampedArray(new Uint8Array(compressedBytesWithGzip).buffer) + }, + { + name: 'Int16Array', + value: new Int16Array(new Uint8Array(compressedBytesWithGzip).buffer) + }, + { + name: 'Uint16Array', + value: new Uint16Array(new Uint8Array(compressedBytesWithGzip).buffer) + }, + { + name: 'Int32Array', + value: new Int32Array(new Uint8Array(compressedBytesWithGzip).buffer) + }, + { + name: 'Uint32Array', + value: new Uint32Array(new Uint8Array(compressedBytesWithGzip).buffer) + }, + { + name: 'Float32Array', + value: new Float32Array(new Uint8Array(compressedBytesWithGzip).buffer) + }, + { + name: 'Float64Array', + value: new Float64Array(new Uint8Array(compressedBytesWithGzip).buffer) + }, + { + name: 'DataView', + value: new DataView(new Uint8Array(compressedBytesWithGzip).buffer) + }, +]; + +const bufferSourceChunksForDeflateRaw = [ + { + name: 'ArrayBuffer', + value: new Uint8Array(compressedBytesWithDeflateRaw).buffer + }, + { + name: 'Int8Array', + value: new Int8Array(new Uint8Array(compressedBytesWithDeflateRaw).buffer) + }, + { + name: 'Uint8Array', + value: new Uint8Array(new Uint8Array(compressedBytesWithDeflateRaw).buffer) + }, + { + name: 'Uint8ClampedArray', + value: new Uint8ClampedArray(new Uint8Array(compressedBytesWithDeflateRaw).buffer) + }, + { + name: 'Int16Array', + value: new Int16Array(new Uint8Array(compressedBytesWithDeflateRaw).buffer) + }, + { + name: 'Uint16Array', + value: new Uint16Array(new Uint8Array(compressedBytesWithDeflateRaw).buffer) + }, + { + name: 'Int32Array', + value: new Int32Array(new Uint8Array(compressedBytesWithDeflateRaw).buffer) + }, + { + name: 'Uint32Array', + value: new Uint32Array(new Uint8Array(compressedBytesWithDeflateRaw).buffer) + }, + { + name: 
'Float32Array', + value: new Float32Array(new Uint8Array(compressedBytesWithDeflateRaw).buffer) + }, + { + name: 'Float64Array', + value: new Float64Array(new Uint8Array(compressedBytesWithDeflateRaw).buffer) + }, + { + name: 'DataView', + value: new DataView(new Uint8Array(compressedBytesWithDeflateRaw).buffer) + }, +]; + +for (const chunk of bufferSourceChunksForDeflate) { + promise_test(async t => { + const ds = new DecompressionStream('deflate'); + const reader = ds.readable.getReader(); + const writer = ds.writable.getWriter(); + const writePromise = writer.write(chunk.value); + writer.close(); + const { value } = await reader.read(); + assert_array_equals(Array.from(value), deflateExpectedChunkValue, 'value should match'); + }, `chunk of type ${chunk.name} should work for deflate`); +} + +for (const chunk of bufferSourceChunksForGzip) { + promise_test(async t => { + const ds = new DecompressionStream('gzip'); + const reader = ds.readable.getReader(); + const writer = ds.writable.getWriter(); + const writePromise = writer.write(chunk.value); + writer.close(); + const { value } = await reader.read(); + assert_array_equals(Array.from(value), gzipExpectedChunkValue, 'value should match'); + }, `chunk of type ${chunk.name} should work for gzip`); +} + +for (const chunk of bufferSourceChunksForDeflateRaw) { + promise_test(async t => { + const ds = new DecompressionStream('deflate-raw'); + const reader = ds.readable.getReader(); + const writer = ds.writable.getWriter(); + const writePromise = writer.write(chunk.value); + writer.close(); + const { value } = await reader.read(); + assert_array_equals(Array.from(value), deflateRawExpectedChunkValue, 'value should match'); + }, `chunk of type ${chunk.name} should work for deflate-raw`); +} diff --git a/test/fixtures/wpt/compression/decompression-constructor-error.tentative.any.js b/test/fixtures/wpt/compression/decompression-constructor-error.tentative.any.js new file mode 100644 index 00000000000000..0270ba7353128c --- /dev/null +++ b/test/fixtures/wpt/compression/decompression-constructor-error.tentative.any.js @@ -0,0 +1,15 @@ +// META: global=window,worker,shadowrealm + +'use strict'; + +test(t => { + assert_throws_js(TypeError, () => new DecompressionStream('a'), 'constructor should throw'); +}, '"a" should cause the constructor to throw'); + +test(t => { + assert_throws_js(TypeError, () => new DecompressionStream(), 'constructor should throw'); +}, 'no input should cause the constructor to throw'); + +test(t => { + assert_throws_js(Error, () => new DecompressionStream({ toString() { throw Error(); } }), 'constructor should throw'); +}, 'non-string input should cause the constructor to throw'); diff --git a/test/fixtures/wpt/compression/decompression-correct-input.tentative.any.js b/test/fixtures/wpt/compression/decompression-correct-input.tentative.any.js new file mode 100644 index 00000000000000..90519445e3667b --- /dev/null +++ b/test/fixtures/wpt/compression/decompression-correct-input.tentative.any.js @@ -0,0 +1,39 @@ +// META: global=window,worker,shadowrealm + +'use strict'; + +const deflateChunkValue = new Uint8Array([120, 156, 75, 173, 40, 72, 77, 46, 73, 77, 81, 200, 47, 45, 41, 40, 45, 1, 0, 48, 173, 6, 36]); +const gzipChunkValue = new Uint8Array([31, 139, 8, 0, 0, 0, 0, 0, 0, 3, 75, 173, 40, 72, 77, 46, 73, 77, 81, 200, 47, 45, 41, 40, 45, 1, 0, 176, 1, 57, 179, 15, 0, 0, 0]); +const deflateRawChunkValue = new Uint8Array([ + 0x4b, 0xad, 0x28, 0x48, 0x4d, 0x2e, 0x49, 0x4d, 0x51, 0xc8, + 0x2f, 0x2d, 0x29, 0x28, 0x2d, 0x01, 0x00, 
+]); +const trueChunkValue = new TextEncoder().encode('expected output'); + +promise_test(async t => { + const ds = new DecompressionStream('deflate'); + const reader = ds.readable.getReader(); + const writer = ds.writable.getWriter(); + const writePromise = writer.write(deflateChunkValue); + const { done, value } = await reader.read(); + assert_array_equals(Array.from(value), trueChunkValue, "value should match"); +}, 'decompressing deflated input should work'); + + +promise_test(async t => { + const ds = new DecompressionStream('gzip'); + const reader = ds.readable.getReader(); + const writer = ds.writable.getWriter(); + const writePromise = writer.write(gzipChunkValue); + const { done, value } = await reader.read(); + assert_array_equals(Array.from(value), trueChunkValue, "value should match"); +}, 'decompressing gzip input should work'); + +promise_test(async t => { + const ds = new DecompressionStream('deflate-raw'); + const reader = ds.readable.getReader(); + const writer = ds.writable.getWriter(); + const writePromise = writer.write(deflateRawChunkValue); + const { done, value } = await reader.read(); + assert_array_equals(Array.from(value), trueChunkValue, "value should match"); +}, 'decompressing deflated (with -raw) input should work'); diff --git a/test/fixtures/wpt/compression/decompression-corrupt-input.tentative.any.js b/test/fixtures/wpt/compression/decompression-corrupt-input.tentative.any.js new file mode 100644 index 00000000000000..fc18197dfbd3db --- /dev/null +++ b/test/fixtures/wpt/compression/decompression-corrupt-input.tentative.any.js @@ -0,0 +1,318 @@ +// META: global=window,worker,shadowrealm + +// This test checks that DecompressionStream behaves according to the standard +// when the input is corrupted. To avoid a combinatorial explosion in the +// number of tests, we only mutate one field at a time, and we only test +// "interesting" values. + +'use strict'; + +// The many different cases are summarised in this data structure. +const expectations = [ + { + format: 'deflate', + + // Decompresses to 'expected output'. + baseInput: [120, 156, 75, 173, 40, 72, 77, 46, 73, 77, 81, 200, 47, 45, 41, + 40, 45, 1, 0, 48, 173, 6, 36], + + // See RFC1950 for the definition of the various fields used by deflate: + // https://tools.ietf.org/html/rfc1950. + fields: [ + { + // The function of this field. This matches the name used in the RFC. + name: 'CMF', + + // The offset of the field in bytes from the start of the input. + offset: 0, + + // The length of the field in bytes. + length: 1, + + cases: [ + { + // The value to set the field to. If the field contains multiple + // bytes, all the bytes will be set to this value. + value: 0, + + // The expected result. 'success' means the input is decoded + // successfully. 'error' means that the stream will be errored. + result: 'error' + } + ] + }, + { + name: 'FLG', + offset: 1, + length: 1, + + // FLG contains a 4-bit checksum (FCHECK) which is calculated in such a + // way that there are 4 valid values for this field. + cases: [ + { + value: 218, + result: 'success' + }, + { + value: 1, + result: 'success' + }, + { + value: 94, + result: 'success' + }, + { + // The remaining 252 values cause an error. + value: 157, + result: 'error' + } + ] + }, + { + name: 'DATA', + // In general, changing any bit of the data will trigger a checksum + // error. Only the last byte does anything else.
+ offset: 18, + length: 1, + cases: [ + { + value: 4, + result: 'success' + }, + { + value: 5, + result: 'error' + } + ] + }, + { + name: 'ADLER', + offset: -4, + length: 4, + cases: [ + { + value: 255, + result: 'error' + } + ] + } + ] + }, + { + format: 'gzip', + + // Decompresses to 'expected output'. + baseInput: [31, 139, 8, 0, 0, 0, 0, 0, 0, 3, 75, 173, 40, 72, 77, 46, 73, + 77, 81, 200, 47, 45, 41, 40, 45, 1, 0, 176, 1, 57, 179, 15, 0, + 0, 0], + + // See RFC1952 for the definition of the various fields used by gzip: + // https://tools.ietf.org/html/rfc1952. + fields: [ + { + name: 'ID', + offset: 0, + length: 2, + cases: [ + { + value: 255, + result: 'error' + } + ] + }, + { + name: 'CM', + offset: 2, + length: 1, + cases: [ + { + value: 0, + result: 'error' + } + ] + }, + { + name: 'FLG', + offset: 3, + length: 1, + cases: [ + { + value: 1, // FTEXT + result: 'success' + }, + { + value: 2, // FHCRC + result: 'error' + } + ] + }, + { + name: 'MTIME', + offset: 4, + length: 4, + cases: [ + { + // Any value is valid for this field. + value: 255, + result: 'success' + } + ] + }, + { + name: 'XFL', + offset: 8, + length: 1, + cases: [ + { + // Any value is accepted. + value: 255, + result: 'success' + } + ] + }, + { + name: 'OS', + offset: 9, + length: 1, + cases: [ + { + // Any value is accepted. + value: 128, + result: 'success' + } + ] + }, + { + name: 'DATA', + + // The last byte of the data is the most interesting. + offset: 26, + length: 1, + cases: [ + { + value: 3, + result: 'error' + }, + { + value: 4, + result: 'success' + } + ] + }, + { + name: 'CRC', + offset: -8, + length: 4, + cases: [ + { + // Any change will error the stream. + value: 0, + result: 'error' + } + ] + }, + { + name: 'ISIZE', + offset: -4, + length: 4, + cases: [ + { + // A mismatch will error the stream. 
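+ // (ISIZE holds the byte length of the uncompressed input mod 2^32,
+ // so any corrupted value is detected.)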
+ value: 1, + result: 'error' + } + ] + } + ] + } +]; + +async function tryDecompress(input, format) { + const ds = new DecompressionStream(format); + const reader = ds.readable.getReader(); + const writer = ds.writable.getWriter(); + writer.write(input).catch(() => {}); + writer.close().catch(() => {}); + let out = []; + while (true) { + try { + const { value, done } = await reader.read(); + if (done) { + break; + } + out = out.concat(Array.from(value)); + } catch (e) { + if (e instanceof TypeError) { + return { result: 'error' }; + } else { + return { result: e.name }; + } + } + } + const expectedOutput = 'expected output'; + if (out.length !== expectedOutput.length) { + return { result: 'corrupt' }; + } + for (let i = 0; i < out.length; ++i) { + if (out[i] !== expectedOutput.charCodeAt(i)) { + return { result: 'corrupt' }; + } + } + return { result: 'success' }; +} + +function corruptInput(input, offset, length, value) { + const output = new Uint8Array(input); + if (offset < 0) { + offset += input.length; + } + for (let i = offset; i < offset + length; ++i) { + output[i] = value; + } + return output; +} + +for (const { format, baseInput, fields } of expectations) { + promise_test(async () => { + const { result } = await tryDecompress(new Uint8Array(baseInput), format); + assert_equals(result, 'success', 'decompression should succeed'); + }, `the unchanged input for '${format}' should decompress successfully`); + + promise_test(async () => { + const truncatedInput = new Uint8Array(baseInput.slice(0, -1)); + const { result } = await tryDecompress(truncatedInput, format); + assert_equals(result, 'error', 'decompression should fail'); + }, `truncating the input for '${format}' should give an error`); + + promise_test(async () => { + const extendedInput = new Uint8Array(baseInput.concat([0])); + const { result } = await tryDecompress(extendedInput, format); + assert_equals(result, 'error', 'decompression should fail'); + }, `trailing junk for '${format}' should give an error`); + + for (const { name, offset, length, cases } of fields) { + for (const { value, result } of cases) { + promise_test(async () => { + const corruptedInput = corruptInput(baseInput, offset, length, value); + const { result: actual } = + await tryDecompress(corruptedInput, format); + assert_equals(actual, result, 'result should match'); + }, `format '${format}' field ${name} should be ${result} for ${value}`); + } + } +} + +promise_test(async () => { + // Data generated in Python: + // ```py + // h = b"thequickbrownfoxjumped\x00" + // words = h.split() + // zdict = b''.join(words) + // co = zlib.compressobj(zdict=zdict) + // cd = co.compress(h) + co.flush() + // ``` + const { result } = await tryDecompress(new Uint8Array([ + 0x78, 0xbb, 0x74, 0xee, 0x09, 0x59, 0x2b, 0xc1, 0x2e, 0x0c, 0x00, 0x74, 0xee, 0x09, 0x59 + ]), 'deflate'); + assert_equals(result, 'error', 'Data compressed with a dictionary should throw TypeError'); +}, 'the deflate input compressed with dictionary should give an error') diff --git a/test/fixtures/wpt/compression/decompression-empty-input.tentative.any.js b/test/fixtures/wpt/compression/decompression-empty-input.tentative.any.js new file mode 100644 index 00000000000000..201db8ec0b0d7c --- /dev/null +++ b/test/fixtures/wpt/compression/decompression-empty-input.tentative.any.js @@ -0,0 +1,43 @@ +// META: global=window,worker,shadowrealm + +'use strict'; + +const gzipEmptyValue = new Uint8Array([31, 139, 8, 0, 0, 0, 0, 0, 0, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0]); +const deflateEmptyValue = new 
Uint8Array([120, 156, 3, 0, 0, 0, 0, 1]); +const deflateRawEmptyValue = new Uint8Array([1, 0, 0, 255, 255]); + +promise_test(async t => { + const ds = new DecompressionStream('gzip'); + const reader = ds.readable.getReader(); + const writer = ds.writable.getWriter(); + const writePromise = writer.write(gzipEmptyValue); + writer.close(); + const { value, done } = await reader.read(); + assert_true(done, "read() should set done"); + assert_equals(value, undefined, "value should be undefined"); + await writePromise; +}, 'decompressing gzip empty input should work'); + +promise_test(async t => { + const ds = new DecompressionStream('deflate'); + const reader = ds.readable.getReader(); + const writer = ds.writable.getWriter(); + const writePromise = writer.write(deflateEmptyValue); + writer.close(); + const { value, done } = await reader.read(); + assert_true(done, "read() should set done"); + assert_equals(value, undefined, "value should be undefined"); + await writePromise; +}, 'decompressing deflate empty input should work'); + +promise_test(async t => { + const ds = new DecompressionStream('deflate-raw'); + const reader = ds.readable.getReader(); + const writer = ds.writable.getWriter(); + const writePromise = writer.write(deflateRawEmptyValue); + writer.close(); + const { value, done } = await reader.read(); + assert_true(done, "read() should set done"); + assert_equals(value, undefined, "value should be undefined"); + await writePromise; +}, 'decompressing deflate-raw empty input should work'); diff --git a/test/fixtures/wpt/compression/decompression-split-chunk.tentative.any.js b/test/fixtures/wpt/compression/decompression-split-chunk.tentative.any.js new file mode 100644 index 00000000000000..eb12c2a2360cd9 --- /dev/null +++ b/test/fixtures/wpt/compression/decompression-split-chunk.tentative.any.js @@ -0,0 +1,53 @@ +// META: global=window,worker,shadowrealm + +'use strict'; + +const compressedBytesWithDeflate = new Uint8Array([120, 156, 75, 173, 40, 72, 77, 46, 73, 77, 81, 200, 47, 45, 41, 40, 45, 1, 0, 48, 173, 6, 36]); +const compressedBytesWithGzip = new Uint8Array([31, 139, 8, 0, 0, 0, 0, 0, 0, 3, 75, 173, 40, 72, 77, 46, 73, 77, 81, 200, 47, 45, 41, 40, 45, 1, 0, 176, 1, 57, 179, 15, 0, 0, 0]); +const compressedBytesWithDeflateRaw = new Uint8Array([ + 0x4b, 0xad, 0x28, 0x48, 0x4d, 0x2e, 0x49, 0x4d, 0x51, 0xc8, + 0x2f, 0x2d, 0x29, 0x28, 0x2d, 0x01, 0x00, +]); +const expectedChunkValue = new TextEncoder().encode('expected output'); + +async function decompressArrayBuffer(input, format, chunkSize) { + const ds = new DecompressionStream(format); + const reader = ds.readable.getReader(); + const writer = ds.writable.getWriter(); + for (let beginning = 0; beginning < input.length; beginning += chunkSize) { + writer.write(input.slice(beginning, beginning + chunkSize)); + } + writer.close(); + const out = []; + let totalSize = 0; + while (true) { + const { value, done } = await reader.read(); + if (done) break; + out.push(value); + totalSize += value.byteLength; + } + const concatenated = new Uint8Array(totalSize); + let offset = 0; + for (const array of out) { + concatenated.set(array, offset); + offset += array.byteLength; + } + return concatenated; +} + +for (let chunkSize = 1; chunkSize < 16; ++chunkSize) { + promise_test(async t => { + const decompressedData = await decompressArrayBuffer(compressedBytesWithDeflate, 'deflate', chunkSize); + assert_array_equals(decompressedData, expectedChunkValue, "value should match"); + }, `decompressing split chunk into pieces of size ${chunkSize} should work in deflate`); + + promise_test(async t => { + const decompressedData = await decompressArrayBuffer(compressedBytesWithGzip, 'gzip', chunkSize); + assert_array_equals(decompressedData, expectedChunkValue, "value should match"); + }, `decompressing split chunk into pieces of size ${chunkSize} should work in gzip`); + + promise_test(async t => { + const decompressedData = await decompressArrayBuffer(compressedBytesWithDeflateRaw, 'deflate-raw', chunkSize); + assert_array_equals(decompressedData, expectedChunkValue, "value should match"); + }, `decompressing split chunk into pieces of size ${chunkSize} should work in deflate-raw`); +} diff --git a/test/fixtures/wpt/compression/decompression-uint8array-output.tentative.any.js b/test/fixtures/wpt/compression/decompression-uint8array-output.tentative.any.js new file mode 100644 index 00000000000000..0c45a0aaa727f1 --- /dev/null +++ b/test/fixtures/wpt/compression/decompression-uint8array-output.tentative.any.js @@ -0,0 +1,30 @@ +// META: global=window,worker,shadowrealm +// META: timeout=long +// +// This test isn't actually slow usually, but sometimes it takes >10 seconds on +// Firefox with service worker for no obvious reason. + +'use strict'; + +const deflateChunkValue = new Uint8Array([120, 156, 75, 173, 40, 72, 77, 46, 73, 77, 81, 200, 47, 45, 41, 40, 45, 1, 0, 48, 173, 6, 36]); +const gzipChunkValue = new Uint8Array([31, 139, 8, 0, 0, 0, 0, 0, 0, 3, 75, 173, 40, 72, 77, 46, 73, 77, 81, 200, 47, 45, 41, 40, 45, 1, 0, 176, 1, 57, 179, 15, 0, 0, 0]); + +promise_test(async t => { + const ds = new DecompressionStream('deflate'); + const reader = ds.readable.getReader(); + const writer = ds.writable.getWriter(); + const writePromise = writer.write(deflateChunkValue); + const { value } = await reader.read(); + assert_equals(value.constructor, Uint8Array, "type should match"); + await writePromise; +}, 'decompressing deflated output should give Uint8Array chunks'); + +promise_test(async t => { + const ds = new DecompressionStream('gzip'); + const reader = ds.readable.getReader(); + const writer = ds.writable.getWriter(); + const writePromise = writer.write(gzipChunkValue); + const { value } = await reader.read(); + assert_equals(value.constructor, Uint8Array, "type should match"); + await writePromise; +}, 'decompressing gzip output should give Uint8Array chunks'); diff --git a/test/fixtures/wpt/compression/decompression-with-detach.tentative.window.js b/test/fixtures/wpt/compression/decompression-with-detach.tentative.window.js new file mode 100644 index 00000000000000..1ff9c269837022 --- /dev/null +++ b/test/fixtures/wpt/compression/decompression-with-detach.tentative.window.js @@ -0,0 +1,41 @@ +// META: global=window,worker,shadowrealm +// META: script=resources/concatenate-stream.js + +'use strict'; + +const kInputLength = 1000000; + +async function createLargeCompressedInput() { + const cs = new CompressionStream('deflate'); + // The input has to be large enough that it won't fit in a single chunk when + // decompressed. + const writer = cs.writable.getWriter(); + writer.write(new Uint8Array(kInputLength)); + writer.close(); + return concatenateStream(cs.readable); +} + +promise_test(async () => { + const input = await createLargeCompressedInput(); + const ds = new DecompressionStream('deflate'); + const writer = ds.writable.getWriter(); + writer.write(input); + writer.close(); + // Object.prototype.then will be looked up synchronously when the promise + // returned by read() is resolved.
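+ // Installing a 'then' getter on Object.prototype makes every plain object a
+ // thenable, so the getter fires during that resolution and can detach the
+ // input buffer while decompression is still in flight.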
+ Object.defineProperty(Object.prototype, 'then', { + get() { + // Cause input to become detached and unreferenced. + try { + postMessage(undefined, 'nowhere', [input.buffer]); + } catch (e) { + // It's already detached. + } + } + }); + const output = await concatenateStream(ds.readable); + // If output successfully decompressed and gave the right length, we can be + // reasonably confident that no data corruption happened. + assert_equals( + output.byteLength, kInputLength, 'output should be the right length'); +}, 'data should be correctly decompressed even if input is detached partway'); diff --git a/test/fixtures/wpt/compression/idlharness-shadowrealm.window.js b/test/fixtures/wpt/compression/idlharness-shadowrealm.window.js new file mode 100644 index 00000000000000..2fdc807ee07e32 --- /dev/null +++ b/test/fixtures/wpt/compression/idlharness-shadowrealm.window.js @@ -0,0 +1,2 @@ +// META: script=/resources/idlharness-shadowrealm.js +idl_test_shadowrealm(["compression"], ["streams"]); diff --git a/test/fixtures/wpt/compression/idlharness.https.any.js b/test/fixtures/wpt/compression/idlharness.https.any.js new file mode 100644 index 00000000000000..8d96cf523c4953 --- /dev/null +++ b/test/fixtures/wpt/compression/idlharness.https.any.js @@ -0,0 +1,17 @@ +// META: script=/resources/WebIDLParser.js +// META: script=/resources/idlharness.js + +'use strict'; + +// https://wicg.github.io/compression/ + +idl_test( + ['compression'], + ['streams'], + idl_array => { + idl_array.add_objects({ + CompressionStream: ['new CompressionStream("deflate")'], + DecompressionStream: ['new DecompressionStream("deflate")'], + }); + } +); diff --git a/test/fixtures/wpt/compression/resources/concatenate-stream.js b/test/fixtures/wpt/compression/resources/concatenate-stream.js new file mode 100644 index 00000000000000..a35bb1416e7548 --- /dev/null +++ b/test/fixtures/wpt/compression/resources/concatenate-stream.js @@ -0,0 +1,25 @@ +'use strict'; + +// Read all the chunks from a stream that returns BufferSource objects and +// concatenate them into a single Uint8Array. +async function concatenateStream(readableStream) { + const reader = readableStream.getReader(); + let totalSize = 0; + const buffers = []; + while (true) { + const { value, done } = await reader.read(); + if (done) { + break; + } + buffers.push(value); + totalSize += value.byteLength; + } + reader.releaseLock(); + const concatenated = new Uint8Array(totalSize); + let offset = 0; + for (const buffer of buffers) { + concatenated.set(buffer, offset); + offset += buffer.byteLength; + } + return concatenated; +} diff --git a/test/fixtures/wpt/compression/third_party/pako/LICENSE b/test/fixtures/wpt/compression/third_party/pako/LICENSE new file mode 100644 index 00000000000000..a934ef8db47845 --- /dev/null +++ b/test/fixtures/wpt/compression/third_party/pako/LICENSE @@ -0,0 +1,21 @@ +(The MIT License) + +Copyright (C) 2014-2017 by Vitaly Puzrin and Andrei Tuputcyn + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/test/fixtures/wpt/compression/third_party/pako/README b/test/fixtures/wpt/compression/third_party/pako/README
new file mode 100644
index 00000000000000..96028388ebb9d5
--- /dev/null
+++ b/test/fixtures/wpt/compression/third_party/pako/README
@@ -0,0 +1,2 @@
+original repository:
+https://github.com/nodeca/pako
diff --git a/test/fixtures/wpt/compression/third_party/pako/pako_inflate.min.js b/test/fixtures/wpt/compression/third_party/pako/pako_inflate.min.js
new file mode 100644
index 00000000000000..a191a78a8956cd
--- /dev/null
+++ b/test/fixtures/wpt/compression/third_party/pako/pako_inflate.min.js
@@ -0,0 +1 @@
+[single-line minified inflate build vendored from https://github.com/nodeca/pako; the minified source was garbled in extraction and is not reproduced here]
[the diff header of a small WebVTT fixture was also lost in extraction; only this fragment survives]
00:00:05.000
+Foo
diff --git a/test/fixtures/wpt/media/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.webm b/test/fixtures/wpt/media/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.webm
new file mode 100644
index 00000000000000..8b705dbc898070
Binary files /dev/null and b/test/fixtures/wpt/media/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.webm differ
diff --git a/test/fixtures/wpt/versions.json b/test/fixtures/wpt/versions.json
index 4b28c06e6c40df..64de3fbbbeff27 100644
--- a/test/fixtures/wpt/versions.json
+++ b/test/fixtures/wpt/versions.json
@@ -3,6 +3,10 @@
     "commit": "dbd648158d337580885e70a54f929daf215211a0",
     "path": "common"
   },
+  "compression": {
+    "commit": "c82521cfa587505746a853a24d22589633825b10",
+    "path": "compression"
+  },
   "console": {
     "commit": "767ae354642bee1e4d90b28df4480475b9260e14",
     "path": "console"
diff --git a/test/wpt/README.md b/test/wpt/README.md
index a378e3244a1f0b..0f73062e289a47 100644
--- a/test/wpt/README.md
+++ b/test/wpt/README.md
@@ -153,7 +153,7 @@ expected failures.
{ "something.scope.js": { // the file name // Optional: If the requirement is not met, this test will be skipped - "requires": ["small-icu"], // supports: "small-icu", "full-icu" + "requires": ["small-icu"], // supports: "small-icu", "full-icu", "crypto" // Optional: the test will be skipped with the reason printed "skip": "explain why we cannot run a test that's supposed to pass", diff --git a/test/wpt/status/compression.json b/test/wpt/status/compression.json new file mode 100644 index 00000000000000..8c8535c15815e4 --- /dev/null +++ b/test/wpt/status/compression.json @@ -0,0 +1,58 @@ +{ + "compression-bad-chunks.tentative.any.js": { + "skip": "Execution \"hangs\", ArrayBuffer and TypedArray is not accepted and throws, instead of rejects during writer.write" + }, + "compression-constructor-error.tentative.any.js": { + "fail": { + "expected": [ + "non-string input should cause the constructor to throw" + ] + } + }, + "decompression-bad-chunks.tentative.any.js": { + "skip": "Execution \"hangs\", ArrayBuffer and TypedArray is not accepted and throws, instead of rejects during writer.write" + }, + "decompression-buffersource.tentative.any.js": { + "skip": "ArrayBuffer and TypedArray is not accepted and throws, instead of rejects during writer.write" + }, + "decompression-constructor-error.tentative.any.js": { + "fail": { + "expected": [ + "non-string input should cause the constructor to throw" + ] + } + }, + "compression-with-detach.tentative.window.js": { + "requires": ["crypto"] + }, + "decompression-corrupt-input.tentative.any.js": { + "fail": { + "expected": [ + "truncating the input for 'deflate' should give an error", + "trailing junk for 'deflate' should give an error", + "format 'deflate' field CMF should be error for 0", + "format 'deflate' field FLG should be error for 157", + "format 'deflate' field DATA should be error for 5", + "format 'deflate' field ADLER should be error for 255", + "truncating the input for 'gzip' should give an error", + "trailing junk for 'gzip' should give an error", + "format 'gzip' field ID should be error for 255", + "format 'gzip' field CM should be error for 0", + "format 'gzip' field FLG should be error for 2", + "format 'gzip' field DATA should be error for 3", + "format 'gzip' field CRC should be error for 0", + "format 'gzip' field ISIZE should be error for 1", + "the deflate input compressed with dictionary should give an error" + ] + } + }, + "idlharness-shadowrealm.window.js": { + "skip": "ShadowRealm support is not enabled" + }, + "idlharness.https.any.js": { + "skip": "wpt/resources is not as simple to bring up to date" + }, + "third_party/pako/pako_inflate.min.js": { + "skip": "This is not a test file." + } +} diff --git a/test/wpt/status/performance-timeline.json b/test/wpt/status/performance-timeline.json index 9a297e641437df..36eeb36782c9aa 100644 --- a/test/wpt/status/performance-timeline.json +++ b/test/wpt/status/performance-timeline.json @@ -7,6 +7,9 @@ ] } }, + "navigation-id.helper.js": { + "skip": "This is not a test file." + }, "webtiming-resolution.any.js": { "skip": "flaky" } diff --git a/test/wpt/test-compression.js b/test/wpt/test-compression.js new file mode 100644 index 00000000000000..6991adff5645b4 --- /dev/null +++ b/test/wpt/test-compression.js @@ -0,0 +1,7 @@ +'use strict'; + +const { WPTRunner } = require('../common/wpt'); + +const runner = new WPTRunner('compression'); + +runner.runJsTests();