Skip to content

Commit 91009bf

Browse files
stream: accept ArrayBuffer in CompressionStream and DecompressionStream
1 parent 413dffc commit 91009bf

File tree

3 files changed

+78
-3
lines changed

3 files changed

+78
-3
lines changed

lib/internal/webstreams/adapters.js

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -53,6 +53,10 @@ const {
5353
Buffer,
5454
} = require('buffer');
5555

56+
const {
57+
isArrayBuffer,
58+
} = require('internal/util/types');
59+
5660
const {
5761
AbortError,
5862
ErrnoException,
@@ -213,6 +217,9 @@ function newWritableStreamFromStreamWritable(streamWritable) {
213217
start(c) { controller = c; },
214218

215219
write(chunk) {
220+
if (isArrayBuffer(chunk)) {
221+
chunk = new Uint8Array(chunk);
222+
}
216223
if (streamWritable.writableNeedDrain || !streamWritable.write(chunk)) {
217224
backpressurePromise = PromiseWithResolvers();
218225
return SafePromisePrototypeFinally(
Lines changed: 71 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,71 @@
1+
'use strict';
2+
const common = require('../common');
3+
const assert = require('assert');
4+
const { DecompressionStream, CompressionStream } = require('stream/web');
5+
6+
// gzip stream whose payload inflates to the ASCII string "hello".
// Layout: 10-byte gzip header, deflate body, then CRC32 + ISIZE trailer.
const compressedGzip = new Uint8Array([
  // header: magic (1f 8b), method 8 (deflate), no flags, mtime 0, OS 3 (unix)
  31, 139, 8, 0, 0, 0, 0, 0, 0, 3,
  // deflate data for "hello", followed by CRC32 and uncompressed length (5)
  203, 72, 205, 201, 201, 7, 0, 134, 166, 16, 54, 5, 0, 0, 0,
]);
11+
12+
async function testDecompressionAcceptsArrayBuffer() {
13+
const ds = new DecompressionStream('gzip');
14+
const reader = ds.readable.getReader();
15+
const writer = ds.writable.getWriter();
16+
17+
const writePromise = writer.write(compressedGzip.buffer);
18+
writer.close();
19+
20+
const chunks = [];
21+
let done = false;
22+
while (!done) {
23+
const { value, done: d } = await reader.read();
24+
if (value) chunks.push(value);
25+
done = d;
26+
}
27+
await writePromise;
28+
const out = Buffer.concat(chunks.map((c) => Buffer.from(c)));
29+
assert.strictEqual(out.toString(), 'hello');
30+
}
31+
32+
async function testCompressionRoundTripWithArrayBuffer() {
33+
const cs = new CompressionStream('gzip');
34+
const ds = new DecompressionStream('gzip');
35+
36+
const csWriter = cs.writable.getWriter();
37+
const csReader = cs.readable.getReader();
38+
const dsWriter = ds.writable.getWriter();
39+
const dsReader = ds.readable.getReader();
40+
41+
const input = new TextEncoder().encode('hello').buffer;
42+
43+
await csWriter.write(input);
44+
csWriter.close();
45+
46+
const compressed = [];
47+
let done = false;
48+
while (!done) {
49+
const { value, done: d } = await csReader.read();
50+
if (value) compressed.push(value);
51+
done = d;
52+
}
53+
54+
for (const chunk of compressed) await dsWriter.write(chunk);
55+
dsWriter.close();
56+
57+
const out = [];
58+
done = false;
59+
while (!done) {
60+
const { value, done: d } = await dsReader.read();
61+
if (value) out.push(value);
62+
done = d;
63+
}
64+
const result = Buffer.concat(out.map((c) => Buffer.from(c)));
65+
assert.strictEqual(result.toString(), 'hello');
66+
}
67+
68+
// Run both scenarios concurrently; common.mustCall() asserts the chain
// actually resolved before process exit.
const scenarios = [
  testDecompressionAcceptsArrayBuffer(),
  testCompressionRoundTripWithArrayBuffer(),
];
Promise.all(scenarios).then(common.mustCall());

test/wpt/status/compression.json

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -5,9 +5,6 @@
55
"decompression-bad-chunks.tentative.any.js": {
66
"skip": "Execution \"hangs\", ArrayBuffer and TypedArray is not accepted and throws, instead of rejects during writer.write"
77
},
8-
"decompression-buffersource.tentative.any.js": {
9-
"skip": "ArrayBuffer and TypedArray is not accepted and throws, instead of rejects during writer.write"
10-
},
118
"compression-with-detach.tentative.window.js": {
129
"requires": ["crypto"]
1310
},

0 commit comments

Comments
 (0)