feat(smartduplex): improve backpressure handling and web/node stream interoperability

This commit is contained in:
2026-03-02 06:55:11 +00:00
parent 1262c48fe9
commit 2acf1972a2
23 changed files with 1416 additions and 511 deletions

View File

@@ -1,7 +1,7 @@
import * as plugins from './smartstream.plugins.js';
/**
* Creates a Web ReadableStream from a file using pull-based backpressure.
*
* @param filePath - The path to the file to be read
* @returns A Web ReadableStream that reads the file in chunks
@@ -11,23 +11,53 @@ export function createWebReadableStreamFromFile(filePath: string): ReadableStrea
return new ReadableStream({
start(controller) {
// When data is available, enqueue it into the Web ReadableStream
fileStream.on('data', (chunk) => {
controller.enqueue(chunk as Uint8Array);
fileStream.on('error', (err) => {
controller.error(err);
});
// When the file stream ends, close the Web ReadableStream
fileStream.on('end', () => {
controller.close();
});
// If there's an error, error the Web ReadableStream
fileStream.on('error', (err) => {
controller.error(err);
// Pause immediately — pull() will drive reads
fileStream.pause();
},
pull(controller) {
return new Promise<void>((resolve, reject) => {
const chunk = fileStream.read();
if (chunk !== null) {
controller.enqueue(chunk as Uint8Array);
resolve();
return;
}
// No data available yet — wait for 'readable' or 'end'
const onReadable = () => {
cleanup();
const data = fileStream.read();
if (data !== null) {
controller.enqueue(data as Uint8Array);
}
resolve();
};
const onEnd = () => {
cleanup();
resolve();
};
const onError = (err: Error) => {
cleanup();
reject(err);
};
const cleanup = () => {
fileStream.removeListener('readable', onReadable);
fileStream.removeListener('end', onEnd);
fileStream.removeListener('error', onError);
};
fileStream.once('readable', onReadable);
fileStream.once('end', onEnd);
fileStream.once('error', onError);
});
},
cancel() {
// If the Web ReadableStream is canceled, destroy the file stream
fileStream.destroy();
}
});
@@ -43,23 +73,25 @@ export function convertWebReadableToNodeReadable(webStream: ReadableStream<Uint8
const reader = webStream.getReader();
return new plugins.stream.Readable({
  read() {
    // Node calls read() when it wants more data. Fetch one chunk from
    // the web reader; push() re-arms read() once buffer space exists,
    // so backpressure propagates naturally.
    reader.read().then(
      ({ value, done }) => {
        if (done) {
          this.push(null); // signal EOF to Node consumers
        } else {
          this.push(Buffer.from(value)); // Uint8Array -> Buffer
        }
      },
      (err) => {
        this.destroy(err);
      }
    );
  },
  destroy(err, callback) {
    // BUGFIX: cancel the underlying web stream when the Node side is
    // torn down early — otherwise the reader lock (and any queued
    // data) is held forever. Always report the original error.
    reader.cancel(err ?? undefined).then(
      () => callback(err),
      () => callback(err)
    );
  }
});
}
/**
* Converts a Node.js Readable stream to a Web ReadableStream using pull-based backpressure.
*
* @param nodeStream - The Node.js Readable stream to convert
* @returns A Web ReadableStream that reads data from the Node.js Readable stream
@@ -67,16 +99,50 @@ export function convertWebReadableToNodeReadable(webStream: ReadableStream<Uint8
export function convertNodeReadableToWebReadable(nodeStream: plugins.stream.Readable): ReadableStream<Uint8Array> {
return new ReadableStream({
start(controller) {
nodeStream.on('data', (chunk) => {
controller.enqueue(new Uint8Array(chunk));
nodeStream.on('error', (err) => {
controller.error(err);
});
nodeStream.on('end', () => {
controller.close();
});
nodeStream.on('error', (err) => {
controller.error(err);
// Pause immediately — pull() will drive reads
nodeStream.pause();
},
pull(controller) {
return new Promise<void>((resolve, reject) => {
const chunk = nodeStream.read();
if (chunk !== null) {
controller.enqueue(new Uint8Array(chunk));
resolve();
return;
}
// No data available yet — wait for 'readable' or 'end'
const onReadable = () => {
cleanup();
const data = nodeStream.read();
if (data !== null) {
controller.enqueue(new Uint8Array(data));
}
resolve();
};
const onEnd = () => {
cleanup();
resolve();
};
const onError = (err: Error) => {
cleanup();
reject(err);
};
const cleanup = () => {
nodeStream.removeListener('readable', onReadable);
nodeStream.removeListener('end', onEnd);
nodeStream.removeListener('error', onError);
};
nodeStream.once('readable', onReadable);
nodeStream.once('end', onEnd);
nodeStream.once('error', onError);
});
},
cancel() {
@@ -95,19 +161,23 @@ export function convertWebWritableToNodeWritable(webWritable: WritableStream<Uin
const writer = webWritable.getWriter();
return new plugins.stream.Writable({
  write(chunk, encoding, callback) {
    // Forward exactly one chunk. The writer's promise settles once the
    // web sink accepts it, which propagates backpressure to Node.
    writer.write(new Uint8Array(chunk)).then(() => callback(), callback);
  },
  final(callback) {
    // end() on the Node side closes the web writable cleanly.
    writer.close().then(() => callback(), callback);
  },
  destroy(err, callback) {
    if (err) {
      // Errored teardown: abort the web stream, then report the
      // original error whether or not abort itself succeeded.
      writer.abort(err).then(
        () => callback(err),
        () => callback(err)
      );
    } else {
      // Clean destroy — just release the lock.
      writer.releaseLock();
      callback(null);
    }
  }
});
}
@@ -133,7 +203,7 @@ export function convertNodeWritableToWebWritable(nodeWritable: plugins.stream.Wr
},
close() {
  // Resolve once the Node writable has fully flushed ('finish'),
  // reject if end() reports an error.
  return new Promise<void>((resolve, reject) => {
    nodeWritable.end((err: Error | null) => {
      if (err) {
        reject(err);
      } else {
        resolve();
      }
    });
  });
},
abort(reason) {
  // Normalize the abort reason into an Error for Writable#destroy().
  // BUGFIX: String(undefined) would yield a misleading Error("undefined")
  // when abort() is called with no reason.
  const error =
    reason instanceof Error
      ? reason
      : new Error(String(reason ?? 'stream aborted'));
  nodeWritable.destroy(error);
}
});
}
}