feat(smartstream): bump dependencies, update build/publish config, refactor tests, and overhaul documentation

This commit is contained in:
2026-02-28 08:48:58 +00:00
parent 3d13cb76f6
commit 9b9b1be62b
15 changed files with 5720 additions and 1756 deletions

View File

@@ -1,5 +1,14 @@
# Changelog
## 2026-02-28 - 3.3.0 - feat(smartstream)
bump dependencies, update build/publish config, refactor tests, and overhaul documentation
- Upgrade devDependencies (e.g. @git.zone/tsbuild -> ^4.1.2, @git.zone/tsrun -> ^2.0.1, @git.zone/tstest -> ^3.1.8, @types/node -> ^25.3.2) and runtime deps (e.g. @push.rocks/lik -> ^6.2.2, @push.rocks/smartenv -> ^6.0.0, @push.rocks/smartpromise -> ^4.2.3, @push.rocks/smartrx -> ^3.0.10).
- Refactor tests to use Node's native fs streams instead of @push.rocks/smartfile.fsStream and export default tap.start() to support ESM test runner patterns.
- Adjust build/publish: remove --web flag from build script, add pnpm override for agentkeepalive, add tspublish.json files for publish order, and add release registries/access in npmextra.json (verdaccio + npm).
- Rework project metadata in npmextra.json (namespaced @git.zone keys, tsdoc entry changes) and minor TypeScript/web fix: cast stream/web constructors to any in ts_web/plugins.ts.
- Large README rewrite: improved installation (pnpm), clearer Node vs Web entrypoints, expanded examples, and updated legal/license wording.
## 2024-11-19 - 3.2.5 - fix(nodewebhelpers)
Fix import and use correct module structure for Node.js streams in smartstream.nodewebhelpers.ts

View File

@@ -1,9 +1,5 @@
{
"npmci": {
"npmGlobalTools": [],
"npmAccessLevel": "public"
},
"gitzone": {
"@git.zone/cli": {
"projectType": "npm",
"module": {
"githost": "code.foss.global",
@@ -31,9 +27,19 @@
"stream utilities",
"esm"
]
},
"release": {
"registries": [
"https://verdaccio.lossless.digital",
"https://registry.npmjs.org"
],
"accessLevel": "public"
}
},
"tsdoc": {
"@git.zone/tsdoc": {
"legal": "\n## License and Legal Information\n\nThis repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. \n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or if you require further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n"
},
"@ship.zone/szci": {
"npmGlobalTools": []
}
}

View File

@@ -10,7 +10,7 @@
},
"scripts": {
"test": "(tstest test/)",
"build": "(tsbuild tsfolders --web --allowimplicitany)"
"build": "(tsbuild tsfolders --allowimplicitany)"
},
"repository": {
"type": "git",
@@ -23,18 +23,17 @@
},
"homepage": "https://code.foss.global/push.rocks/smartstream",
"devDependencies": {
"@git.zone/tsbuild": "^2.1.80",
"@git.zone/tsrun": "^1.2.44",
"@git.zone/tstest": "^1.0.90",
"@push.rocks/smartfile": "^11.0.15",
"@push.rocks/tapbundle": "^5.0.23",
"@types/node": "^20.12.12"
"@git.zone/tsbuild": "^4.1.2",
"@git.zone/tsrun": "^2.0.1",
"@git.zone/tstest": "^3.1.8",
"@push.rocks/tapbundle": "^6.0.3",
"@types/node": "^25.3.2"
},
"dependencies": {
"@push.rocks/lik": "^6.0.15",
"@push.rocks/smartenv": "^5.0.12",
"@push.rocks/smartpromise": "^4.0.3",
"@push.rocks/smartrx": "^3.0.7"
"@push.rocks/lik": "^6.2.2",
"@push.rocks/smartenv": "^6.0.0",
"@push.rocks/smartpromise": "^4.2.3",
"@push.rocks/smartrx": "^3.0.10"
},
"browserslist": [
"last 1 chrome versions"
@@ -51,6 +50,11 @@
"npmextra.json",
"readme.md"
],
"pnpm": {
"overrides": {
"agentkeepalive": "^4.6.0"
}
},
"keywords": [
"stream",
"node.js",

6712
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

667
readme.md
View File

@@ -1,375 +1,472 @@
```markdown
# @push.rocks/smartstream
A TypeScript library to simplify the creation and manipulation of Node.js streams, providing utilities for transform, duplex, and readable/writable stream handling while managing backpressure effectively.
A TypeScript-first library for creating and manipulating Node.js and Web streams with built-in backpressure handling, async transformations, and seamless Node.js ↔ Web stream interoperability.
## Issue Reporting and Security
For reporting bugs, issues, or security vulnerabilities, please visit [community.foss.global/](https://community.foss.global/). This is the central community hub for all issue reporting. Developers who sign and comply with our contribution agreement and go through identification can also get a [code.foss.global/](https://code.foss.global/) account to submit Pull Requests directly.
## Install
To install `@push.rocks/smartstream`, you can use npm or yarn as follows:
```bash
npm install @push.rocks/smartstream --save
# OR
yarn add @push.rocks/smartstream
pnpm install @push.rocks/smartstream
```
This will add `@push.rocks/smartstream` to your project's dependencies.
The package ships with two entry points:
| Entry Point | Import Path | Environment |
|---|---|---|
| **Node.js** (default) | `@push.rocks/smartstream` | Node.js — full stream utilities, duplex, intake, wrappers, and Node↔Web helpers |
| **Web** | `@push.rocks/smartstream/web` | Browser & Node.js — pure Web Streams API (`WebDuplexStream`) |
## Usage
The `@push.rocks/smartstream` module is designed to simplify working with Node.js streams by providing a set of utilities for creating and manipulating streams. This module makes extensive use of TypeScript for improved code quality, readability, and maintenance. ESM syntax is utilized throughout the examples.
All examples use ESM / TypeScript syntax.
### Importing the Module
Start by importing the module into your TypeScript file:
### 📦 Importing
```typescript
import * as smartstream from '@push.rocks/smartstream';
// Node.js — full API
import {
SmartDuplex,
StreamWrapper,
StreamIntake,
createTransformFunction,
createPassThrough,
nodewebhelpers,
} from '@push.rocks/smartstream';
// Web — browser-safe, zero Node.js dependencies
import { WebDuplexStream } from '@push.rocks/smartstream/web';
```
For a more specific import, you may do the following:
---
### 🔄 SmartDuplex — The Core Stream Primitive
`SmartDuplex` extends Node.js `Duplex` with first-class async support, built-in backpressure management, and a clean functional API. Instead of overriding `_transform` or `_write` manually, you pass a `writeFunction` that receives each chunk along with a `tools` object.
#### Basic Transform
```typescript
import { SmartDuplex, StreamWrapper, StreamIntake, createTransformFunction, createPassThrough } from '@push.rocks/smartstream';
import { SmartDuplex } from '@push.rocks/smartstream';
const upperCaser = new SmartDuplex<Buffer, Buffer>({
writeFunction: async (chunk, tools) => {
// Return a value to push it downstream
return Buffer.from(chunk.toString().toUpperCase());
},
});
readableStream.pipe(upperCaser).pipe(writableStream);
```
### Creating Basic Transform Streams
#### Using `tools.push()` for Multiple Outputs
The module provides utilities for creating transform streams. For example, to create a transform stream that modifies chunks of data, you can use the `createTransformFunction` utility:
The `writeFunction` can emit multiple chunks per input via `tools.push()`:
```typescript
const splitter = new SmartDuplex<string, string>({
objectMode: true,
writeFunction: async (chunk, tools) => {
const words = chunk.split(' ');
for (const word of words) {
await tools.push(word);
}
// Returning nothing — output was already pushed
},
});
```
#### Final Function
Run cleanup or emit final data when the writable side ends:
```typescript
const aggregator = new SmartDuplex<number, number>({
objectMode: true,
writeFunction: async (chunk, tools) => {
runningTotal += chunk;
// Don't emit anything per-chunk
},
finalFunction: async (tools) => {
return runningTotal; // Emitted as the last chunk
},
});
```
#### Truncating a Stream Early
Call `tools.truncate()` inside `writeFunction` to signal that no more data should be read:
```typescript
const limiter = new SmartDuplex<string, string>({
objectMode: true,
writeFunction: async (chunk, tools) => {
if (chunk === 'STOP') {
tools.truncate();
return;
}
return chunk;
},
});
```
#### Creating from a Buffer
```typescript
const stream = SmartDuplex.fromBuffer(Buffer.from('hello world'));
stream.on('data', (chunk) => console.log(chunk.toString())); // "hello world"
```
#### Creating from a Web ReadableStream
Bridge the Web Streams API into a Node.js Duplex:
```typescript
const response = await fetch('https://example.com/data');
const nodeDuplex = SmartDuplex.fromWebReadableStream(response.body);
nodeDuplex.pipe(processTransform).pipe(outputStream);
```
#### Getting Web Streams from SmartDuplex
Convert a `SmartDuplex` into Web `ReadableStream` + `WritableStream` pair:
```typescript
const duplex = new SmartDuplex({
writeFunction: async (chunk, tools) => {
return transform(chunk);
},
});
const { readable, writable } = await duplex.getWebStreams();
```
#### Debug Mode
Pass `debug: true` and `name` to get detailed internal logs:
```typescript
const stream = new SmartDuplex({
name: 'MyStream',
debug: true,
writeFunction: async (chunk, tools) => chunk,
});
```
---
### 🧩 StreamWrapper — Pipeline Composition
`StreamWrapper` takes an array of streams, pipes them together, attaches error listeners on all of them, and returns a `Promise` that resolves when the pipeline finishes:
```typescript
import { StreamWrapper } from '@push.rocks/smartstream';
import fs from 'fs';
const pipeline = new StreamWrapper([
fs.createReadStream('./input.txt'),
new SmartDuplex({
writeFunction: async (chunk) => Buffer.from(chunk.toString().toUpperCase()),
}),
fs.createWriteStream('./output.txt'),
]);
await pipeline.run();
console.log('Pipeline complete!');
```
Error handling is automatic — if any stream in the array errors, the returned promise rejects:
```typescript
pipeline.run()
.then(() => console.log('Done'))
.catch((err) => console.error('Pipeline failed:', err));
```
You can also listen for custom events across all streams:
```typescript
pipeline.onCustomEvent('progress', () => {
console.log('Progress event fired');
});
```
---
### 📥 StreamIntake — Dynamic Data Injection
`StreamIntake` is a `Readable` stream that lets you programmatically push data into a pipeline. It operates in object mode by default and provides a reactive observable (`pushNextObservable`) for demand-driven data production.
```typescript
import { StreamIntake, SmartDuplex } from '@push.rocks/smartstream';
const intake = new StreamIntake<string>();
// Pipe through a transform
intake
.pipe(new SmartDuplex({
objectMode: true,
writeFunction: async (chunk) => {
console.log('Processing:', chunk);
return chunk;
},
}))
.on('data', (data) => console.log('Output:', data));
// Push data whenever it's ready
intake.pushData('Hello');
intake.pushData('World');
intake.signalEnd(); // Signal end-of-stream
```
#### Demand-driven Production with Observable
`pushNextObservable` emits whenever the stream is ready for more data — perfect for throttled or event-driven producers:
```typescript
const intake = new StreamIntake<number>();
let counter = 0;
intake.pushNextObservable.subscribe(() => {
if (counter < 100) {
intake.pushData(counter++);
} else {
intake.signalEnd();
}
});
intake.pipe(consumer);
```
#### Creating from Existing Streams
Wrap a Node.js `Readable` or a Web `ReadableStream`:
```typescript
// From Node.js Readable
const intake = await StreamIntake.fromStream<Buffer>(fs.createReadStream('./data.bin'));
// From Web ReadableStream
const response = await fetch('https://example.com/stream');
const intake = await StreamIntake.fromStream<Uint8Array>(response.body);
```
---
### ⚡ Utility Functions
#### `createTransformFunction`
Quickly create a `SmartDuplex` from a simple async mapping function:
```typescript
import { createTransformFunction } from '@push.rocks/smartstream';
const upperCaseTransform = createTransformFunction<string, string>(async (chunk) => {
return chunk.toUpperCase();
const doubler = createTransformFunction<number, number>(async (n) => n * 2);
intakeStream.pipe(doubler).pipe(outputStream);
```
#### `createPassThrough`
Create an object-mode passthrough stream (useful as an intermediary or tee point):
```typescript
import { createPassThrough } from '@push.rocks/smartstream';
const passThrough = createPassThrough();
source.pipe(passThrough).pipe(destination);
```
---
### 🌐 WebDuplexStream — Pure Web Streams API
`WebDuplexStream` extends `TransformStream` and works in both browsers and Node.js. Import it from the `/web` subpath for zero Node.js dependencies.
```typescript
import { WebDuplexStream } from '@push.rocks/smartstream/web';
const stream = new WebDuplexStream<number, number>({
writeFunction: async (chunk, { push }) => {
push(chunk * 2); // Push transformed data
},
});
// Usage with pipe
readableStream
.pipe(upperCaseTransform)
.pipe(writableStream);
const writer = stream.writable.getWriter();
const reader = stream.readable.getReader();
// Write
await writer.write(5);
await writer.write(10);
await writer.close();
// Read
const { value } = await reader.read(); // 10
const { value: v2 } = await reader.read(); // 20
```
### Handling Backpressure with SmartDuplex
`SmartDuplex` is a powerful part of the `smartstream` module designed to handle backpressure effectively. Here's an example of how to create a `SmartDuplex` stream that processes data and respects the consumer's pace:
#### From a Uint8Array
```typescript
import { SmartDuplex } from '@push.rocks/smartstream';
const stream = WebDuplexStream.fromUInt8Array(new Uint8Array([1, 2, 3]));
const reader = stream.readable.getReader();
const { value } = await reader.read(); // Uint8Array [1, 2, 3]
```
const processDataDuplex = new SmartDuplex({
async writeFunction(chunk, { push }) {
const processedChunk = await processChunk(chunk); // Assume this is a defined asynchronous function
push(processedChunk);
}
#### Data Production with `readFunction`
Supply data into the stream from any async source:
```typescript
const stream = new WebDuplexStream<string, string>({
readFunction: async (tools) => {
await tools.write('chunk 1');
await tools.write('chunk 2');
tools.done(); // Signal end
},
writeFunction: async (chunk, { push }) => {
push(chunk.toUpperCase());
},
});
sourceStream.pipe(processDataDuplex).pipe(destinationStream);
const reader = stream.readable.getReader();
// reads "CHUNK 1", "CHUNK 2"
```
### Combining Multiple Streams
---
`Smartstream` facilitates easy combining of multiple streams into a single pipeline, handling errors and cleanup automatically. Here's how you can combine multiple streams:
### 🔀 Node ↔ Web Stream Converters
The `nodewebhelpers` namespace provides bidirectional converters between Node.js and Web Streams:
```typescript
import { StreamWrapper } from '@push.rocks/smartstream';
const combinedStream = new StreamWrapper([
readStream, // Source stream
transformStream1, // Transformation
transformStream2, // Another transformation
writeStream // Destination stream
]);
combinedStream.run()
.then(() => console.log('Processing completed.'))
.catch(err => console.error('An error occurred:', err));
import { nodewebhelpers } from '@push.rocks/smartstream';
```
### Working with StreamIntake
| Function | From | To |
|---|---|---|
| `createWebReadableStreamFromFile(path)` | File path | Web `ReadableStream<Uint8Array>` |
| `convertWebReadableToNodeReadable(webStream)` | Web `ReadableStream` | Node.js `Readable` |
| `convertNodeReadableToWebReadable(nodeStream)` | Node.js `Readable` | Web `ReadableStream` |
| `convertWebWritableToNodeWritable(webWritable)` | Web `WritableStream` | Node.js `Writable` |
| `convertNodeWritableToWebWritable(nodeWritable)` | Node.js `Writable` | Web `WritableStream` |
`StreamIntake` allows for more dynamic control of the reading process, facilitating scenarios where data is not continuously available:
#### Example: Serve a File as a Web ReadableStream
```typescript
import { StreamIntake } from '@push.rocks/smartstream';
const webStream = nodewebhelpers.createWebReadableStreamFromFile('./video.mp4');
const streamIntake = new StreamIntake<string>();
// Dynamically push data into the intake
streamIntake.pushData('Hello, World!');
streamIntake.pushData('Another message');
// Signal end when no more data is to be pushed
streamIntake.signalEnd();
// Use with fetch Response, service workers, etc.
return new Response(webStream, {
headers: { 'Content-Type': 'video/mp4' },
});
```
### Real-world Scenario: Processing Large Files
Consider a scenario where you need to process a large CSV file, transform the data row-by-row, and then write the results to a database or another file. With `smartstream`, you could create a pipe that reads the CSV, processes each row, and handles backpressure, ensuring efficient use of resources.
#### Example: Convert Between Stream Types
```typescript
import { SmartDuplex, createTransformFunction } from '@push.rocks/smartstream';
import fs from 'fs';
import csvParser from 'csv-parser';
import { nodewebhelpers } from '@push.rocks/smartstream';
const csvReadTransform = createTransformFunction<any, any>(async (row) => {
// Process row
return processedRow;
});
// Node → Web
const nodeReadable = fs.createReadStream('./data.bin');
const webReadable = nodewebhelpers.convertNodeReadableToWebReadable(nodeReadable);
fs.createReadStream('path/to/largeFile.csv')
.pipe(csvParser())
.pipe(csvReadTransform)
.pipe(new SmartDuplex({
async writeFunction(chunk, { push }) {
await writeToDatabase(chunk); // Assume this writes to a database
}
}))
.on('finish', () => console.log('File processed successfully.'));
// Web → Node
const nodeReadable2 = nodewebhelpers.convertWebReadableToNodeReadable(webReadable);
nodeReadable2.pipe(fs.createWriteStream('./copy.bin'));
```
This example demonstrates reading a large CSV file, transforming each row with `createTransformFunction`, and using a `SmartDuplex` to manage the processed data flow efficiently, ensuring no data is lost due to backpressure issues.
---
### Advanced Use Case: Backpressure Handling
### 🏗️ Backpressure Handling
Effective backpressure handling is crucial when working with streams to avoid overwhelming the downstream consumers. Here's a comprehensive example that demonstrates handling backpressure in a pipeline with multiple `SmartDuplex` instances:
`SmartDuplex` uses a `BackpressuredArray` internally, bounded by `highWaterMark` (default: 1). When the downstream consumer is slow, the stream automatically pauses the upstream producer until space is available — no manual bookkeeping required.
```typescript
import { SmartDuplex } from '@push.rocks/smartstream';
// Define the first SmartDuplex, which writes data slowly to simulate backpressure
const slowProcessingStream = new SmartDuplex({
name: 'SlowProcessor',
const slow = new SmartDuplex({
name: 'SlowConsumer',
objectMode: true,
writeFunction: async (chunk, { push }) => {
await new Promise(resolve => setTimeout(resolve, 100)); // Simulated delay
console.log('Processed chunk:', chunk);
push(chunk);
}
highWaterMark: 1,
writeFunction: async (chunk, tools) => {
await new Promise((resolve) => setTimeout(resolve, 200));
return chunk;
},
});
// Define the second SmartDuplex as a fast processor
const fastProcessingStream = new SmartDuplex({
name: 'FastProcessor',
const fast = new SmartDuplex({
name: 'FastProducer',
objectMode: true,
writeFunction: async (chunk, { push }) => {
console.log('Fast processing chunk:', chunk);
push(chunk);
}
writeFunction: async (chunk, tools) => {
return chunk; // Instant processing
},
});
// Create a StreamIntake to dynamically handle incoming data
const streamIntake = new StreamIntake<string>();
// Backpressure is handled automatically between fast → slow
fast.pipe(slow).on('data', (d) => console.log(d));
// Chain the streams together and handle the backpressure scenario
streamIntake
.pipe(fastProcessingStream)
.pipe(slowProcessingStream)
.pipe(createPassThrough()) // Use Pass-Through to provide intermediary handling
.on('data', data => console.log('Final output:', data))
.on('error', error => console.error('Stream encountered an error:', error));
// Simulate data pushing with intervals to observe backpressure handling
let counter = 0;
const interval = setInterval(() => {
if (counter >= 10) {
streamIntake.signalEnd();
clearInterval(interval);
} else {
streamIntake.pushData(`Chunk ${counter}`);
counter++;
}
}, 50);
for (let i = 0; i < 100; i++) {
fast.write(`chunk-${i}`);
}
fast.end();
```
In this advanced use case, a `SlowProcessor` and `FastProcessor` are created using `SmartDuplex`, simulating a situation where one stream is slower than another. The `StreamIntake` dynamically handles incoming chunks of data and the intermediary Pass-Through handles any potential interruptions.
---
### Transform Streams in Parallel
For scenarios where you need to process data in parallel:
### 🎯 Real-World Example: Processing Pipeline
```typescript
import { SmartDuplex, createTransformFunction } from '@push.rocks/smartstream';
const parallelTransform = createTransformFunction<any, any>(async (chunk) => {
// Parallel Processing
const results = await Promise.all(chunk.map(async item => await processItem(item)));
return results;
});
const streamIntake = new StreamIntake<any[]>();
streamIntake
.pipe(parallelTransform)
.pipe(new SmartDuplex({
async writeFunction(chunk, { push }) {
console.log('Processed parallel chunk:', chunk);
push(chunk);
}
}))
.on('finish', () => console.log('Parallel processing completed.'));
// Simulate data pushing
streamIntake.pushData([1, 2, 3, 4]);
streamIntake.pushData([5, 6, 7, 8]);
streamIntake.signalEnd();
```
### Error Handling in Stream Pipelines
Error handling is an essential part of working with streams. The `StreamWrapper` assists in combining multiple streams while managing errors seamlessly:
```typescript
import { StreamWrapper } from '@push.rocks/smartstream';
const faultyStream = new SmartDuplex({
async writeFunction(chunk, { push }) {
if (chunk === 'bad data') {
throw new Error('Faulty data encountered');
}
push(chunk);
}
});
const readStream = new StreamIntake<string>();
const writeStream = new SmartDuplex({
async writeFunction(chunk) {
console.log('Written chunk:', chunk);
}
});
const combinedStream = new StreamWrapper([readStream, faultyStream, writeStream]);
combinedStream.run()
.then(() => console.log('Stream processing completed.'))
.catch(err => console.error('Stream error:', err.message));
// Push Data
readStream.pushData('good data');
readStream.pushData('bad data'); // This will throw an error
readStream.pushData('more good data');
readStream.signalEnd();
```
### Testing Streams
Here's an example test case using the `tap` testing framework to verify the integrity of the `SmartDuplex` from a buffer:
```typescript
import { expect, tap } from '@push.rocks/tapbundle';
import { SmartDuplex } from '@push.rocks/smartstream';
tap.test('should create a SmartStream from a Buffer', async () => {
const bufferData = Buffer.from('This is a test buffer');
const smartStream = SmartDuplex.fromBuffer(bufferData, {});
let receivedData = Buffer.alloc(0);
return new Promise<void>((resolve) => {
smartStream.on('data', (chunk: Buffer) => {
receivedData = Buffer.concat([receivedData, chunk]);
});
smartStream.on('end', () => {
expect(receivedData.toString()).toEqual(bufferData.toString());
resolve();
});
});
});
tap.start();
```
### Working with Files and Buffers
You can easily stream files and buffers with `smartstream`. Here's a test illustrating reading and writing with file streams using `smartfile` combined with `smartstream` utilities:
```typescript
import { tap } from '@push.rocks/tapbundle';
import * as smartfile from '@push.rocks/smartfile';
import fs from 'fs';
import { SmartDuplex, StreamWrapper } from '@push.rocks/smartstream';
tap.test('should handle file read and write streams', async () => {
const readStream = smartfile.fsStream.createReadStream('./test/assets/readabletext.txt');
const writeStream = smartfile.fsStream.createWriteStream('./test/assets/writabletext.txt');
const transformStream = new SmartDuplex({
async writeFunction(chunk, { push }) {
const transformedChunk = chunk.toString().toUpperCase();
push(transformedChunk);
// Read → Transform → Filter → Write
const pipeline = new StreamWrapper([
fs.createReadStream('./access.log'),
new SmartDuplex({
writeFunction: async (chunk) => {
// Parse each line
return chunk.toString().split('\n');
},
}),
new SmartDuplex({
objectMode: true,
writeFunction: async (lines: string[], tools) => {
// Filter and push matching lines
for (const line of lines) {
if (line.includes('ERROR')) {
await tools.push(line + '\n');
}
});
const streamWrapper = new StreamWrapper([readStream, transformStream, writeStream]);
await streamWrapper.run();
const outputContent = await smartfile.fs.promises.readFile('./test/assets/writabletext.txt', 'utf-8');
console.log('Output Content:', outputContent);
});
tap.start();
```
### Modular and Scoped Transformations
Creating modular and scoped transformations is straightforward with `SmartDuplex`:
```typescript
import { SmartDuplex } from '@push.rocks/smartstream';
type DataChunk = {
id: number;
data: string;
};
const transformationStream1 = new SmartDuplex<DataChunk, DataChunk>({
async writeFunction(chunk, { push }) {
chunk.data = chunk.data.toUpperCase();
push(chunk);
}
})
},
}),
fs.createWriteStream('./errors.log'),
]);
const transformationStream2 = new SmartDuplex<DataChunk, DataChunk>({
async writeFunction(chunk, { push }) {
chunk.data = `${chunk.data} processed with transformation 2`;
push(chunk);
}
});
const initialData: DataChunk[] = [
{ id: 1, data: 'first' },
{ id: 2, data: 'second' }
];
const intakeStream = new StreamIntake<DataChunk>();
intakeStream
.pipe(transformationStream1)
.pipe(transformationStream2)
.on('data', data => console.log('Transformed Data:', data));
initialData.forEach(item => intakeStream.pushData(item));
intakeStream.signalEnd();
await pipeline.run();
console.log('Error extraction complete');
```
By leveraging `SmartDuplex`, `StreamWrapper`, and `StreamIntake`, you can streamline and enhance your data transformation pipelines in Node.js with a clear, efficient, and backpressure-friendly approach.
```
## License and Legal Information
This repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository.
This repository contains open-source code licensed under the MIT License. A copy of the license can be found in the [LICENSE](./LICENSE) file.
**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.
### Trademarks
This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.
This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH or third parties, and are not included within the scope of the MIT license granted herein.
Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines or the guidelines of the respective third-party owners, and any usage must be approved in writing. Third-party trademarks used herein are the property of their respective owners and used only in a descriptive manner, e.g. for an implementation of an API or similar.
### Company Information
Task Venture Capital GmbH
Registered at District court Bremen HRB 35230 HB, Germany
Registered at District Court Bremen HRB 35230 HB, Germany
For any legal inquiries or if you require further information, please contact us via email at hello@task.vc.
For any legal inquiries or further information, please contact us via email at hello@task.vc.
By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.

View File

@@ -65,4 +65,4 @@ tap.test('should run backpressure test', async (toolsArg) => {
await done.promise;
});
await tap.start();
export default tap.start();

View File

@@ -21,4 +21,4 @@ tap.test('should create a SmartStream from a Buffer', async () => {
});
});
tap.start();
export default tap.start();

View File

@@ -1,5 +1,5 @@
import { expect, tap } from '@push.rocks/tapbundle';
import * as smartfile from '@push.rocks/smartfile';
import * as fs from 'fs';
import * as smartstream from '../ts/index.js';
@@ -8,7 +8,7 @@ let testIntake: smartstream.StreamIntake<string>;
tap.test('should handle a read stream', async (tools) => {
const counter = 0;
const streamWrapper = new smartstream.StreamWrapper([
smartfile.fsStream.createReadStream('./test/assets/readabletext.txt'),
fs.createReadStream('./test/assets/readabletext.txt'),
new smartstream.SmartDuplex({
writeFunction: async (chunkStringArg: Buffer, streamTools) => {
// do something with the stream here
@@ -44,7 +44,7 @@ tap.test('should create a valid Intake', async (tools) => {
}
})
)
.pipe(smartfile.fsStream.createWriteStream('./test/assets/writabletext.txt'));
.pipe(fs.createWriteStream('./test/assets/writabletext.txt'));
const testFinished = tools.defer();
let counter = 0;
testIntake.pushNextObservable.subscribe(() => {
@@ -62,4 +62,4 @@ tap.test('should create a valid Intake', async (tools) => {
testIntake.signalEnd();
});
tap.start();
export default tap.start();

View File

@@ -1,4 +1,4 @@
import * as smartfile from '@push.rocks/smartfile';
import * as fs from 'fs';
import { expect, tap } from '@push.rocks/tapbundle';
import * as smartstream from '../ts/smartstream.classes.streamwrapper.js';
@@ -6,10 +6,10 @@ import * as smartstream from '../ts/smartstream.classes.streamwrapper.js';
let testSmartstream: smartstream.StreamWrapper;
tap.test('should combine a stream', async () => {
testSmartstream = new smartstream.StreamWrapper([
smartfile.fsStream.createReadStream('./test/assets/test.md'),
smartfile.fsStream.createWriteStream('./test/assets/testCopy.md'),
fs.createReadStream('./test/assets/test.md'),
fs.createWriteStream('./test/assets/testCopy.md'),
]);
await testSmartstream.run();
});
tap.start();
export default tap.start();

View File

@@ -64,4 +64,4 @@ tap.test('WebDuplexStream should handle transform with a write function', async
expect(output).toEqual(expectedOutput);
});
tap.start();
export default tap.start();

View File

@@ -3,6 +3,6 @@
*/
export const commitinfo = {
name: '@push.rocks/smartstream',
version: '3.2.5',
version: '3.3.0',
description: 'A library to simplify the creation and manipulation of Node.js streams, providing utilities for handling transform, duplex, and readable/writable streams effectively in TypeScript.'
}

3
ts/tspublish.json Normal file
View File

@@ -0,0 +1,3 @@
{
"order": 1
}

View File

@@ -3,6 +3,6 @@
*/
export const commitinfo = {
name: '@push.rocks/smartstream',
version: '3.2.5',
version: '3.3.0',
description: 'A library to simplify the creation and manipulation of Node.js streams, providing utilities for handling transform, duplex, and readable/writable streams effectively in TypeScript.'
}

View File

@@ -9,7 +9,7 @@ export {
const smartenvInstance = new smartenv.Smartenv();
await smartenvInstance.getSafeNodeModule<typeof import('stream/web')>('stream/web', async (moduleArg) => {
globalThis.ReadableStream = moduleArg.ReadableStream;
globalThis.WritableStream = moduleArg.WritableStream;
globalThis.TransformStream = moduleArg.TransformStream;
globalThis.ReadableStream = moduleArg.ReadableStream as any;
globalThis.WritableStream = moduleArg.WritableStream as any;
globalThis.TransformStream = moduleArg.TransformStream as any;
})

3
ts_web/tspublish.json Normal file
View File

@@ -0,0 +1,3 @@
{
"order": 0
}