Compare commits
113 Commits
1cb6f727af
824c44d165
3e062103f8
6451e93c12
70cf93595c
17e03e9790
e52ce7af61
f548f4b6cb
23a7a77a73
13d2fc78b8
898cc0407d
8a3f43a11a
da2191bb96
f13db1e422
42a90e804a
413e2af717
267a76af13
7834b7e6d2
ae643708e7
d9d96b8bb7
a961eea431
edb58ade28
753a481765
bbbd1b73b9
271d0be106
0ceeacd5a0
287695e445
60f9e541a5
96ea67e135
ba0a2023ad
a09c359847
e2b4d772b3
0f46b62b2d
9bf37469c6
12bb125bdc
703dc11c6c
28725d1723
c77e0f2ba6
196fb6d396
df0ddf04b3
2e1aa4a8ff
bc09033af0
22df9dfd94
d48ef6eb43
9421c652a2
a6ab15bf1d
00d1455367
116a281c6c
9bf6f251c4
e3427c2498
a400a0a04c
91392e8bd5
d161d6613a
7a14e67f4f
465ccfec40
3adb16d1f8
a9230ca790
788f2665c2
7b678cc856
12c9d8cc9d
3a2dc1c37e
1f67bc0e1e
b15ddd987c
cc43080513
49d235411f
d238662bea
8efb2b1093
4926f57d83
86552f2b1b
353a8ecde6
3e03b81a43
5e4ec5b837
62796f7151
2c1d9f05ce
34cbf28972
1b6e38c040
b135e6023a
91d01f3689
e8e067ea77
2cb490cd2a
98397bb85e
f52b0de21f
1c0e5f264d
8a3c653213
456ce78917
5277083097
8618ac55ef
ea66d1b2fb
c37f62abec
2c904cc1ec
d1561ad1b7
0ae3fee987
047c2bd402
9ed3de718f
14530f393c
15a226d30d
16c5c89662
851a96c014
4ea42cb9fb
41eed6423d
0e067004a4
9fe222b500
05e9067a34
2aff46eb0e
6aa4b86598
af30268551
e562e8f099
01f4a53b5b
f42b77986f
b7ef295757
2818420ee9
d759e2a562
65a97c9ee0
66 .gitea/workflows/default_nottags.yaml Normal file
@@ -0,0 +1,66 @@
name: Default (not tags)

on:
  push:
    tags-ignore:
      - '**'

env:
  IMAGE: registry.gitlab.com/hosttoday/ht-docker-node:npmci
  NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@gitea.lossless.digital/${{gitea.repository}}.git
  NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
  NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
  NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
  NPMCI_URL_CLOUDLY: ${{secrets.NPMCI_URL_CLOUDLY}}

jobs:
  security:
    runs-on: ubuntu-latest
    continue-on-error: true
    container:
      image: ${{ env.IMAGE }}

    steps:
      - uses: actions/checkout@v3

      - name: Install pnpm and npmci
        run: |
          pnpm install -g pnpm
          pnpm install -g @shipzone/npmci

      - name: Run npm prepare
        run: npmci npm prepare

      - name: Audit production dependencies
        run: |
          npmci command npm config set registry https://registry.npmjs.org
          npmci command pnpm audit --audit-level=high --prod
        continue-on-error: true

      - name: Audit development dependencies
        run: |
          npmci command npm config set registry https://registry.npmjs.org
          npmci command pnpm audit --audit-level=high --dev
        continue-on-error: true

  test:
    if: ${{ always() }}
    needs: security
    runs-on: ubuntu-latest
    container:
      image: ${{ env.IMAGE }}

    steps:
      - uses: actions/checkout@v3

      - name: Test stable
        run: |
          npmci node install stable
          npmci npm install
          npmci npm test

      - name: Test build
        run: |
          npmci node install stable
          npmci npm install
          npmci npm build
124
.gitea/workflows/default_tags.yaml
Normal file
124
.gitea/workflows/default_tags.yaml
Normal file
@ -0,0 +1,124 @@
|
||||
name: Default (tags)
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- '*'
|
||||
|
||||
env:
|
||||
IMAGE: registry.gitlab.com/hosttoday/ht-docker-node:npmci
|
||||
NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@gitea.lossless.digital/${{gitea.repository}}.git
|
||||
NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
|
||||
NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
|
||||
NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
|
||||
NPMCI_URL_CLOUDLY: ${{secrets.NPMCI_URL_CLOUDLY}}
|
||||
|
||||
jobs:
|
||||
security:
|
||||
runs-on: ubuntu-latest
|
||||
continue-on-error: true
|
||||
container:
|
||||
image: ${{ env.IMAGE }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Prepare
|
||||
run: |
|
||||
pnpm install -g pnpm
|
||||
pnpm install -g @shipzone/npmci
|
||||
npmci npm prepare
|
||||
|
||||
- name: Audit production dependencies
|
||||
run: |
|
||||
npmci command npm config set registry https://registry.npmjs.org
|
||||
npmci command pnpm audit --audit-level=high --prod
|
||||
continue-on-error: true
|
||||
|
||||
- name: Audit development dependencies
|
||||
run: |
|
||||
npmci command npm config set registry https://registry.npmjs.org
|
||||
npmci command pnpm audit --audit-level=high --dev
|
||||
continue-on-error: true
|
||||
|
||||
test:
|
||||
if: ${{ always() }}
|
||||
needs: security
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: ${{ env.IMAGE }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Prepare
|
||||
run: |
|
||||
pnpm install -g pnpm
|
||||
pnpm install -g @shipzone/npmci
|
||||
npmci npm prepare
|
||||
|
||||
- name: Test stable
|
||||
run: |
|
||||
npmci node install stable
|
||||
npmci npm install
|
||||
npmci npm test
|
||||
|
||||
- name: Test build
|
||||
run: |
|
||||
npmci node install stable
|
||||
npmci npm install
|
||||
npmci npm build
|
||||
|
||||
release:
|
||||
needs: test
|
||||
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: ${{ env.IMAGE }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Prepare
|
||||
run: |
|
||||
pnpm install -g pnpm
|
||||
pnpm install -g @shipzone/npmci
|
||||
npmci npm prepare
|
||||
|
||||
- name: Release
|
||||
run: |
|
||||
npmci node install stable
|
||||
npmci npm publish
|
||||
|
||||
metadata:
|
||||
needs: test
|
||||
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: ${{ env.IMAGE }}
|
||||
continue-on-error: true
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Prepare
|
||||
run: |
|
||||
pnpm install -g pnpm
|
||||
pnpm install -g @shipzone/npmci
|
||||
npmci npm prepare
|
||||
|
||||
- name: Code quality
|
||||
run: |
|
||||
npmci command npm install -g typescript
|
||||
npmci npm install
|
||||
|
||||
- name: Trigger
|
||||
run: npmci trigger
|
||||
|
||||
- name: Build docs and upload artifacts
|
||||
run: |
|
||||
npmci node install stable
|
||||
npmci npm install
|
||||
pnpm install -g @git.zone/tsdoc
|
||||
npmci command tsdoc
|
||||
continue-on-error: true
|
20 .gitignore vendored
@@ -1,4 +1,20 @@
node_modules/
.nogit/

# artifacts
coverage/
public/
pages/
coverage/

# installs
node_modules/

# caches
.yarn/
.cache/
.rpt2_cache

# builds
dist/
dist_*/

# custom
@ -1,72 +0,0 @@
|
||||
# gitzone standard
|
||||
image: hosttoday/ht-docker-node:npmci
|
||||
|
||||
cache:
|
||||
paths:
|
||||
- .yarn/
|
||||
key: "$CI_BUILD_STAGE"
|
||||
|
||||
stages:
|
||||
- test
|
||||
- release
|
||||
- trigger
|
||||
- pages
|
||||
|
||||
testLEGACY:
|
||||
stage: test
|
||||
script:
|
||||
- npmci test legacy
|
||||
coverage: /\d+.?\d+?\%\s*coverage/
|
||||
tags:
|
||||
- docker
|
||||
allow_failure: true
|
||||
|
||||
testLTS:
|
||||
stage: test
|
||||
script:
|
||||
- npmci test lts
|
||||
coverage: /\d+.?\d+?\%\s*coverage/
|
||||
tags:
|
||||
- docker
|
||||
|
||||
testSTABLE:
|
||||
stage: test
|
||||
script:
|
||||
- npmci test stable
|
||||
coverage: /\d+.?\d+?\%\s*coverage/
|
||||
tags:
|
||||
- docker
|
||||
|
||||
release:
|
||||
stage: release
|
||||
script:
|
||||
- npmci publish
|
||||
only:
|
||||
- tags
|
||||
tags:
|
||||
- docker
|
||||
|
||||
trigger:
|
||||
stage: trigger
|
||||
script:
|
||||
- npmci trigger
|
||||
only:
|
||||
- tags
|
||||
tags:
|
||||
- docker
|
||||
|
||||
pages:
|
||||
image: hosttoday/ht-docker-node:npmci
|
||||
stage: pages
|
||||
script:
|
||||
- npmci command yarn global add npmpage
|
||||
- npmci command npmpage
|
||||
tags:
|
||||
- docker
|
||||
only:
|
||||
- tags
|
||||
artifacts:
|
||||
expire_in: 1 week
|
||||
paths:
|
||||
- public
|
||||
allow_failure: true
|
11 .vscode/launch.json vendored Normal file
@@ -0,0 +1,11 @@
{
  "version": "0.2.0",
  "configurations": [
    {
      "command": "npm test",
      "name": "Run npm test",
      "request": "launch",
      "type": "node-terminal"
    }
  ]
}
26 .vscode/settings.json vendored Normal file
@@ -0,0 +1,26 @@
{
  "json.schemas": [
    {
      "fileMatch": ["/npmextra.json"],
      "schema": {
        "type": "object",
        "properties": {
          "npmci": {
            "type": "object",
            "description": "settings for npmci"
          },
          "gitzone": {
            "type": "object",
            "description": "settings for gitzone",
            "properties": {
              "projectType": {
                "type": "string",
                "enum": ["website", "element", "service", "npm", "wcc"]
              }
            }
          }
        }
      }
    }
  ]
}
47
README.md
47
README.md
@ -1,47 +0,0 @@
|
||||
# smartstream
|
||||
simplifies access to node streams, TypeScript ready!
|
||||
|
||||
## Availabililty
|
||||
[](https://www.npmjs.com/package/smartstream)
|
||||
[](https://GitLab.com/pushrocks/smartstream)
|
||||
[](https://github.com/pushrocks/smartstream)
|
||||
[](https://pushrocks.gitlab.io/smartstream/)
|
||||
|
||||
## Status for master
|
||||
[](https://GitLab.com/pushrocks/smartstream/commits/master)
|
||||
[](https://GitLab.com/pushrocks/smartstream/commits/master)
|
||||
[](https://www.npmjs.com/package/smartstream)
|
||||
[](https://david-dm.org/pushrocks/smartstream)
|
||||
[](https://www.bithound.io/github/pushrocks/smartstream/master/dependencies/npm)
|
||||
[](https://www.bithound.io/github/pushrocks/smartstream)
|
||||
[](https://nodejs.org/dist/latest-v6.x/docs/api/)
|
||||
[](https://nodejs.org/dist/latest-v6.x/docs/api/)
|
||||
[](http://standardjs.com/)
|
||||
|
||||
## Usage
|
||||
Use TypeScript for best in class instellisense.
|
||||
|
||||
## Usage
|
||||
We recommend the use of TypeScript for best in class intellisense support.
|
||||
|
||||
```typescript
|
||||
import { Smartstream } from 'smartstream'
|
||||
import * as gUglify from 'gulp-uglify'
|
||||
|
||||
let mySmartstream = new Smartstream([
|
||||
gulp.src(['./file1.js','./file2.js']),
|
||||
gUglify(),
|
||||
gulp.dest('./some/output/path')
|
||||
])
|
||||
|
||||
mySmartstream.onError((err) => { /* handle error */ }) // handles all errors in stream
|
||||
myStream.onCustomEvent('myeventname', (args...) => { /* Do something */ }) // emit an custom event anywhere in your stream
|
||||
mySmartstream.run().then(() => {/* do something when stream is finished */})
|
||||
```
|
||||
|
||||
For further information read the linked docs at the top of this README.
|
||||
|
||||
> MIT licensed | **©** [Lossless GmbH](https://lossless.gmbh)
|
||||
| By using this npm module you agree to our [privacy policy](https://lossless.gmbH/privacy.html)
|
||||
|
||||
[](https://lossless.com)
|
38
dist/index.d.ts
vendored
38
dist/index.d.ts
vendored
@ -1,38 +0,0 @@
|
||||
/// <reference types="node" />
|
||||
import { Transform } from 'stream';
|
||||
export interface IErrorFunction {
|
||||
(err: any): any;
|
||||
}
|
||||
export interface ICustomEventFunction {
|
||||
(): any;
|
||||
}
|
||||
export interface ICustomEventObject {
|
||||
eventName: string;
|
||||
eventFunction: ICustomEventFunction;
|
||||
}
|
||||
/**
|
||||
* class Smartstream handles
|
||||
*/
|
||||
export declare class Smartstream {
|
||||
private streamArray;
|
||||
private customEventObjectArray;
|
||||
private streamStartedDeferred;
|
||||
/**
|
||||
* constructor
|
||||
*/
|
||||
constructor(streamArrayArg: any[]);
|
||||
/**
|
||||
* make something with the stream itself
|
||||
*/
|
||||
streamStarted(): Promise<any>;
|
||||
/**
|
||||
* attach listener to custom event
|
||||
*/
|
||||
onCustomEvent(eventNameArg: string, eventFunctionArg: ICustomEventFunction): void;
|
||||
/**
|
||||
* run the stream
|
||||
* @returns Promise
|
||||
*/
|
||||
run(): Promise<void>;
|
||||
}
|
||||
export declare let cleanPipe: () => Transform;
|
82
dist/index.js
vendored
82
dist/index.js
vendored
@ -1,82 +0,0 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const plugins = require("./smartstream.plugins");
|
||||
/**
|
||||
* class Smartstream handles
|
||||
*/
|
||||
class Smartstream {
|
||||
/**
|
||||
* constructor
|
||||
*/
|
||||
constructor(streamArrayArg) {
|
||||
this.streamArray = [];
|
||||
this.customEventObjectArray = [];
|
||||
this.streamStartedDeferred = plugins.q.defer();
|
||||
this.streamArray = streamArrayArg;
|
||||
}
|
||||
/**
|
||||
* make something with the stream itself
|
||||
*/
|
||||
streamStarted() {
|
||||
return this.streamStartedDeferred.promise;
|
||||
}
|
||||
/**
|
||||
* attach listener to custom event
|
||||
*/
|
||||
onCustomEvent(eventNameArg, eventFunctionArg) {
|
||||
this.customEventObjectArray.push({
|
||||
eventName: eventNameArg,
|
||||
eventFunction: eventFunctionArg
|
||||
});
|
||||
}
|
||||
/**
|
||||
* run the stream
|
||||
* @returns Promise
|
||||
*/
|
||||
run() {
|
||||
let done = plugins.q.defer();
|
||||
// clone Array
|
||||
let streamExecutionArray = [];
|
||||
for (let streamItem of this.streamArray) {
|
||||
streamExecutionArray.push(streamItem);
|
||||
}
|
||||
// combine the stream
|
||||
let finalStream = null;
|
||||
let firstIteration = true;
|
||||
for (let stream of streamExecutionArray) {
|
||||
if (firstIteration === true) {
|
||||
finalStream = stream;
|
||||
}
|
||||
stream.on('error', (err) => {
|
||||
done.reject(err);
|
||||
});
|
||||
for (let customEventObject of this.customEventObjectArray) {
|
||||
stream.on(customEventObject.eventName, customEventObject.eventFunction);
|
||||
}
|
||||
if (!firstIteration) {
|
||||
finalStream = finalStream.pipe(stream);
|
||||
}
|
||||
firstIteration = false;
|
||||
}
|
||||
this.streamStartedDeferred.resolve();
|
||||
finalStream.on('end', function () {
|
||||
done.resolve();
|
||||
});
|
||||
finalStream.on('close', function () {
|
||||
done.resolve();
|
||||
});
|
||||
finalStream.on('finish', function () {
|
||||
done.resolve();
|
||||
});
|
||||
return done.promise;
|
||||
}
|
||||
}
|
||||
exports.Smartstream = Smartstream;
|
||||
exports.cleanPipe = () => {
|
||||
return plugins.through2.obj((file, enc, cb) => {
|
||||
cb();
|
||||
}, (cb) => {
|
||||
cb();
|
||||
});
|
||||
};
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi90cy9pbmRleC50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiOztBQUFBLGlEQUFnRDtBQWtCaEQ7O0dBRUc7QUFDSDtJQUtFOztPQUVHO0lBQ0gsWUFBWSxjQUFxQjtRQVB6QixnQkFBVyxHQUFHLEVBQUUsQ0FBQTtRQUNoQiwyQkFBc0IsR0FBeUIsRUFBRSxDQUFBO1FBQ2pELDBCQUFxQixHQUFHLE9BQU8sQ0FBQyxDQUFDLENBQUMsS0FBSyxFQUFFLENBQUE7UUFNL0MsSUFBSSxDQUFDLFdBQVcsR0FBRyxjQUFjLENBQUE7SUFDbkMsQ0FBQztJQUVEOztPQUVHO0lBQ0gsYUFBYTtRQUNYLE1BQU0sQ0FBQyxJQUFJLENBQUMscUJBQXFCLENBQUMsT0FBTyxDQUFBO0lBQzNDLENBQUM7SUFFRDs7T0FFRztJQUNILGFBQWEsQ0FBRSxZQUFvQixFQUFFLGdCQUFzQztRQUN6RSxJQUFJLENBQUMsc0JBQXNCLENBQUMsSUFBSSxDQUFDO1lBQy9CLFNBQVMsRUFBRSxZQUFZO1lBQ3ZCLGFBQWEsRUFBRSxnQkFBZ0I7U0FDaEMsQ0FBQyxDQUFBO0lBQ0osQ0FBQztJQUVEOzs7T0FHRztJQUNILEdBQUc7UUFDRCxJQUFJLElBQUksR0FBRyxPQUFPLENBQUMsQ0FBQyxDQUFDLEtBQUssRUFBUSxDQUFBO1FBRWxDLGNBQWM7UUFDZCxJQUFJLG9CQUFvQixHQUFHLEVBQUUsQ0FBQTtRQUM3QixHQUFHLENBQUMsQ0FBQyxJQUFJLFVBQVUsSUFBSSxJQUFJLENBQUMsV0FBVyxDQUFDLENBQUMsQ0FBQztZQUFDLG9CQUFvQixDQUFDLElBQUksQ0FBQyxVQUFVLENBQUMsQ0FBQTtRQUFDLENBQUM7UUFFbEYscUJBQXFCO1FBQ3JCLElBQUksV0FBVyxHQUFHLElBQUksQ0FBQTtRQUN0QixJQUFJLGNBQWMsR0FBWSxJQUFJLENBQUE7UUFDbEMsR0FBRyxDQUFDLENBQUMsSUFBSSxNQUFNLElBQUksb0JBQW9CLENBQUMsQ0FBQyxDQUFDO1lBQ3hDLEVBQUUsQ0FBQyxDQUFDLGNBQWMsS0FBSyxJQUFJLENBQUMsQ0FBQyxDQUFDO2dCQUM1QixXQUFXLEdBQUcsTUFBTSxDQUFBO1lBQ3RCLENBQUM7WUFDRCxNQUFNLENBQUMsRUFBRSxDQUFDLE9BQU8sRUFBRSxDQUFDLEdBQUc7Z0JBQ3JCLElBQUksQ0FBQyxNQUFNLENBQUMsR0FBRyxDQUFDLENBQUE7WUFDbEIsQ0FBQyxDQUFDLENBQUE7WUFDRixHQUFHLENBQUMsQ0FBQyxJQUFJLGlCQUFpQixJQUFJLElBQUksQ0FBQyxzQkFBc0IsQ0FBQyxDQUFDLENBQUM7Z0JBQzFELE1BQU0sQ0FBQyxFQUFFLENBQUMsaUJBQWlCLENBQUMsU0FBUyxFQUFFLGlCQUFpQixDQUFDLGFBQWEsQ0FBQyxDQUFBO1lBQ3pFLENBQUM7WUFDRCxFQUFFLENBQUMsQ0FBQyxDQUFDLGNBQWMsQ0FBQyxDQUFDLENBQUM7Z0JBQ3BCLFdBQVcsR0FBRyxXQUFXLENBQUMsSUFBSSxDQUFDLE1BQU0sQ0FBQyxDQUFBO1lBQ3hDLENBQUM7WUFDRCxjQUFjLEdBQUcsS0FBSyxDQUFBO1FBQ3hCLENBQUM7UUFFRCxJQUFJLENBQUMscUJBQXFCLENBQUMsT0FBTyxFQUFFLENBQUE7UUFFcEMsV0FBVyxDQUFDLEVBQUUsQ0FBQyxLQUFLLEVBQUU7WUFDcEIsSUFBSSxDQUFDLE9BQU8sRUFBRSxDQUFBO1FBQ2hCLENBQUMsQ0FBQyxDQUFBO1FBQ0YsV0FBVyxDQUFDLEVBQUUsQ0FBQyxPQUFPLEVBQUU7WUFDdEIsSUFBSSxDQUFDLE9BQU8sRUFBRSxDQUFBO1FBQ2hCLENBQUMsQ0FBQyxDQUFBO1FBQ0YsV0FBVyxDQUFDLEVBQUUsQ0FBQyxRQUFRLEVBQUU7WUFDdkIsSUFBSSxDQUFDLE9BQU8sRUFBRSxDQUFBO1FBQ2hCLENBQUMsQ0FBQyxDQUFBO1FBQ0YsTUFBTSxDQUFDLElBQUksQ0FBQyxPQUFPLENBQUE7SUFDckIsQ0FBQztDQUNGO0FBeEVELGtDQXdFQztBQUVVLFFBQUEsU0FBUyxHQUFHO0lBQ3JCLE1BQU0sQ0FBQyxPQUFPLENBQUMsUUFBUSxDQUFDLEdBQUcsQ0FDekIsQ0FBQyxJQUFJLEVBQUUsR0FBRyxFQUFFLEVBQUU7UUFDWixFQUFFLEVBQUUsQ0FBQTtJQUNOLENBQUMsRUFDRCxDQUFDLEVBQUU7UUFDRCxFQUFFLEVBQUUsQ0FBQTtJQUNOLENBQUMsQ0FDRixDQUFBO0FBQ0gsQ0FBQyxDQUFBIn0=
|
3 dist/smartstream.plugins.d.ts vendored
@@ -1,3 +0,0 @@
import 'typings-global';
export import q = require('smartq');
export import through2 = require('through2');
6 dist/smartstream.plugins.js vendored
@@ -1,6 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
require("typings-global");
exports.q = require("smartq");
exports.through2 = require("through2");
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoic21hcnRzdHJlYW0ucGx1Z2lucy5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uL3RzL3NtYXJ0c3RyZWFtLnBsdWdpbnMudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6Ijs7QUFBQSwwQkFBdUI7QUFDdkIsOEJBQW1DO0FBQ25DLHVDQUE0QyJ9
@ -1,47 +0,0 @@
|
||||
# smartstream
|
||||
simplifies access to node streams, TypeScript ready!
|
||||
|
||||
## Availabililty
|
||||
[](https://www.npmjs.com/package/smartstream)
|
||||
[](https://GitLab.com/pushrocks/smartstream)
|
||||
[](https://github.com/pushrocks/smartstream)
|
||||
[](https://pushrocks.gitlab.io/smartstream/)
|
||||
|
||||
## Status for master
|
||||
[](https://GitLab.com/pushrocks/smartstream/commits/master)
|
||||
[](https://GitLab.com/pushrocks/smartstream/commits/master)
|
||||
[](https://www.npmjs.com/package/smartstream)
|
||||
[](https://david-dm.org/pushrocks/smartstream)
|
||||
[](https://www.bithound.io/github/pushrocks/smartstream/master/dependencies/npm)
|
||||
[](https://www.bithound.io/github/pushrocks/smartstream)
|
||||
[](https://nodejs.org/dist/latest-v6.x/docs/api/)
|
||||
[](https://nodejs.org/dist/latest-v6.x/docs/api/)
|
||||
[](http://standardjs.com/)
|
||||
|
||||
## Usage
|
||||
Use TypeScript for best in class instellisense.
|
||||
|
||||
## Usage
|
||||
We recommend the use of TypeScript for best in class intellisense support.
|
||||
|
||||
```typescript
|
||||
import { Smartstream } from 'smartstream'
|
||||
import * as gUglify from 'gulp-uglify'
|
||||
|
||||
let mySmartstream = new Smartstream([
|
||||
gulp.src(['./file1.js','./file2.js']),
|
||||
gUglify(),
|
||||
gulp.dest('./some/output/path')
|
||||
])
|
||||
|
||||
mySmartstream.onError((err) => { /* handle error */ }) // handles all errors in stream
|
||||
myStream.onCustomEvent('myeventname', (args...) => { /* Do something */ }) // emit an custom event anywhere in your stream
|
||||
mySmartstream.run().then(() => {/* do something when stream is finished */})
|
||||
```
|
||||
|
||||
For further information read the linked docs at the top of this README.
|
||||
|
||||
> MIT licensed | **©** [Lossless GmbH](https://lossless.gmbh)
|
||||
| By using this npm module you agree to our [privacy policy](https://lossless.gmbH/privacy.html)
|
||||
|
||||
[](https://lossless.com)
|
@ -1,7 +1,39 @@
|
||||
{
|
||||
"npmci": {
|
||||
"globalNpmTools": [
|
||||
"npmts"
|
||||
]
|
||||
"npmGlobalTools": [],
|
||||
"npmAccessLevel": "public"
|
||||
},
|
||||
"gitzone": {
|
||||
"projectType": "npm",
|
||||
"module": {
|
||||
"githost": "code.foss.global",
|
||||
"gitscope": "push.rocks",
|
||||
"gitrepo": "smartstream",
|
||||
"description": "A library to simplify the creation and manipulation of Node.js streams, providing utilities for handling transform, duplex, and readable/writable streams effectively in TypeScript.",
|
||||
"npmPackagename": "@push.rocks/smartstream",
|
||||
"license": "MIT",
|
||||
"keywords": [
|
||||
"stream",
|
||||
"node.js",
|
||||
"typescript",
|
||||
"stream manipulation",
|
||||
"data processing",
|
||||
"pipeline",
|
||||
"async transformation",
|
||||
"event handling",
|
||||
"backpressure",
|
||||
"readable stream",
|
||||
"writable stream",
|
||||
"duplex stream",
|
||||
"transform stream",
|
||||
"file streaming",
|
||||
"buffer",
|
||||
"stream utilities",
|
||||
"esm"
|
||||
]
|
||||
}
|
||||
},
|
||||
"tsdoc": {
|
||||
"legal": "\n## License and Legal Information\n\nThis repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. \n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or if you require further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n"
|
||||
}
|
||||
}
|
74
package.json
74
package.json
@ -1,29 +1,73 @@
|
||||
{
|
||||
"name": "smartstream",
|
||||
"version": "1.0.11",
|
||||
"description": "simplifies access to node streams, TypeScript ready!",
|
||||
"main": "dist/index.js",
|
||||
"typings": "dist/index.d.ts",
|
||||
"name": "@push.rocks/smartstream",
|
||||
"version": "3.0.41",
|
||||
"private": false,
|
||||
"description": "A library to simplify the creation and manipulation of Node.js streams, providing utilities for handling transform, duplex, and readable/writable streams effectively in TypeScript.",
|
||||
"type": "module",
|
||||
"exports": {
|
||||
".": "./dist_ts/index.js",
|
||||
"./web": "./dist_ts_web/index.js"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "(npmts)"
|
||||
"test": "(tstest test/)",
|
||||
"build": "(tsbuild tsfolders --web --allowimplicitany)"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+ssh://git@gitlab.com/pushrocks/smartstream.git"
|
||||
"url": "https://code.foss.global/push.rocks/smartstream.git"
|
||||
},
|
||||
"author": "Lossless GmbH",
|
||||
"license": "MIT",
|
||||
"bugs": {
|
||||
"url": "https://gitlab.com/pushrocks/smartstream/issues"
|
||||
"url": "https://gitlab.com/push.rocks/smartstream/issues"
|
||||
},
|
||||
"homepage": "https://gitlab.com/pushrocks/smartstream#README",
|
||||
"homepage": "https://code.foss.global/push.rocks/smartstream",
|
||||
"devDependencies": {
|
||||
"tapbundle": "^1.1.0"
|
||||
"@git.zone/tsbuild": "^2.1.80",
|
||||
"@git.zone/tsrun": "^1.2.44",
|
||||
"@git.zone/tstest": "^1.0.90",
|
||||
"@push.rocks/smartfile": "^11.0.15",
|
||||
"@push.rocks/tapbundle": "^5.0.23",
|
||||
"@types/node": "^20.12.12"
|
||||
},
|
||||
"dependencies": {
|
||||
"@types/through2": "^2.0.33",
|
||||
"smartq": "^1.1.6",
|
||||
"through2": "^2.0.3",
|
||||
"typings-global": "^1.0.19"
|
||||
}
|
||||
"@push.rocks/lik": "^6.0.15",
|
||||
"@push.rocks/smartenv": "^5.0.12",
|
||||
"@push.rocks/smartpromise": "^4.0.3",
|
||||
"@push.rocks/smartrx": "^3.0.7"
|
||||
},
|
||||
"browserslist": [
|
||||
"last 1 chrome versions"
|
||||
],
|
||||
"files": [
|
||||
"ts/**/*",
|
||||
"ts_web/**/*",
|
||||
"dist/**/*",
|
||||
"dist_*/**/*",
|
||||
"dist_ts/**/*",
|
||||
"dist_ts_web/**/*",
|
||||
"assets/**/*",
|
||||
"cli.js",
|
||||
"npmextra.json",
|
||||
"readme.md"
|
||||
],
|
||||
"keywords": [
|
||||
"stream",
|
||||
"node.js",
|
||||
"typescript",
|
||||
"stream manipulation",
|
||||
"data processing",
|
||||
"pipeline",
|
||||
"async transformation",
|
||||
"event handling",
|
||||
"backpressure",
|
||||
"readable stream",
|
||||
"writable stream",
|
||||
"duplex stream",
|
||||
"transform stream",
|
||||
"file streaming",
|
||||
"buffer",
|
||||
"stream utilities",
|
||||
"esm"
|
||||
]
|
||||
}
|
||||
|
6894 pnpm-lock.yaml generated Normal file
File diff suppressed because it is too large
1 readme.hints.md Normal file
@@ -0,0 +1 @@
- make sure to respect backpressure handling.
375
readme.md
Normal file
375
readme.md
Normal file
@ -0,0 +1,375 @@
|
||||
```markdown
|
||||
# @push.rocks/smartstream
|
||||
A TypeScript library to simplify the creation and manipulation of Node.js streams, providing utilities for transform, duplex, and readable/writable stream handling while managing backpressure effectively.
|
||||
|
||||
## Install
|
||||
To install `@push.rocks/smartstream`, you can use npm or yarn as follows:
|
||||
|
||||
```bash
|
||||
npm install @push.rocks/smartstream --save
|
||||
# OR
|
||||
yarn add @push.rocks/smartstream
|
||||
```
|
||||
|
||||
This will add `@push.rocks/smartstream` to your project's dependencies.
|
||||
|
||||
## Usage
|
||||
|
||||
The `@push.rocks/smartstream` module is designed to simplify working with Node.js streams by providing a set of utilities for creating and manipulating streams. This module makes extensive use of TypeScript for improved code quality, readability, and maintenance. ESM syntax is utilized throughout the examples.
|
||||
|
||||
### Importing the Module
|
||||
|
||||
Start by importing the module into your TypeScript file:
|
||||
|
||||
```typescript
|
||||
import * as smartstream from '@push.rocks/smartstream';
|
||||
```
|
||||
|
||||
For a more specific import, you may do the following:
|
||||
|
||||
```typescript
|
||||
import { SmartDuplex, StreamWrapper, StreamIntake, createTransformFunction, createPassThrough } from '@push.rocks/smartstream';
|
||||
```
|
||||
|
||||
### Creating Basic Transform Streams
|
||||
|
||||
The module provides utilities for creating transform streams. For example, to create a transform stream that modifies chunks of data, you can use the `createTransformFunction` utility:
|
||||
|
||||
```typescript
|
||||
import { createTransformFunction } from '@push.rocks/smartstream';
|
||||
|
||||
const upperCaseTransform = createTransformFunction<string, string>(async (chunk) => {
|
||||
return chunk.toUpperCase();
|
||||
});
|
||||
|
||||
// Usage with pipe
|
||||
readableStream
|
||||
.pipe(upperCaseTransform)
|
||||
.pipe(writableStream);
|
||||
```
|
||||
|
||||
### Handling Backpressure with SmartDuplex
|
||||
|
||||
`SmartDuplex` is a powerful part of the `smartstream` module designed to handle backpressure effectively. Here's an example of how to create a `SmartDuplex` stream that processes data and respects the consumer's pace:
|
||||
|
||||
```typescript
|
||||
import { SmartDuplex } from '@push.rocks/smartstream';
|
||||
|
||||
const processDataDuplex = new SmartDuplex({
|
||||
async writeFunction(chunk, { push }) {
|
||||
const processedChunk = await processChunk(chunk); // Assume this is a defined asynchronous function
|
||||
push(processedChunk);
|
||||
}
|
||||
});
|
||||
|
||||
sourceStream.pipe(processDataDuplex).pipe(destinationStream);
|
||||
```
|
||||
|
||||
### Combining Multiple Streams
|
||||
|
||||
`Smartstream` facilitates easy combining of multiple streams into a single pipeline, handling errors and cleanup automatically. Here's how you can combine multiple streams:
|
||||
|
||||
```typescript
|
||||
import { StreamWrapper } from '@push.rocks/smartstream';
|
||||
|
||||
const combinedStream = new StreamWrapper([
|
||||
readStream, // Source stream
|
||||
transformStream1, // Transformation
|
||||
transformStream2, // Another transformation
|
||||
writeStream // Destination stream
|
||||
]);
|
||||
|
||||
combinedStream.run()
|
||||
.then(() => console.log('Processing completed.'))
|
||||
.catch(err => console.error('An error occurred:', err));
|
||||
```
|
||||
|
||||
### Working with StreamIntake
|
||||
|
||||
`StreamIntake` allows for more dynamic control of the reading process, facilitating scenarios where data is not continuously available:
|
||||
|
||||
```typescript
|
||||
import { StreamIntake } from '@push.rocks/smartstream';
|
||||
|
||||
const streamIntake = new StreamIntake<string>();
|
||||
|
||||
// Dynamically push data into the intake
|
||||
streamIntake.pushData('Hello, World!');
|
||||
streamIntake.pushData('Another message');
|
||||
|
||||
// Signal end when no more data is to be pushed
|
||||
streamIntake.signalEnd();
|
||||
```
|
||||
|
||||
### Real-world Scenario: Processing Large Files
|
||||
|
||||
Consider a scenario where you need to process a large CSV file, transform the data row-by-row, and then write the results to a database or another file. With `smartstream`, you could create a pipe that reads the CSV, processes each row, and handles backpressure, ensuring efficient use of resources.
|
||||
|
||||
```typescript
|
||||
import { SmartDuplex, createTransformFunction } from '@push.rocks/smartstream';
|
||||
import fs from 'fs';
|
||||
import csvParser from 'csv-parser';
|
||||
|
||||
const csvReadTransform = createTransformFunction<any, any>(async (row) => {
|
||||
// Process row
|
||||
return processedRow;
|
||||
});
|
||||
|
||||
fs.createReadStream('path/to/largeFile.csv')
|
||||
.pipe(csvParser())
|
||||
.pipe(csvReadTransform)
|
||||
.pipe(new SmartDuplex({
|
||||
async writeFunction(chunk, { push }) {
|
||||
await writeToDatabase(chunk); // Assume this writes to a database
|
||||
}
|
||||
}))
|
||||
.on('finish', () => console.log('File processed successfully.'));
|
||||
```
|
||||
|
||||
This example demonstrates reading a large CSV file, transforming each row with `createTransformFunction`, and using a `SmartDuplex` to manage the processed data flow efficiently, ensuring no data is lost due to backpressure issues.
|
||||
|
||||
### Advanced Use Case: Backpressure Handling
|
||||
|
||||
Effective backpressure handling is crucial when working with streams to avoid overwhelming the downstream consumers. Here’s a comprehensive example that demonstrates handling backpressure in a pipeline with multiple `SmartDuplex` instances:
|
||||
|
||||
```typescript
|
||||
import { SmartDuplex } from '@push.rocks/smartstream';
|
||||
|
||||
// Define the first SmartDuplex, which writes data slowly to simulate backpressure
|
||||
const slowProcessingStream = new SmartDuplex({
|
||||
name: 'SlowProcessor',
|
||||
objectMode: true,
|
||||
writeFunction: async (chunk, { push }) => {
|
||||
await new Promise(resolve => setTimeout(resolve, 100)); // Simulated delay
|
||||
console.log('Processed chunk:', chunk);
|
||||
push(chunk);
|
||||
}
|
||||
});
|
||||
|
||||
// Define the second SmartDuplex as a fast processor
|
||||
const fastProcessingStream = new SmartDuplex({
|
||||
name: 'FastProcessor',
|
||||
objectMode: true,
|
||||
writeFunction: async (chunk, { push }) => {
|
||||
console.log('Fast processing chunk:', chunk);
|
||||
push(chunk);
|
||||
}
|
||||
});
|
||||
|
||||
// Create a StreamIntake to dynamically handle incoming data
|
||||
const streamIntake = new StreamIntake<string>();
|
||||
|
||||
// Chain the streams together and handle the backpressure scenario
|
||||
streamIntake
|
||||
.pipe(fastProcessingStream)
|
||||
.pipe(slowProcessingStream)
|
||||
.pipe(createPassThrough()) // Use Pass-Through to provide intermediary handling
|
||||
.on('data', data => console.log('Final output:', data))
|
||||
.on('error', error => console.error('Stream encountered an error:', error));
|
||||
|
||||
// Simulate data pushing with intervals to observe backpressure handling
|
||||
let counter = 0;
|
||||
const interval = setInterval(() => {
|
||||
if (counter >= 10) {
|
||||
streamIntake.signalEnd();
|
||||
clearInterval(interval);
|
||||
} else {
|
||||
streamIntake.pushData(`Chunk ${counter}`);
|
||||
counter++;
|
||||
}
|
||||
}, 50);
|
||||
```
|
||||
|
||||
In this advanced use case, a `SlowProcessor` and `FastProcessor` are created using `SmartDuplex`, simulating a situation where one stream is slower than another. The `StreamIntake` dynamically handles incoming chunks of data and the intermediary Pass-Through handles any potential interruptions.
|
||||
|
||||
### Transform Streams in Parallel
|
||||
|
||||
For scenarios where you need to process data in parallel:
|
||||
|
||||
```typescript
|
||||
import { SmartDuplex, createTransformFunction } from '@push.rocks/smartstream';
|
||||
|
||||
const parallelTransform = createTransformFunction<any, any>(async (chunk) => {
|
||||
// Parallel Processing
|
||||
const results = await Promise.all(chunk.map(async item => await processItem(item)));
|
||||
return results;
|
||||
});
|
||||
|
||||
const streamIntake = new StreamIntake<any[]>();
|
||||
|
||||
streamIntake
|
||||
.pipe(parallelTransform)
|
||||
.pipe(new SmartDuplex({
|
||||
async writeFunction(chunk, { push }) {
|
||||
console.log('Processed parallel chunk:', chunk);
|
||||
push(chunk);
|
||||
}
|
||||
}))
|
||||
.on('finish', () => console.log('Parallel processing completed.'));
|
||||
|
||||
// Simulate data pushing
|
||||
streamIntake.pushData([1, 2, 3, 4]);
|
||||
streamIntake.pushData([5, 6, 7, 8]);
|
||||
streamIntake.signalEnd();
|
||||
```
|
||||
|
||||
### Error Handling in Stream Pipelines
|
||||
|
||||
Error handling is an essential part of working with streams. The `StreamWrapper` assists in combining multiple streams while managing errors seamlessly:
|
||||
|
||||
```typescript
|
||||
import { StreamWrapper } from '@push.rocks/smartstream';
|
||||
|
||||
const faultyStream = new SmartDuplex({
|
||||
async writeFunction(chunk, { push }) {
|
||||
if (chunk === 'bad data') {
|
||||
throw new Error('Faulty data encountered');
|
||||
}
|
||||
push(chunk);
|
||||
}
|
||||
});
|
||||
|
||||
const readStream = new StreamIntake<string>();
|
||||
const writeStream = new SmartDuplex({
|
||||
async writeFunction(chunk) {
|
||||
console.log('Written chunk:', chunk);
|
||||
}
|
||||
});
|
||||
|
||||
const combinedStream = new StreamWrapper([readStream, faultyStream, writeStream]);
|
||||
|
||||
combinedStream.run()
|
||||
.then(() => console.log('Stream processing completed.'))
|
||||
.catch(err => console.error('Stream error:', err.message));
|
||||
|
||||
// Push Data
|
||||
readStream.pushData('good data');
|
||||
readStream.pushData('bad data'); // This will throw an error
|
||||
readStream.pushData('more good data');
|
||||
readStream.signalEnd();
|
||||
```
|
||||
|
||||
### Testing Streams
|
||||
|
||||
Here's an example test case using the `tap` testing framework to verify the integrity of the `SmartDuplex` from a buffer:
|
||||
|
||||
```typescript
|
||||
import { expect, tap } from '@push.rocks/tapbundle';
|
||||
import { SmartDuplex } from '@push.rocks/smartstream';
|
||||
|
||||
tap.test('should create a SmartStream from a Buffer', async () => {
|
||||
const bufferData = Buffer.from('This is a test buffer');
|
||||
const smartStream = SmartDuplex.fromBuffer(bufferData, {});
|
||||
|
||||
let receivedData = Buffer.alloc(0);
|
||||
|
||||
return new Promise<void>((resolve) => {
|
||||
smartStream.on('data', (chunk: Buffer) => {
|
||||
receivedData = Buffer.concat([receivedData, chunk]);
|
||||
});
|
||||
|
||||
smartStream.on('end', () => {
|
||||
expect(receivedData.toString()).toEqual(bufferData.toString());
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
tap.start();
|
||||
```
|
||||
|
||||
### Working with Files and Buffers
|
||||
|
||||
You can easily stream files and buffers with `smartstream`. Here’s a test illustrating reading and writing with file streams using `smartfile` combined with `smartstream` utilities:
|
||||
|
||||
```typescript
|
||||
import { tap } from '@push.rocks/tapbundle';
|
||||
import * as smartfile from '@push.rocks/smartfile';
|
||||
import { SmartDuplex, StreamWrapper } from '@push.rocks/smartstream';
|
||||
|
||||
tap.test('should handle file read and write streams', async () => {
|
||||
const readStream = smartfile.fsStream.createReadStream('./test/assets/readabletext.txt');
|
||||
const writeStream = smartfile.fsStream.createWriteStream('./test/assets/writabletext.txt');
|
||||
|
||||
const transformStream = new SmartDuplex({
|
||||
async writeFunction(chunk, { push }) {
|
||||
const transformedChunk = chunk.toString().toUpperCase();
|
||||
push(transformedChunk);
|
||||
}
|
||||
});
|
||||
|
||||
const streamWrapper = new StreamWrapper([readStream, transformStream, writeStream]);
|
||||
|
||||
await streamWrapper.run();
|
||||
|
||||
const outputContent = await smartfile.fs.promises.readFile('./test/assets/writabletext.txt', 'utf-8');
|
||||
console.log('Output Content:', outputContent);
|
||||
});
|
||||
|
||||
tap.start();
|
||||
```
|
||||
|
||||
### Modular and Scoped Transformations
|
||||
|
||||
Creating modular and scoped transformations is straightforward with `SmartDuplex`:
|
||||
|
||||
```typescript
|
||||
import { SmartDuplex } from '@push.rocks/smartstream';
|
||||
|
||||
type DataChunk = {
|
||||
id: number;
|
||||
data: string;
|
||||
};
|
||||
|
||||
const transformationStream1 = new SmartDuplex<DataChunk, DataChunk>({
|
||||
async writeFunction(chunk, { push }) {
|
||||
chunk.data = chunk.data.toUpperCase();
|
||||
push(chunk);
|
||||
}
|
||||
})
|
||||
|
||||
const transformationStream2 = new SmartDuplex<DataChunk, DataChunk>({
|
||||
async writeFunction(chunk, { push }) {
|
||||
chunk.data = `${chunk.data} processed with transformation 2`;
|
||||
push(chunk);
|
||||
}
|
||||
});
|
||||
|
||||
const initialData: DataChunk[] = [
|
||||
{ id: 1, data: 'first' },
|
||||
{ id: 2, data: 'second' }
|
||||
];
|
||||
|
||||
const intakeStream = new StreamIntake<DataChunk>();
|
||||
|
||||
intakeStream
|
||||
.pipe(transformationStream1)
|
||||
.pipe(transformationStream2)
|
||||
.on('data', data => console.log('Transformed Data:', data));
|
||||
|
||||
initialData.forEach(item => intakeStream.pushData(item));
|
||||
intakeStream.signalEnd();
|
||||
```
|
||||
|
||||
By leveraging `SmartDuplex`, `StreamWrapper`, and `StreamIntake`, you can streamline and enhance your data transformation pipelines in Node.js with a clear, efficient, and backpressure-friendly approach.
|
||||
```
|
||||
|
||||
|
||||
## License and Legal Information
|
||||
|
||||
This repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository.
|
||||
|
||||
**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.
|
||||
|
||||
### Trademarks
|
||||
|
||||
This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.
|
||||
|
||||
### Company Information
|
||||
|
||||
Task Venture Capital GmbH
|
||||
Registered at District court Bremen HRB 35230 HB, Germany
|
||||
|
||||
For any legal inquiries or if you require further information, please contact us via email at hello@task.vc.
|
||||
|
||||
By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.
|
0 readme_instructions.md Normal file
6210 test/assets/readabletext.txt Normal file
File diff suppressed because it is too large
50
test/assets/writabletext.txt
Normal file
50
test/assets/writabletext.txt
Normal file
@ -0,0 +1,50 @@
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
||||
hi+wow
|
15 test/test.ts
@@ -1,15 +0,0 @@
import fs = require('fs')
import { expect, tap } from 'tapbundle'

import * as smartstream from '../dist/index'

let testSmartstream: smartstream.Smartstream
tap.test('should combine a stream', async () => {
  testSmartstream = new smartstream.Smartstream([
    fs.createReadStream('./test/assets/test.md'),
    fs.createWriteStream('./test/assets/testCopy.md')
  ])
  await testSmartstream.run()
})

tap.start()
68
test/test.ts.backpressure.ts
Normal file
68
test/test.ts.backpressure.ts
Normal file
@ -0,0 +1,68 @@
|
||||
import { tap, expect } from '@push.rocks/tapbundle';
|
||||
import { SmartDuplex, type ISmartDuplexOptions, StreamWrapper } from '../ts/index.js';
|
||||
|
||||
tap.test('should run backpressure test', async (toolsArg) => {
|
||||
const done = toolsArg.defer();
|
||||
async function testBackpressure() {
|
||||
const stream1 = new SmartDuplex({
|
||||
name: 'stream1',
|
||||
objectMode: true,
|
||||
writeFunction: async (chunk, tools) => {
|
||||
await new Promise((resolve) => setTimeout(resolve, 10)); // Slow processing
|
||||
console.log(`processed chunk ${chunk} in stream 1`);
|
||||
return chunk; // Fast processing
|
||||
},
|
||||
});
|
||||
const stream2 = new SmartDuplex({
|
||||
name: 'stream2',
|
||||
objectMode: true,
|
||||
writeFunction: async (chunk, tools) => {
|
||||
await new Promise((resolve) => setTimeout(resolve, 20)); // Slow processing
|
||||
console.log(`processed chunk ${chunk} in stream 2`);
|
||||
await tools.push(chunk);
|
||||
// return chunk, optionally return ;
|
||||
},
|
||||
}); // This stream processes data more slowly
|
||||
const stream3 = new SmartDuplex({
|
||||
objectMode: true,
|
||||
name: 'stream3',
|
||||
writeFunction: async (chunk, tools) => {
|
||||
await new Promise((resolve) => setTimeout(resolve, 200)); // Slow processing
|
||||
console.log(`processed chunk ${chunk} in stream 3`);
|
||||
},
|
||||
});
|
||||
|
||||
stream1.pipe(stream2).pipe(stream3);
|
||||
|
||||
let backpressured = false;
|
||||
for (let i = 0; i < 200; i++) {
|
||||
const canContinue = stream1.write(`Chunk ${i}`, 'utf8');
|
||||
if (!canContinue) {
|
||||
backpressured = true;
|
||||
console.log(`Backpressure at chunk ${i}`);
|
||||
}
|
||||
}
|
||||
|
||||
stream1.end();
|
||||
|
||||
stream1.on('finish', () => {
|
||||
console.log('Stream 1 finished processing.');
|
||||
});
|
||||
stream2.on('finish', () => {
|
||||
console.log('Stream 2 finished processing.');
|
||||
});
|
||||
stream3.on('finish', () => {
|
||||
console.log('Stream 3 finished processing.');
|
||||
if (!backpressured) {
|
||||
throw new Error('No backpressure was observed.');
|
||||
} else {
|
||||
done.resolve();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
testBackpressure();
|
||||
await done.promise;
|
||||
});
|
||||
|
||||
await tap.start();
|
24 test/test.ts.smartstream.ts Normal file
@@ -0,0 +1,24 @@
import { expect, tap } from '@push.rocks/tapbundle';
import { SmartDuplex } from '../ts/smartstream.classes.smartduplex.js'; // Adjust the import to your file structure
import * as smartrx from '@push.rocks/smartrx';
import * as fs from 'fs';

tap.test('should create a SmartStream from a Buffer', async () => {
  const bufferData = Buffer.from('This is a test buffer');
  const smartStream = SmartDuplex.fromBuffer(bufferData, {});

  let receivedData = Buffer.alloc(0);

  return new Promise<void>((resolve) => {
    smartStream.on('data', (chunk: Buffer) => {
      receivedData = Buffer.concat([receivedData, chunk]);
    });

    smartStream.on('end', () => {
      expect(receivedData.toString()).toEqual(bufferData.toString());
      resolve();
    });
  });
});

tap.start();
65
test/test.ts.streamfunction.ts
Normal file
65
test/test.ts.streamfunction.ts
Normal file
@ -0,0 +1,65 @@
|
||||
import { expect, tap } from '@push.rocks/tapbundle';
|
||||
import * as smartfile from '@push.rocks/smartfile';
|
||||
|
||||
import * as smartstream from '../ts/index.js';
|
||||
|
||||
let testIntake: smartstream.StreamIntake<string>;
|
||||
|
||||
tap.test('should handle a read stream', async (tools) => {
|
||||
const counter = 0;
|
||||
const streamWrapper = new smartstream.StreamWrapper([
|
||||
smartfile.fsStream.createReadStream('./test/assets/readabletext.txt'),
|
||||
new smartstream.SmartDuplex({
|
||||
writeFunction: async (chunkStringArg: Buffer, streamTools) => {
|
||||
// do something with the stream here
|
||||
const result = chunkStringArg.toString().substr(0, 100);
|
||||
streamTools.push('wow =========== \n');
|
||||
return Buffer.from(result);
|
||||
},
|
||||
finalFunction: async (tools) => {
|
||||
return Buffer.from('this is the end');
|
||||
},
|
||||
}),
|
||||
new smartstream.SmartDuplex({
|
||||
writeFunction: async (chunkStringArg) => {
|
||||
console.log(chunkStringArg.toString());
|
||||
},
|
||||
finalFunction: async (tools) => {
|
||||
tools.push(null);
|
||||
},
|
||||
})
|
||||
]);
|
||||
await streamWrapper.run();
|
||||
});
|
||||
|
||||
tap.test('should create a valid Intake', async (tools) => {
|
||||
testIntake = new smartstream.StreamIntake<string>();
|
||||
testIntake.pipe(
|
||||
new smartstream.SmartDuplex({
|
||||
objectMode: true,
|
||||
writeFunction: async (chunkStringArg: string, streamTools) => {
|
||||
await tools.delayFor(100);
|
||||
console.log(chunkStringArg);
|
||||
return chunkStringArg;
|
||||
}
|
||||
})
|
||||
)
|
||||
.pipe(smartfile.fsStream.createWriteStream('./test/assets/writabletext.txt'));
|
||||
const testFinished = tools.defer();
|
||||
let counter = 0;
|
||||
testIntake.pushNextObservable.subscribe(() => {
|
||||
if (counter < 50) {
|
||||
counter++;
|
||||
testIntake.pushData('hi');
|
||||
testIntake.pushData('+wow');
|
||||
testIntake.pushData('\n');
|
||||
} else {
|
||||
testIntake.signalEnd();
|
||||
testFinished.resolve();
|
||||
}
|
||||
});
|
||||
await testFinished.promise;
|
||||
testIntake.signalEnd();
|
||||
});
|
||||
|
||||
tap.start();
|
15 test/test.ts.ts Normal file
@@ -0,0 +1,15 @@
import * as smartfile from '@push.rocks/smartfile';
import { expect, tap } from '@push.rocks/tapbundle';

import * as smartstream from '../ts/smartstream.classes.streamwrapper.js';

let testSmartstream: smartstream.StreamWrapper;
tap.test('should combine a stream', async () => {
  testSmartstream = new smartstream.StreamWrapper([
    smartfile.fsStream.createReadStream('./test/assets/test.md'),
    smartfile.fsStream.createWriteStream('./test/assets/testCopy.md'),
  ]);
  await testSmartstream.run();
});

tap.start();
70
test/test.ts_web.both.ts
Normal file
70
test/test.ts_web.both.ts
Normal file
@ -0,0 +1,70 @@
|
||||
import { expect, expectAsync, tap } from '@push.rocks/tapbundle';
|
||||
import * as webstream from '../ts_web/index.js';
|
||||
|
||||
tap.test('WebDuplexStream', async (toolsArg) => {
|
||||
const testDone = toolsArg.defer(); // Create a deferred object to control test completion.
|
||||
const inputUint8Array = new Uint8Array([1, 2, 3, 4, 5]);
|
||||
const stream = webstream.WebDuplexStream.fromUInt8Array(inputUint8Array);
|
||||
|
||||
const reader = stream.readable.getReader();
|
||||
let readUint8Array = new Uint8Array();
|
||||
|
||||
reader.read().then(function processText({ done, value }) {
|
||||
if (done) {
|
||||
expect(readUint8Array).toEqual(inputUint8Array);
|
||||
testDone.resolve(); // Correctly signal that the test is done.
|
||||
return;
|
||||
}
|
||||
readUint8Array = new Uint8Array([...readUint8Array, ...value]);
|
||||
return reader.read().then(processText);
|
||||
});
|
||||
|
||||
return testDone.promise; // Return the promise to properly wait for the test to complete.
|
||||
});
|
||||
|
||||
|
||||
tap.test('should handle transform with a write function', async (toolsArg) => {
|
||||
const testDone = toolsArg.defer();
|
||||
const input = [1, 2, 3, 4, 5];
|
||||
const expectedOutput = [2, 4, 6, 8, 10];
|
||||
|
||||
const transformStream = new webstream.WebDuplexStream<number, number>({
|
||||
writeFunction: (chunk, { push }) => {
|
||||
push(chunk * 2); // Push the doubled number into the stream
|
||||
return Promise.resolve(); // Resolve the promise immediately
|
||||
},
|
||||
});
|
||||
|
||||
const writableStream = transformStream.writable.getWriter();
|
||||
const readableStream = transformStream.readable.getReader();
|
||||
|
||||
const output: number[] = [];
|
||||
|
||||
// Process the text and resolve the test once done.
|
||||
const processText = async ({ done, value }) => {
|
||||
if (done) {
|
||||
expect(output).toEqual(expectedOutput);
|
||||
testDone.resolve(); // Resolve the deferred test once all values have been read.
|
||||
return;
|
||||
}
|
||||
if (value !== undefined) {
|
||||
output.push(value);
|
||||
}
|
||||
// Continue reading and processing.
|
||||
await readableStream.read().then(processText);
|
||||
};
|
||||
|
||||
// Start the read process before writing to the stream.
|
||||
readableStream.read().then(processText);
|
||||
|
||||
// Sequentially write to the stream and close when done.
|
||||
for (const num of input) {
|
||||
await writableStream.write(num);
|
||||
}
|
||||
await writableStream.close();
|
||||
|
||||
return testDone.promise; // This will wait until the testDone is resolved before completing the test.
|
||||
});
|
||||
|
||||
|
||||
tap.start();
|
8 ts/00_commitinfo_data.ts Normal file
@@ -0,0 +1,8 @@
/**
 * autocreated commitinfo by @pushrocks/commitinfo
 */
export const commitinfo = {
  name: '@push.rocks/smartstream',
  version: '3.0.41',
  description: 'A library to simplify the creation and manipulation of Node.js streams, providing utilities for handling transform, duplex, and readable/writable streams effectively in TypeScript.'
}
109
ts/index.ts
109
ts/index.ts
@ -1,105 +1,8 @@
|
||||
import * as plugins from './smartstream.plugins'
|
||||
export * from './smartstream.classes.smartduplex.js';
|
||||
export * from './smartstream.classes.streamwrapper.js';
|
||||
export * from './smartstream.classes.streamintake.js';
|
||||
|
||||
// interfaces
|
||||
import { Transform } from 'stream'
|
||||
export * from './smartstream.functions.js';
|
||||
|
||||
export interface IErrorFunction {
|
||||
(err): any
|
||||
}
|
||||
|
||||
export interface ICustomEventFunction {
|
||||
(): any
|
||||
}
|
||||
|
||||
export interface ICustomEventObject {
|
||||
eventName: string
|
||||
eventFunction: ICustomEventFunction
|
||||
}
|
||||
|
||||
/**
|
||||
* class Smartstream handles
|
||||
*/
|
||||
export class Smartstream {
|
||||
private streamArray = []
|
||||
private customEventObjectArray: ICustomEventObject[] = []
|
||||
private streamStartedDeferred = plugins.q.defer()
|
||||
|
||||
/**
|
||||
* constructor
|
||||
*/
|
||||
constructor(streamArrayArg: any[]) {
|
||||
this.streamArray = streamArrayArg
|
||||
}
|
||||
|
||||
/**
|
||||
* make something with the stream itself
|
||||
*/
|
||||
streamStarted (): Promise<any> {
|
||||
return this.streamStartedDeferred.promise
|
||||
}
|
||||
|
||||
/**
|
||||
* attach listener to custom event
|
||||
*/
|
||||
onCustomEvent (eventNameArg: string, eventFunctionArg: ICustomEventFunction) {
|
||||
this.customEventObjectArray.push({
|
||||
eventName: eventNameArg,
|
||||
eventFunction: eventFunctionArg
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* run the stream
|
||||
* @returns Promise
|
||||
*/
|
||||
run (): Promise<void> {
|
||||
let done = plugins.q.defer<void>()
|
||||
|
||||
// clone Array
|
||||
let streamExecutionArray = []
|
||||
for (let streamItem of this.streamArray) { streamExecutionArray.push(streamItem) }
|
||||
|
||||
// combine the stream
|
||||
let finalStream = null
|
||||
let firstIteration: boolean = true
|
||||
for (let stream of streamExecutionArray) {
|
||||
if (firstIteration === true) {
|
||||
finalStream = stream
|
||||
}
|
||||
stream.on('error', (err) => {
|
||||
done.reject(err)
|
||||
})
|
||||
for (let customEventObject of this.customEventObjectArray) {
|
||||
stream.on(customEventObject.eventName, customEventObject.eventFunction)
|
||||
}
|
||||
if (!firstIteration) {
|
||||
finalStream = finalStream.pipe(stream)
|
||||
}
|
||||
firstIteration = false
|
||||
}
|
||||
|
||||
this.streamStartedDeferred.resolve()
|
||||
|
||||
finalStream.on('end', function () {
|
||||
done.resolve()
|
||||
})
|
||||
finalStream.on('close', function () {
|
||||
done.resolve()
|
||||
})
|
||||
finalStream.on('finish', function () {
|
||||
done.resolve()
|
||||
})
|
||||
return done.promise
|
||||
}
|
||||
}
|
||||
|
||||
export let cleanPipe = () => {
|
||||
return plugins.through2.obj(
|
||||
(file, enc, cb) => {
|
||||
cb()
|
||||
},
|
||||
(cb) => {
|
||||
cb()
|
||||
}
|
||||
)
|
||||
}
|
||||
import * as plugins from './smartstream.plugins.js';
|
||||
export const webstream = plugins.webstream;
|
||||
|
209
ts/smartstream.classes.smartduplex.ts
Normal file
209
ts/smartstream.classes.smartduplex.ts
Normal file
@ -0,0 +1,209 @@
import * as plugins from './smartstream.plugins.js';
import { Duplex, type DuplexOptions } from 'stream';

export interface IStreamTools {
  truncate: () => void;
  push: (pipeObject: any) => Promise<boolean>;
}

export interface IStreamWriteFunction<T, rT> {
  (chunkArg: T, toolsArg: IStreamTools): Promise<rT>;
}

export interface IStreamFinalFunction<rT> {
  (toolsArg: IStreamTools): Promise<rT>;
}

export interface ISmartDuplexOptions<TInput, TOutput> extends DuplexOptions {
  /**
   * whether to print debug logs
   */
  debug?: boolean;
  /**
   * the name of the stream
   */
  name?: string;
  /**
   * a function that is being called to read more data from wherever, to be processed by the stream
   * @returns
   */
  readFunction?: () => Promise<void>;

  /**
   * the write function is called for every chunk that is being written to the stream
   * it can push or return chunks (but does not have to) to be written to the readable side of the stream
   */
  writeFunction?: IStreamWriteFunction<TInput, TOutput>;

  /**
   * a final function that is run at the end of the stream
   */
  finalFunction?: IStreamFinalFunction<TOutput>;
}

export class SmartDuplex<TInput = any, TOutput = any> extends Duplex {
  // STATIC
  static fromBuffer(buffer: Buffer, options?: ISmartDuplexOptions<any, any>): SmartDuplex {
    const smartDuplex = new SmartDuplex(options);
    process.nextTick(() => {
      smartDuplex.push(buffer);
      smartDuplex.push(null); // Signal the end of the data
    });
    return smartDuplex;
  }

  // INSTANCE
  private backpressuredArray: plugins.lik.BackpressuredArray<TOutput>;
  public options: ISmartDuplexOptions<TInput, TOutput>;
  private observableSubscription?: plugins.smartrx.rxjs.Subscription;
  private debugLog(messageArg: string) {
    if (this.options.debug) {
      console.log(messageArg);
    }
  }

  constructor(optionsArg?: ISmartDuplexOptions<TInput, TOutput>) {
    super(Object.assign({
      highWaterMark: 1,
    }, optionsArg));
    this.options = optionsArg;
    this.backpressuredArray = new plugins.lik.BackpressuredArray<TOutput>(this.options.highWaterMark || 1)
  }

  public async _read(size: number): Promise<void> {
    this.debugLog(`${this.options.name}: read was called`);
    await this.backpressuredArray.waitForItems();
    this.debugLog(`${this.options.name}: successfully waited for items.`);
    if (this.options.readFunction) {
      await this.options.readFunction();
    }
    let canPushMore = true;
    while(this.backpressuredArray.data.length > 0 && canPushMore) {
      const nextChunk = this.backpressuredArray.shift();
      canPushMore = this.push(nextChunk);
    }
  }

  public async backpressuredPush (pushArg: TOutput) {
    const canPushMore = this.backpressuredArray.push(pushArg);
    if (!canPushMore) {
      this.debugLog(`${this.options.name}: cannot push more`);
      await this.backpressuredArray.waitForSpace();
      this.debugLog(`${this.options.name}: can push more again`);
    }
    return canPushMore;
  };

  private asyncWritePromiseObjectmap = new plugins.lik.ObjectMap<Promise<any>>();
  // Ensure the _write method types the chunk as TInput and encodes TOutput
  public async _write(chunk: TInput, encoding: string, callback: (error?: Error | null) => void) {
    if (!this.options.writeFunction) {
      return callback(new Error('No stream function provided'));
    }

    let isTruncated = false;
    const tools: IStreamTools = {
      truncate: () => {
        this.push(null);
        isTruncated = true;
        callback();
      },
      push: async (pushArg: TOutput) => {
        return await this.backpressuredPush(pushArg);
      }
    };

    try {
      const writeDeferred = plugins.smartpromise.defer();
      this.asyncWritePromiseObjectmap.add(writeDeferred.promise);
      const modifiedChunk = await this.options.writeFunction(chunk, tools);
      if (isTruncated) {
        return;
      }
      if (modifiedChunk) {
        await tools.push(modifiedChunk);
      }
      callback();
      writeDeferred.resolve();
      writeDeferred.promise.then(() => {
        this.asyncWritePromiseObjectmap.remove(writeDeferred.promise);
      });
    } catch (err) {
      callback(err);
    }
  }

  public async _final(callback: (error?: Error | null) => void) {
    await Promise.all(this.asyncWritePromiseObjectmap.getArray());
    if (this.options.finalFunction) {
      const tools: IStreamTools = {
        truncate: () => callback(),
        push: async (pipeObject) => {
          return this.backpressuredArray.push(pipeObject);
        },
      };

      try {
        const finalChunk = await this.options.finalFunction(tools);
        if (finalChunk) {
          this.backpressuredArray.push(finalChunk);
        }
      } catch (err) {
        this.backpressuredArray.push(null);
        callback(err);
        return;
      }
    }
    this.backpressuredArray.push(null);
    callback();
  }

  public async getWebStreams(): Promise<{ readable: ReadableStream, writable: WritableStream }> {
    const duplex = this;
    const readable = new ReadableStream({
      start(controller) {
        duplex.on('readable', () => {
          let chunk;
          while (null !== (chunk = duplex.read())) {
            controller.enqueue(chunk);
          }
        });

        duplex.on('end', () => {
          controller.close();
        });
      },
      cancel(reason) {
        duplex.destroy(new Error(reason));
      }
    });

    const writable = new WritableStream({
      write(chunk) {
        return new Promise<void>((resolve, reject) => {
          const isBackpressured = !duplex.write(chunk, (error) => {
            if (error) {
              reject(error);
            } else {
              resolve();
            }
          });

          if (isBackpressured) {
            duplex.once('drain', resolve);
          }
        });
      },
      close() {
        return new Promise<void>((resolve, reject) => {
          duplex.end(resolve);
        });
      },
      abort(reason) {
        duplex.destroy(new Error(reason));
      }
    });

    return { readable, writable };
  }
}
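For orientation, a minimal usage sketch of the class above. The stream name and the upper-casing logic are illustrative only; the sketch relies on the behaviour defined in this file, namely that a chunk returned from writeFunction is pushed to the readable side:

import { SmartDuplex } from '@push.rocks/smartstream';

// upper-case every incoming string chunk (illustrative example)
const upperCaser = new SmartDuplex<string, string>({
  objectMode: true,
  name: 'upperCaser',
  writeFunction: async (chunk, tools) => {
    return chunk.toUpperCase(); // returned chunks are pushed to the readable side
  },
  finalFunction: async (tools) => {
    return 'DONE'; // one trailing chunk before the stream ends
  },
});

upperCaser.on('data', (data: string) => console.log(data)); // HELLO, WORLD, DONE
upperCaser.write('hello');
upperCaser.write('world');
upperCaser.end();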
52
ts/smartstream.classes.streamintake.ts
Normal file
@ -0,0 +1,52 @@
import * as plugins from './smartstream.plugins.js';

export class StreamIntake<T> extends plugins.stream.Readable {
  private signalEndBoolean = false;
  private chunkStore: T[] = [];
  public pushNextObservable = new plugins.smartrx.ObservableIntake<any>();
  private pushedNextDeferred = plugins.smartpromise.defer();

  constructor(options?: plugins.stream.ReadableOptions) {
    super({ ...options, objectMode: true }); // Ensure that we are in object mode.
    this.pushNextObservable.push('please push next');
  }

  _read(size: number): void {
    // console.log('get next');
    const pushChunk = (): void => {
      while (this.chunkStore.length > 0) {
        // If push returns false, then we should stop reading
        if (!this.push(this.chunkStore.shift())) {
          return;
        }
      }

      if (this.chunkStore.length === 0) {
        if (this.signalEndBoolean) {
          // If we're done, push null to signal the end of the stream
          this.push(null);
        } else {
          // Ask for more data and wait
          this.pushNextObservable.push('please push next');
          this.pushedNextDeferred.promise.then(() => {
            this.pushedNextDeferred = plugins.smartpromise.defer(); // Reset the deferred
            pushChunk(); // Try pushing the next chunk
          });
        }
      }
    };

    pushChunk();
  }

  public pushData(chunkData: T) {
    this.chunkStore.push(chunkData);
    this.pushedNextDeferred.resolve();
  }

  public signalEnd() {
    this.signalEndBoolean = true;
    this.pushedNextDeferred.resolve();
    this.pushNextObservable.signalComplete();
  }
}
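A minimal sketch of how StreamIntake is fed from the producer side; the object shape and ids are made up for illustration:

import { StreamIntake } from '@push.rocks/smartstream';

const intake = new StreamIntake<{ id: number }>();

intake.on('data', (item: { id: number }) => console.log('received', item.id));
intake.on('end', () => console.log('intake finished'));

intake.pushData({ id: 1 });
intake.pushData({ id: 2 });
intake.signalEnd(); // pushes null once the chunk store is drained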
96
ts/smartstream.classes.streamwrapper.ts
Normal file
@ -0,0 +1,96 @@
import * as plugins from './smartstream.plugins.js';

// interfaces
import { Transform } from 'stream';

export interface IErrorFunction {
  (err: Error): any;
}

export interface ICustomEventFunction {
  (): any;
}

export interface ICustomEventObject {
  eventName: string;
  eventFunction: ICustomEventFunction;
}

/**
 * class StreamWrapper wraps an array of streams and pipes them into one another
 */
export class StreamWrapper {
  private streamArray: Array<plugins.stream.Duplex> = [];
  private customEventObjectArray: ICustomEventObject[] = [];
  private streamStartedDeferred = plugins.smartpromise.defer();

  /**
   * constructor
   */
  constructor(streamArrayArg: any[]) {
    this.streamArray = streamArrayArg;
  }

  /**
   * make something with the stream itself
   */
  streamStarted(): Promise<any> {
    return this.streamStartedDeferred.promise;
  }

  /**
   * attach listener to custom event
   */
  onCustomEvent(eventNameArg: string, eventFunctionArg: ICustomEventFunction) {
    this.customEventObjectArray.push({
      eventName: eventNameArg,
      eventFunction: eventFunctionArg,
    });
  }

  /**
   * run the stream
   * @returns Promise
   */
  run(): Promise<void> {
    const done = plugins.smartpromise.defer<void>();

    // clone Array
    const streamExecutionArray: Array<plugins.stream.Duplex> = [];
    for (const streamItem of this.streamArray) {
      streamExecutionArray.push(streamItem);
    }

    // combine the stream
    let finalStream = null;
    let firstIteration: boolean = true;
    for (const stream of streamExecutionArray) {
      if (firstIteration === true) {
        finalStream = stream;
      }
      stream.on('error', (err) => {
        done.reject(err);
      });
      for (const customEventObject of this.customEventObjectArray) {
        stream.on(customEventObject.eventName, customEventObject.eventFunction);
      }
      if (!firstIteration) {
        finalStream = finalStream.pipe(stream);
      }
      firstIteration = false;
    }

    this.streamStartedDeferred.resolve();

    finalStream.on('end', () => {
      done.resolve();
    });
    finalStream.on('close', () => {
      done.resolve();
    });
    finalStream.on('finish', () => {
      done.resolve();
    });
    return done.promise;
  }
}
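A sketch of wiring the wrapper together with the other classes in this change set. The chunk values are illustrative, and the example assumes the wrapped streams behave as defined above, i.e. run() resolves once the last stream in the chain ends, closes, or finishes:

import { StreamWrapper, StreamIntake, SmartDuplex } from '@push.rocks/smartstream';

const intake = new StreamIntake<string>();
const sink = new SmartDuplex<string, any>({
  objectMode: true,
  writeFunction: async (chunk) => {
    console.log(chunk); // consume the chunk; returning nothing pushes nothing downstream
  },
});

const wrapper = new StreamWrapper([intake, sink]);
wrapper.run().then(() => console.log('pipeline done'));

intake.pushData('hello');
intake.pushData('world');
intake.signalEnd();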
30
ts/smartstream.functions.ts
Normal file
@ -0,0 +1,30 @@
import { Transform, type TransformCallback, type TransformOptions } from 'stream';
import { SmartDuplex } from './smartstream.classes.smartduplex.js';

export interface AsyncTransformFunction<TInput, TOutput> {
  (chunkArg: TInput): Promise<TOutput>;
}

export function createTransformFunction<TInput, TOutput>(
  asyncFunction: AsyncTransformFunction<TInput, TOutput>,
  options?: TransformOptions
): SmartDuplex {
  const smartDuplexStream = new SmartDuplex({
    ...options,
    writeFunction: async (chunkArg, toolsArg) => {
      const result = await asyncFunction(chunkArg);
      return result;
    }
  });

  return smartDuplexStream;
}

export const createPassThrough = () => {
  return new SmartDuplex({
    objectMode: true,
    writeFunction: async (chunkArg, toolsArg) => {
      return chunkArg;
    }
  })
}
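createTransformFunction and createPassThrough wrap SmartDuplex for the common single-function case. A minimal sketch; the JSON-parsing transform is an illustrative assumption, not part of this change:

import { createTransformFunction } from '@push.rocks/smartstream';

// parse JSON lines into objects (illustrative)
const jsonParser = createTransformFunction<string, unknown>(async (line) => {
  return JSON.parse(line);
}, { objectMode: true });

jsonParser.on('data', (obj) => console.log(obj)); // { hello: 'world' }
jsonParser.write('{"hello":"world"}');
jsonParser.end();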
ts/smartstream.plugins.ts
@ -1,3 +1,13 @@
import 'typings-global'
export import q = require('smartq')
export import through2 = require('through2')
// node native
import * as stream from 'stream';

export { stream };

// pushrocks scope
import * as lik from '@push.rocks/lik';
import * as smartpromise from '@push.rocks/smartpromise';
import * as smartrx from '@push.rocks/smartrx';
import * as webstream from '../dist_ts_web/index.js';

export { lik, smartpromise, smartrx, webstream };
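The plugins module is the single place where external dependencies enter; the other files consume it as a namespace. The pattern, taken from smartstream.classes.smartduplex.ts above (the constructed values here are only examples):

import * as plugins from './smartstream.plugins.js';

const done = plugins.smartpromise.defer<void>();          // deferred promises
const pending = new plugins.lik.ObjectMap<Promise<any>>(); // collections from @push.rocks/lik
const readable = new plugins.stream.Readable({ objectMode: true, read() {} }); // node:stream re-export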
8
ts_web/00_commitinfo_data.ts
Normal file
@ -0,0 +1,8 @@
/**
 * autocreated commitinfo by @pushrocks/commitinfo
 */
export const commitinfo = {
  name: '@push.rocks/smartstream',
  version: '3.0.41',
  description: 'A library to simplify the creation and manipulation of Node.js streams, providing utilities for handling transform, duplex, and readable/writable streams effectively in TypeScript.'
}
156
ts_web/classes.webduplexstream.ts
Normal file
@ -0,0 +1,156 @@
import * as plugins from './plugins.js';


// ========================================
// READ
// ========================================
export interface IStreamToolsRead<TInput, TOutput> {
  done: () => void;
  write: (writeArg: TInput) => void;
}

/**
 * the read function is called whenever
 * -> the WebDuplexStream is being read from
 * while nothing is currently enqueued
 */
export interface IStreamReadFunction<TInput, TOutput> {
  (toolsArg: IStreamToolsRead<TInput, TOutput>): Promise<void>;
}

// ========================================
// WRITE
// ========================================
export interface IStreamToolsWrite<TInput, TOutput> {
  truncate: () => void;
  push: (pushArg: TOutput) => void;
}

/**
 * the write function can return something.
 * It is called anytime a chunk is written to the stream.
 */
export interface IStreamWriteFunction<TInput, TOutput> {
  (chunkArg: TInput, toolsArg: IStreamToolsWrite<TInput, TOutput>): Promise<any>;
}

export interface IStreamFinalFunction<TInput, TOutput> {
  (toolsArg: IStreamToolsWrite<TInput, TOutput>): Promise<TOutput>;
}

export interface WebDuplexStreamOptions<TInput, TOutput> {
  readFunction?: IStreamReadFunction<TInput, TOutput>;
  writeFunction?: IStreamWriteFunction<TInput, TOutput>;
  finalFunction?: IStreamFinalFunction<TInput, TOutput>;
}

export class WebDuplexStream<TInput = any, TOutput = any> extends TransformStream<TInput, TOutput> {
  static fromUInt8Array(uint8Array: Uint8Array): WebDuplexStream<Uint8Array, Uint8Array> {
    const stream = new WebDuplexStream<Uint8Array, Uint8Array>({
      writeFunction: async (chunk, { push }) => {
        push(chunk); // Directly push the chunk as is
        return null;
      }
    });

    const writer = stream.writable.getWriter();
    writer.write(uint8Array).then(() => writer.close());

    return stream;
  }

  // INSTANCE
  options: WebDuplexStreamOptions<TInput, TOutput>;

  constructor(optionsArg: WebDuplexStreamOptions<TInput, TOutput>) {
    super({
      async transform(chunk, controller) {
        // Transformation logic remains unchanged
        if (optionsArg?.writeFunction) {
          const tools: IStreamToolsWrite<TInput, TOutput> = {
            truncate: () => controller.terminate(),
            push: (pushArg: TOutput) => controller.enqueue(pushArg),
          };

          optionsArg.writeFunction(chunk, tools)
            .then(writeReturnChunk => {
              // the write return chunk is optional
              // just in case the write function returns something other than void.
              if (writeReturnChunk) {
                controller.enqueue(writeReturnChunk);
              }
            })
            .catch(err => controller.error(err));
        } else {
          controller.error(new Error('No write function provided'));
        }
      },
      async flush(controller) {
        // Flush logic remains unchanged
        if (optionsArg?.finalFunction) {
          const tools: IStreamToolsWrite<TInput, TOutput> = {
            truncate: () => controller.terminate(),
            push: (pipeObject) => controller.enqueue(pipeObject),
          };

          optionsArg.finalFunction(tools)
            .then(finalChunk => {
              if (finalChunk) {
                controller.enqueue(finalChunk);
              }
            })
            .catch(err => controller.error(err))
            .finally(() => controller.terminate());
        } else {
          controller.terminate();
        }
      }
    });

    this.options = optionsArg;
  }

  // Method to create a custom readable stream that integrates the readFunction
  // readFunction is executed whenever the stream is being read from and nothing is enqueued
  getCustomReadableStream() {
    const readableStream = this.readable;
    const options = this.options;
    const customReadable = new ReadableStream({
      async pull(controller) {
        const reader = readableStream.getReader();

        // Check the current state of the original stream
        const { value, done } = await reader.read();
        reader.releaseLock();

        if (done) {
          // If the original stream is done, close the custom readable stream
          controller.close();
        } else {
          if (value) {
            // If there is data in the original stream, enqueue it and do not execute the readFunction
            controller.enqueue(value);
          } else if (options.readFunction) {
            // If the original stream is empty, execute the readFunction and read again
            await options.readFunction({
              done: () => controller.close(),
              write: (writeArg) => controller.enqueue(writeArg),
            });

            const newReader = readableStream.getReader();
            const { value: newValue, done: newDone } = await newReader.read();
            newReader.releaseLock();

            if (newDone) {
              controller.close();
            } else {
              controller.enqueue(newValue);
            }
          }
        }
      }
    });

    return customReadable;
  }
}
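A minimal sketch of the web-stream counterpart; the number-doubling transform is illustrative, and the reader/writer handling uses only the standard TransformStream surface that the class extends. The class is reachable from the Node entry through the re-exported webstream namespace:

import { webstream } from '@push.rocks/smartstream';

const doubler = new webstream.WebDuplexStream<number, number>({
  writeFunction: async (chunk, tools) => {
    tools.push(chunk * 2); // enqueue on the readable side
  },
});

const writer = doubler.writable.getWriter();
const reader = doubler.readable.getReader();

writer.write(21);
writer.close();

reader.read().then(({ value }) => console.log(value)); // 42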
2
ts_web/index.ts
Normal file
@ -0,0 +1,2 @@
import './plugins.js';
export * from './classes.webduplexstream.js';
15
ts_web/plugins.ts
Normal file
@ -0,0 +1,15 @@
// @push.rocks scope
import * as smartenv from '@push.rocks/smartenv';

export {
  smartenv,
}

// let's set up dependencies
const smartenvInstance = new smartenv.Smartenv();

await smartenvInstance.getSafeNodeModule<typeof import('stream/web')>('stream/web', async (moduleArg) => {
  globalThis.ReadableStream = moduleArg.ReadableStream;
  globalThis.WritableStream = moduleArg.WritableStream;
  globalThis.TransformStream = moduleArg.TransformStream;
})
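The top-level await above back-fills the WHATWG stream globals when running under Node, so that classes.webduplexstream.ts can extend TransformStream in both environments. A rough sketch of the effect, assuming the module has been loaded once (the Node entry pulls it in via smartstream.plugins.ts and dist_ts_web; on current Node versions these globals exist natively anyway):

import '@push.rocks/smartstream'; // loading the library runs the shim as a side effect

// afterwards the web stream constructors are available globally, also in Node
const passthrough = new TransformStream<string, string>();
console.log(typeof ReadableStream, typeof WritableStream); // 'function' 'function'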
14
tsconfig.json
Normal file
@ -0,0 +1,14 @@
{
  "compilerOptions": {
    "experimentalDecorators": true,
    "useDefineForClassFields": false,
    "target": "ES2022",
    "module": "NodeNext",
    "moduleResolution": "NodeNext",
    "esModuleInterop": true,
    "verbatimModuleSyntax": true
  },
  "exclude": [
    "dist_*/**/*.d.ts"
  ]
}
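The "verbatimModuleSyntax" and "NodeNext" settings go hand in hand with the import style already visible in this diff; as a small illustration of what the compiler now expects (illustrative file, not part of the change):

// with verbatimModuleSyntax, type-only names should be imported with the type modifier
import { Duplex, type DuplexOptions } from 'stream';

// and relative ESM imports need explicit .js extensions under NodeNext resolution
import { SmartDuplex } from './smartstream.classes.smartduplex.js';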
3
tslint.json
@ -1,3 +0,0 @@
{
  "extends": "tslint-config-standard"
}
439
yarn.lock
@ -1,439 +0,0 @@
|
||||
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
|
||||
# yarn lockfile v1
|
||||
|
||||
|
||||
"@types/chai-as-promised@0.0.29":
|
||||
version "0.0.29"
|
||||
resolved "https://registry.yarnpkg.com/@types/chai-as-promised/-/chai-as-promised-0.0.29.tgz#43d52892aa998e185a3de3e2477edb8573be1d77"
|
||||
dependencies:
|
||||
"@types/chai" "*"
|
||||
"@types/promises-a-plus" "*"
|
||||
|
||||
"@types/chai-string@^1.1.30":
|
||||
version "1.1.30"
|
||||
resolved "https://registry.yarnpkg.com/@types/chai-string/-/chai-string-1.1.30.tgz#4d8744b31a5a2295fc01c981ed1e2d4c8a070f0a"
|
||||
dependencies:
|
||||
"@types/chai" "*"
|
||||
|
||||
"@types/chai@*":
|
||||
version "4.0.1"
|
||||
resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.0.1.tgz#37fea779617cfec3fd2b19a0247e8bbdd5133bf6"
|
||||
|
||||
"@types/chai@^3.4.35":
|
||||
version "3.5.2"
|
||||
resolved "https://registry.yarnpkg.com/@types/chai/-/chai-3.5.2.tgz#c11cd2817d3a401b7ba0f5a420f35c56139b1c1e"
|
||||
|
||||
"@types/node@*":
|
||||
version "8.0.6"
|
||||
resolved "https://registry.yarnpkg.com/@types/node/-/node-8.0.6.tgz#ed2c3e011cb51ccd3cf874989130f1b9ffe06069"
|
||||
|
||||
"@types/promises-a-plus@*":
|
||||
version "0.0.27"
|
||||
resolved "https://registry.yarnpkg.com/@types/promises-a-plus/-/promises-a-plus-0.0.27.tgz#c64651134614c84b8f5d7114ce8901d36a609780"
|
||||
|
||||
"@types/shelljs@^0.7.2":
|
||||
version "0.7.2"
|
||||
resolved "https://registry.yarnpkg.com/@types/shelljs/-/shelljs-0.7.2.tgz#c2bdb3fe80cd7a3da08750ca898ae44c589671f3"
|
||||
dependencies:
|
||||
"@types/node" "*"
|
||||
|
||||
"@types/through2@^2.0.33":
|
||||
version "2.0.33"
|
||||
resolved "https://registry.yarnpkg.com/@types/through2/-/through2-2.0.33.tgz#1ff2e88a100dfb5b140e7bb98791f1194400d131"
|
||||
dependencies:
|
||||
"@types/node" "*"
|
||||
|
||||
"@types/which@^1.0.28":
|
||||
version "1.0.28"
|
||||
resolved "https://registry.yarnpkg.com/@types/which/-/which-1.0.28.tgz#016e387629b8817bed653fe32eab5d11279c8df6"
|
||||
|
||||
ansi-256-colors@^1.1.0:
|
||||
version "1.1.0"
|
||||
resolved "https://registry.yarnpkg.com/ansi-256-colors/-/ansi-256-colors-1.1.0.tgz#910de50efcc7c09e3d82f2f87abd6b700c18818a"
|
||||
|
||||
assertion-error@^1.0.1:
|
||||
version "1.0.2"
|
||||
resolved "https://registry.yarnpkg.com/assertion-error/-/assertion-error-1.0.2.tgz#13ca515d86206da0bac66e834dd397d87581094c"
|
||||
|
||||
balanced-match@^1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767"
|
||||
|
||||
beautycolor@^1.0.7:
|
||||
version "1.0.7"
|
||||
resolved "https://registry.yarnpkg.com/beautycolor/-/beautycolor-1.0.7.tgz#a4715738ac4c8221371e9cbeb5a6cc6d11ecbf7c"
|
||||
dependencies:
|
||||
ansi-256-colors "^1.1.0"
|
||||
typings-global "^1.0.14"
|
||||
|
||||
bindings@^1.2.1:
|
||||
version "1.2.1"
|
||||
resolved "https://registry.yarnpkg.com/bindings/-/bindings-1.2.1.tgz#14ad6113812d2d37d72e67b4cacb4bb726505f11"
|
||||
|
||||
brace-expansion@^1.1.7:
|
||||
version "1.1.8"
|
||||
resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.8.tgz#c07b211c7c952ec1f8efd51a77ef0d1d3990a292"
|
||||
dependencies:
|
||||
balanced-match "^1.0.0"
|
||||
concat-map "0.0.1"
|
||||
|
||||
chai-as-promised@^6.0.0:
|
||||
version "6.0.0"
|
||||
resolved "https://registry.yarnpkg.com/chai-as-promised/-/chai-as-promised-6.0.0.tgz#1a02a433a6f24dafac63b9c96fa1684db1aa8da6"
|
||||
dependencies:
|
||||
check-error "^1.0.2"
|
||||
|
||||
chai-string@^1.3.0:
|
||||
version "1.4.0"
|
||||
resolved "https://registry.yarnpkg.com/chai-string/-/chai-string-1.4.0.tgz#359140c051d36a4e4b1a5fc6b910152f438a8d49"
|
||||
|
||||
chai@^3.5.0:
|
||||
version "3.5.0"
|
||||
resolved "https://registry.yarnpkg.com/chai/-/chai-3.5.0.tgz#4d02637b067fe958bdbfdd3a40ec56fef7373247"
|
||||
dependencies:
|
||||
assertion-error "^1.0.1"
|
||||
deep-eql "^0.1.3"
|
||||
type-detect "^1.0.0"
|
||||
|
||||
check-error@^1.0.2:
|
||||
version "1.0.2"
|
||||
resolved "https://registry.yarnpkg.com/check-error/-/check-error-1.0.2.tgz#574d312edd88bb5dd8912e9286dd6c0aed4aac82"
|
||||
|
||||
concat-map@0.0.1:
|
||||
version "0.0.1"
|
||||
resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b"
|
||||
|
||||
core-util-is@~1.0.0:
|
||||
version "1.0.2"
|
||||
resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7"
|
||||
|
||||
deep-eql@^0.1.3:
|
||||
version "0.1.3"
|
||||
resolved "https://registry.yarnpkg.com/deep-eql/-/deep-eql-0.1.3.tgz#ef558acab8de25206cd713906d74e56930eb69f2"
|
||||
dependencies:
|
||||
type-detect "0.1.1"
|
||||
|
||||
define-properties@^1.1.2:
|
||||
version "1.1.2"
|
||||
resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.2.tgz#83a73f2fea569898fb737193c8f873caf6d45c94"
|
||||
dependencies:
|
||||
foreach "^2.0.5"
|
||||
object-keys "^1.0.8"
|
||||
|
||||
early@^2.1.1:
|
||||
version "2.1.1"
|
||||
resolved "https://registry.yarnpkg.com/early/-/early-2.1.1.tgz#841e23254ea5dc54d8afaeee82f5ab65c00ee23c"
|
||||
dependencies:
|
||||
beautycolor "^1.0.7"
|
||||
smartq "^1.1.1"
|
||||
typings-global "^1.0.16"
|
||||
|
||||
es-abstract@^1.5.1:
|
||||
version "1.7.0"
|
||||
resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.7.0.tgz#dfade774e01bfcd97f96180298c449c8623fb94c"
|
||||
dependencies:
|
||||
es-to-primitive "^1.1.1"
|
||||
function-bind "^1.1.0"
|
||||
is-callable "^1.1.3"
|
||||
is-regex "^1.0.3"
|
||||
|
||||
es-to-primitive@^1.1.1:
|
||||
version "1.1.1"
|
||||
resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.1.1.tgz#45355248a88979034b6792e19bb81f2b7975dd0d"
|
||||
dependencies:
|
||||
is-callable "^1.1.1"
|
||||
is-date-object "^1.0.1"
|
||||
is-symbol "^1.0.1"
|
||||
|
||||
es6-error@^4.0.2:
|
||||
version "4.0.2"
|
||||
resolved "https://registry.yarnpkg.com/es6-error/-/es6-error-4.0.2.tgz#eec5c726eacef51b7f6b73c20db6e1b13b069c98"
|
||||
|
||||
foreach@^2.0.5:
|
||||
version "2.0.5"
|
||||
resolved "https://registry.yarnpkg.com/foreach/-/foreach-2.0.5.tgz#0bee005018aeb260d0a3af3ae658dd0136ec1b99"
|
||||
|
||||
fs.realpath@^1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f"
|
||||
|
||||
function-bind@^1.0.2, function-bind@^1.1.0:
|
||||
version "1.1.0"
|
||||
resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.0.tgz#16176714c801798e4e8f2cf7f7529467bb4a5771"
|
||||
|
||||
glob@^7.0.0:
|
||||
version "7.1.2"
|
||||
resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.2.tgz#c19c9df9a028702d678612384a6552404c636d15"
|
||||
dependencies:
|
||||
fs.realpath "^1.0.0"
|
||||
inflight "^1.0.4"
|
||||
inherits "2"
|
||||
minimatch "^3.0.4"
|
||||
once "^1.3.0"
|
||||
path-is-absolute "^1.0.0"
|
||||
|
||||
has@^1.0.1:
|
||||
version "1.0.1"
|
||||
resolved "https://registry.yarnpkg.com/has/-/has-1.0.1.tgz#8461733f538b0837c9361e39a9ab9e9704dc2f28"
|
||||
dependencies:
|
||||
function-bind "^1.0.2"
|
||||
|
||||
inflight@^1.0.4:
|
||||
version "1.0.6"
|
||||
resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9"
|
||||
dependencies:
|
||||
once "^1.3.0"
|
||||
wrappy "1"
|
||||
|
||||
inherits@2, inherits@~2.0.3:
|
||||
version "2.0.3"
|
||||
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de"
|
||||
|
||||
interpret@^1.0.0:
|
||||
version "1.0.3"
|
||||
resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.0.3.tgz#cbc35c62eeee73f19ab7b10a801511401afc0f90"
|
||||
|
||||
is-callable@^1.1.1, is-callable@^1.1.3:
|
||||
version "1.1.3"
|
||||
resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.1.3.tgz#86eb75392805ddc33af71c92a0eedf74ee7604b2"
|
||||
|
||||
is-date-object@^1.0.1:
|
||||
version "1.0.1"
|
||||
resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.1.tgz#9aa20eb6aeebbff77fbd33e74ca01b33581d3a16"
|
||||
|
||||
is-regex@^1.0.3:
|
||||
version "1.0.4"
|
||||
resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.0.4.tgz#5517489b547091b0930e095654ced25ee97e9491"
|
||||
dependencies:
|
||||
has "^1.0.1"
|
||||
|
||||
is-symbol@^1.0.1:
|
||||
version "1.0.1"
|
||||
resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.1.tgz#3cc59f00025194b6ab2e38dbae6689256b660572"
|
||||
|
||||
isarray@~1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11"
|
||||
|
||||
isexe@^2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
|
||||
|
||||
leakage@^0.3.0:
|
||||
version "0.3.0"
|
||||
resolved "https://registry.yarnpkg.com/leakage/-/leakage-0.3.0.tgz#15d698abdc76bbc6439601f4f3020e77e2d50c39"
|
||||
dependencies:
|
||||
es6-error "^4.0.2"
|
||||
left-pad "^1.1.3"
|
||||
memwatch-next "^0.3.0"
|
||||
minimist "^1.2.0"
|
||||
pretty-bytes "^4.0.2"
|
||||
|
||||
left-pad@^1.1.3:
|
||||
version "1.1.3"
|
||||
resolved "https://registry.yarnpkg.com/left-pad/-/left-pad-1.1.3.tgz#612f61c033f3a9e08e939f1caebeea41b6f3199a"
|
||||
|
||||
memwatch-next@^0.3.0:
|
||||
version "0.3.0"
|
||||
resolved "https://registry.yarnpkg.com/memwatch-next/-/memwatch-next-0.3.0.tgz#2111050f9a906e0aa2d72a4ec0f0089c78726f8f"
|
||||
dependencies:
|
||||
bindings "^1.2.1"
|
||||
nan "^2.3.2"
|
||||
|
||||
minimatch@^3.0.4:
|
||||
version "3.0.4"
|
||||
resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083"
|
||||
dependencies:
|
||||
brace-expansion "^1.1.7"
|
||||
|
||||
minimist@^1.2.0:
|
||||
version "1.2.0"
|
||||
resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284"
|
||||
|
||||
nan@^2.3.2:
|
||||
version "2.6.2"
|
||||
resolved "https://registry.yarnpkg.com/nan/-/nan-2.6.2.tgz#e4ff34e6c95fdfb5aecc08de6596f43605a7db45"
|
||||
|
||||
object-keys@^1.0.8:
|
||||
version "1.0.11"
|
||||
resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.0.11.tgz#c54601778ad560f1142ce0e01bcca8b56d13426d"
|
||||
|
||||
object.getownpropertydescriptors@^2.0.3:
|
||||
version "2.0.3"
|
||||
resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.0.3.tgz#8758c846f5b407adab0f236e0986f14b051caa16"
|
||||
dependencies:
|
||||
define-properties "^1.1.2"
|
||||
es-abstract "^1.5.1"
|
||||
|
||||
once@^1.3.0:
|
||||
version "1.4.0"
|
||||
resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
|
||||
dependencies:
|
||||
wrappy "1"
|
||||
|
||||
path-is-absolute@^1.0.0:
|
||||
version "1.0.1"
|
||||
resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f"
|
||||
|
||||
path-parse@^1.0.5:
|
||||
version "1.0.5"
|
||||
resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.5.tgz#3c1adf871ea9cd6c9431b6ea2bd74a0ff055c4c1"
|
||||
|
||||
pretty-bytes@^4.0.2:
|
||||
version "4.0.2"
|
||||
resolved "https://registry.yarnpkg.com/pretty-bytes/-/pretty-bytes-4.0.2.tgz#b2bf82e7350d65c6c33aa95aaa5a4f6327f61cd9"
|
||||
|
||||
process-nextick-args@~1.0.6:
|
||||
version "1.0.7"
|
||||
resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-1.0.7.tgz#150e20b756590ad3f91093f25a4f2ad8bff30ba3"
|
||||
|
||||
readable-stream@^2.1.5:
|
||||
version "2.3.3"
|
||||
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.3.tgz#368f2512d79f9d46fdfc71349ae7878bbc1eb95c"
|
||||
dependencies:
|
||||
core-util-is "~1.0.0"
|
||||
inherits "~2.0.3"
|
||||
isarray "~1.0.0"
|
||||
process-nextick-args "~1.0.6"
|
||||
safe-buffer "~5.1.1"
|
||||
string_decoder "~1.0.3"
|
||||
util-deprecate "~1.0.1"
|
||||
|
||||
rechoir@^0.6.2:
|
||||
version "0.6.2"
|
||||
resolved "https://registry.yarnpkg.com/rechoir/-/rechoir-0.6.2.tgz#85204b54dba82d5742e28c96756ef43af50e3384"
|
||||
dependencies:
|
||||
resolve "^1.1.6"
|
||||
|
||||
resolve@^1.1.6:
|
||||
version "1.3.3"
|
||||
resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.3.3.tgz#655907c3469a8680dc2de3a275a8fdd69691f0e5"
|
||||
dependencies:
|
||||
path-parse "^1.0.5"
|
||||
|
||||
safe-buffer@~5.1.0, safe-buffer@~5.1.1:
|
||||
version "5.1.1"
|
||||
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.1.tgz#893312af69b2123def71f57889001671eeb2c853"
|
||||
|
||||
semver@^5.3.0:
|
||||
version "5.3.0"
|
||||
resolved "https://registry.yarnpkg.com/semver/-/semver-5.3.0.tgz#9b2ce5d3de02d17c6012ad326aa6b4d0cf54f94f"
|
||||
|
||||
shelljs@^0.7.8:
|
||||
version "0.7.8"
|
||||
resolved "https://registry.yarnpkg.com/shelljs/-/shelljs-0.7.8.tgz#decbcf874b0d1e5fb72e14b164a9683048e9acb3"
|
||||
dependencies:
|
||||
glob "^7.0.0"
|
||||
interpret "^1.0.0"
|
||||
rechoir "^0.6.2"
|
||||
|
||||
smartchai@^1.0.3:
|
||||
version "1.0.3"
|
||||
resolved "https://registry.yarnpkg.com/smartchai/-/smartchai-1.0.3.tgz#de6d010bb8b5aef24cb70b31a5f5334e8c41b72f"
|
||||
dependencies:
|
||||
"@types/chai" "^3.4.35"
|
||||
"@types/chai-as-promised" "0.0.29"
|
||||
"@types/chai-string" "^1.1.30"
|
||||
chai "^3.5.0"
|
||||
chai-as-promised "^6.0.0"
|
||||
chai-string "^1.3.0"
|
||||
|
||||
smartdelay@^1.0.3:
|
||||
version "1.0.3"
|
||||
resolved "https://registry.yarnpkg.com/smartdelay/-/smartdelay-1.0.3.tgz#5fd44dad77262d110702f0293efa80c072cfb579"
|
||||
dependencies:
|
||||
smartq "^1.1.1"
|
||||
typings-global "^1.0.16"
|
||||
|
||||
smartq@^1.1.1:
|
||||
version "1.1.1"
|
||||
resolved "https://registry.yarnpkg.com/smartq/-/smartq-1.1.1.tgz#efb358705260d41ae18aef7ffd815f7b6fe17dd3"
|
||||
dependencies:
|
||||
typed-promisify "^0.3.0"
|
||||
typings-global "^1.0.14"
|
||||
|
||||
smartq@^1.1.6:
|
||||
version "1.1.6"
|
||||
resolved "https://registry.yarnpkg.com/smartq/-/smartq-1.1.6.tgz#0c1ff4336d95e95b4f1fdd8ccd7e2c5a323b8412"
|
||||
dependencies:
|
||||
typings-global "^1.0.19"
|
||||
util.promisify "^1.0.0"
|
||||
|
||||
smartshell@^1.0.6:
|
||||
version "1.0.8"
|
||||
resolved "https://registry.yarnpkg.com/smartshell/-/smartshell-1.0.8.tgz#1535756c0fe8069f7e6da1e3f9cb6c8f77094e42"
|
||||
dependencies:
|
||||
"@types/shelljs" "^0.7.2"
|
||||
"@types/which" "^1.0.28"
|
||||
shelljs "^0.7.8"
|
||||
smartq "^1.1.1"
|
||||
typings-global "^1.0.19"
|
||||
which "^1.2.14"
|
||||
|
||||
string_decoder@~1.0.3:
|
||||
version "1.0.3"
|
||||
resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.0.3.tgz#0fc67d7c141825de94282dd536bec6b9bce860ab"
|
||||
dependencies:
|
||||
safe-buffer "~5.1.0"
|
||||
|
||||
tapbundle@^1.1.0:
|
||||
version "1.1.0"
|
||||
resolved "https://registry.yarnpkg.com/tapbundle/-/tapbundle-1.1.0.tgz#e0547f683ae36260f639ecd7435df95f0af01683"
|
||||
dependencies:
|
||||
early "^2.1.1"
|
||||
leakage "^0.3.0"
|
||||
smartchai "^1.0.3"
|
||||
smartdelay "^1.0.3"
|
||||
smartq "^1.1.1"
|
||||
typings-global "^1.0.19"
|
||||
|
||||
through2@^2.0.3:
|
||||
version "2.0.3"
|
||||
resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.3.tgz#0004569b37c7c74ba39c43f3ced78d1ad94140be"
|
||||
dependencies:
|
||||
readable-stream "^2.1.5"
|
||||
xtend "~4.0.1"
|
||||
|
||||
type-detect@0.1.1:
|
||||
version "0.1.1"
|
||||
resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-0.1.1.tgz#0ba5ec2a885640e470ea4e8505971900dac58822"
|
||||
|
||||
type-detect@^1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-1.0.0.tgz#762217cc06db258ec48908a1298e8b95121e8ea2"
|
||||
|
||||
typed-promisify@^0.3.0:
|
||||
version "0.3.0"
|
||||
resolved "https://registry.yarnpkg.com/typed-promisify/-/typed-promisify-0.3.0.tgz#1ba0af5e444c87d8047406f18ce49092a1191853"
|
||||
|
||||
typings-global@^1.0.14, typings-global@^1.0.16, typings-global@^1.0.19:
|
||||
version "1.0.19"
|
||||
resolved "https://registry.yarnpkg.com/typings-global/-/typings-global-1.0.19.tgz#3376a72d4de1e5541bf5702248ff64c3e6ea316c"
|
||||
dependencies:
|
||||
semver "^5.3.0"
|
||||
smartshell "^1.0.6"
|
||||
|
||||
util-deprecate@~1.0.1:
|
||||
version "1.0.2"
|
||||
resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
|
||||
|
||||
util.promisify@^1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/util.promisify/-/util.promisify-1.0.0.tgz#440f7165a459c9a16dc145eb8e72f35687097030"
|
||||
dependencies:
|
||||
define-properties "^1.1.2"
|
||||
object.getownpropertydescriptors "^2.0.3"
|
||||
|
||||
which@^1.2.14:
|
||||
version "1.2.14"
|
||||
resolved "https://registry.yarnpkg.com/which/-/which-1.2.14.tgz#9a87c4378f03e827cecaf1acdf56c736c01c14e5"
|
||||
dependencies:
|
||||
isexe "^2.0.0"
|
||||
|
||||
wrappy@1:
|
||||
version "1.0.2"
|
||||
resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
|
||||
|
||||
xtend@~4.0.1:
|
||||
version "4.0.1"
|
||||
resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.1.tgz#a5c6d532be656e23db820efb943a1f04998d63af"
|