Compare commits
82 Commits
SHA1 | Author | Date
---|---|---
b925e5e662 | |
98a5d2c94d | |
0e735cba20 | |
f815457801 | |
f7e47ae354 | |
684e893801 | |
d4b381d33d | |
291a11aa60 | |
ca592afec9 | |
8b07197224 | |
b60fd15ec6 | |
853eccc780 | |
c26aff85b5 | |
321e4d9dea | |
3d2789857c | |
07b88a078d | |
6fee0028d8 | |
629c52f9bc | |
fd056c29e9 | |
36c456b509 | |
16f8c25557 | |
219e070ba2 | |
ee97e1d88b | |
279db74568 | |
b84c504f11 | |
7b3194cc13 | |
e1e821efec | |
6b613d1b8a | |
70f1c58a82 | |
5df76ca94b | |
32cfda3c90 | |
dd521398ea | |
038e6cc33d | |
2fc37d6892 | |
3c1eb1ab70 | |
5296e8859b | |
160e0ae451 | |
373c6538ae | |
7a1476e106 | |
b2a2035f00 | |
03e4f03035 | |
d74bbb2b12 | |
22cfe1f5cb | |
5fc2c4586d | |
6ab81fb323 | |
58ec27a1a0 | |
88811646b7 | |
412bb52eee | |
b04750ecbd | |
0c99475888 | |
86317def88 | |
0a5af0ba96 | |
cf73ff4a54 | |
788897e765 | |
7fa3894f6e | |
afdd654664 | |
8277e0ca6d | |
0892c87a68 | |
841ba2e14d | |
5b2953bf02 | |
d3457fd65b | |
3bf1eafe6b | |
04a9b992d7 | |
6206b55deb | |
6c05bf6ae3 | |
371074afc1 | |
e0e665fe6d | |
5483202972 | |
24b3458888 | |
cffa47ac3d | |
9dadf3f78f | |
b35a671fe9 | |
0ca38c109e | |
4e2321e1ee | |
a640ab3d7b | |
37d6d56287 | |
443a026502 | |
9644c5b7e3 | |
25faa8c697 | |
982387aaa3 | |
4a11f50efe | |
0ddec29392 | |

.gitea/workflows/default_nottags.yaml (new file, 66 lines)
@@ -0,0 +1,66 @@
+name: Default (not tags)
+
+on:
+  push:
+    tags-ignore:
+      - '**'
+
+env:
+  IMAGE: registry.gitlab.com/hosttoday/ht-docker-node:npmci
+  NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@gitea.lossless.digital/${{gitea.repository}}.git
+  NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
+  NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
+  NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
+  NPMCI_URL_CLOUDLY: ${{secrets.NPMCI_URL_CLOUDLY}}
+
+jobs:
+  security:
+    runs-on: ubuntu-latest
+    continue-on-error: true
+    container:
+      image: ${{ env.IMAGE }}
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Install pnpm and npmci
+        run: |
+          pnpm install -g pnpm
+          pnpm install -g @shipzone/npmci
+
+      - name: Run npm prepare
+        run: npmci npm prepare
+
+      - name: Audit production dependencies
+        run: |
+          npmci command npm config set registry https://registry.npmjs.org
+          npmci command pnpm audit --audit-level=high --prod
+        continue-on-error: true
+
+      - name: Audit development dependencies
+        run: |
+          npmci command npm config set registry https://registry.npmjs.org
+          npmci command pnpm audit --audit-level=high --dev
+        continue-on-error: true
+
+  test:
+    if: ${{ always() }}
+    needs: security
+    runs-on: ubuntu-latest
+    container:
+      image: ${{ env.IMAGE }}
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Test stable
+        run: |
+          npmci node install stable
+          npmci npm install
+          npmci npm test
+
+      - name: Test build
+        run: |
+          npmci node install stable
+          npmci npm install
+          npmci npm build

.gitea/workflows/default_tags.yaml (new file, 124 lines)
@@ -0,0 +1,124 @@
+name: Default (tags)
+
+on:
+  push:
+    tags:
+      - '*'
+
+env:
+  IMAGE: registry.gitlab.com/hosttoday/ht-docker-node:npmci
+  NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@gitea.lossless.digital/${{gitea.repository}}.git
+  NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
+  NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
+  NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
+  NPMCI_URL_CLOUDLY: ${{secrets.NPMCI_URL_CLOUDLY}}
+
+jobs:
+  security:
+    runs-on: ubuntu-latest
+    continue-on-error: true
+    container:
+      image: ${{ env.IMAGE }}
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Prepare
+        run: |
+          pnpm install -g pnpm
+          pnpm install -g @shipzone/npmci
+          npmci npm prepare
+
+      - name: Audit production dependencies
+        run: |
+          npmci command npm config set registry https://registry.npmjs.org
+          npmci command pnpm audit --audit-level=high --prod
+        continue-on-error: true
+
+      - name: Audit development dependencies
+        run: |
+          npmci command npm config set registry https://registry.npmjs.org
+          npmci command pnpm audit --audit-level=high --dev
+        continue-on-error: true
+
+  test:
+    if: ${{ always() }}
+    needs: security
+    runs-on: ubuntu-latest
+    container:
+      image: ${{ env.IMAGE }}
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Prepare
+        run: |
+          pnpm install -g pnpm
+          pnpm install -g @shipzone/npmci
+          npmci npm prepare
+
+      - name: Test stable
+        run: |
+          npmci node install stable
+          npmci npm install
+          npmci npm test
+
+      - name: Test build
+        run: |
+          npmci node install stable
+          npmci npm install
+          npmci npm build
+
+  release:
+    needs: test
+    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
+    runs-on: ubuntu-latest
+    container:
+      image: ${{ env.IMAGE }}
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Prepare
+        run: |
+          pnpm install -g pnpm
+          pnpm install -g @shipzone/npmci
+          npmci npm prepare
+
+      - name: Release
+        run: |
+          npmci node install stable
+          npmci npm publish
+
+  metadata:
+    needs: test
+    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
+    runs-on: ubuntu-latest
+    container:
+      image: ${{ env.IMAGE }}
+    continue-on-error: true
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Prepare
+        run: |
+          pnpm install -g pnpm
+          pnpm install -g @shipzone/npmci
+          npmci npm prepare
+
+      - name: Code quality
+        run: |
+          npmci command npm install -g typescript
+          npmci npm install
+
+      - name: Trigger
+        run: npmci trigger
+
+      - name: Build docs and upload artifacts
+        run: |
+          npmci node install stable
+          npmci npm install
+          pnpm install -g @git.zone/tsdoc
+          npmci command tsdoc
+        continue-on-error: true

.gitlab-ci.yml (deleted, 141 lines)
@@ -1,141 +0,0 @@
-# gitzone ci_default
-image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
-
-cache:
-  paths:
-    - .npmci_cache/
-  key: '$CI_BUILD_STAGE'
-
-stages:
-  - security
-  - test
-  - release
-  - metadata
-
-before_script:
-  - npm install -g @shipzone/npmci
-
-# ====================
-# security stage
-# ====================
-mirror:
-  stage: security
-  script:
-    - npmci git mirror
-  only:
-    - tags
-  tags:
-    - lossless
-    - docker
-    - notpriv
-
-auditProductionDependencies:
-  image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
-  stage: security
-  script:
-    - npmci npm prepare
-    - npmci command npm install --production --ignore-scripts
-    - npmci command npm config set registry https://registry.npmjs.org
-    - npmci command npm audit --audit-level=high --only=prod --production
-  tags:
-    - docker
-  allow_failure: true
-
-auditDevDependencies:
-  image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
-  stage: security
-  script:
-    - npmci npm prepare
-    - npmci command npm install --ignore-scripts
-    - npmci command npm config set registry https://registry.npmjs.org
-    - npmci command npm audit --audit-level=high --only=dev
-  tags:
-    - docker
-  allow_failure: true
-
-# ====================
-# test stage
-# ====================
-
-testStable:
-  stage: test
-  script:
-    - npmci npm prepare
-    - npmci node install stable
-    - npmci npm install
-    - npmci npm test
-  coverage: /\d+.?\d+?\%\s*coverage/
-  tags:
-    - docker
-
-testBuild:
-  stage: test
-  script:
-    - npmci npm prepare
-    - npmci node install stable
-    - npmci npm install
-    - npmci command npm run build
-  coverage: /\d+.?\d+?\%\s*coverage/
-  tags:
-    - docker
-
-release:
-  stage: release
-  script:
-    - npmci node install stable
-    - npmci npm publish
-  only:
-    - tags
-  tags:
-    - lossless
-    - docker
-    - notpriv
-
-# ====================
-# metadata stage
-# ====================
-codequality:
-  stage: metadata
-  allow_failure: true
-  only:
-    - tags
-  script:
-    - npmci command npm install -g tslint typescript
-    - npmci npm prepare
-    - npmci npm install
-    - npmci command "tslint -c tslint.json ./ts/**/*.ts"
-  tags:
-    - lossless
-    - docker
-    - priv
-
-trigger:
-  stage: metadata
-  script:
-    - npmci trigger
-  only:
-    - tags
-  tags:
-    - lossless
-    - docker
-    - notpriv
-
-pages:
-  stage: metadata
-  script:
-    - npmci node install lts
-    - npmci command npm install -g @gitzone/tsdoc
-    - npmci npm prepare
-    - npmci npm install
-    - npmci command tsdoc
-  tags:
-    - lossless
-    - docker
-    - notpriv
-  only:
-    - tags
-  artifacts:
-    expire_in: 1 week
-    paths:
-      - public
-  allow_failure: true

@@ -8,10 +8,10 @@
   "projectType": "npm",
   "module": {
     "githost": "gitlab.com",
-    "gitscope": "pushrocks",
+    "gitscope": "push.rocks",
     "gitrepo": "smartfile",
     "description": "smart ways to work with files in nodejs",
-    "npmPackagename": "@pushrocks/smartfile",
+    "npmPackagename": "@push.rocks/smartfile",
     "license": "MIT"
   }
 }

package-lock.json (generated, 11317 changed lines) — File diff suppressed because it is too large

package.json (55 changed lines)
@@ -1,18 +1,19 @@
 {
-  "name": "@pushrocks/smartfile",
+  "name": "@push.rocks/smartfile",
   "private": false,
-  "version": "10.0.1",
+  "version": "11.0.0",
   "description": "offers smart ways to work with files in nodejs",
   "main": "dist_ts/index.js",
   "typings": "dist_ts/index.d.ts",
   "type": "module",
   "scripts": {
     "test": "(tstest test/)",
-    "build": "(tsbuild --web --allowimplicitany)"
+    "build": "(tsbuild --web --allowimplicitany)",
+    "buildDocs": "tsdoc"
   },
   "repository": {
     "type": "git",
-    "url": "https://gitlab.com/pushrocks/smartfile.git"
+    "url": "git+https://gitlab.com/push.rocks/smartfile.git"
   },
   "keywords": [
     "filesystem",
@@ -21,35 +22,33 @@
   "author": "Lossless GmbH <hello@lossless.com> (https://lossless.com)",
   "license": "MIT",
   "bugs": {
-    "url": "https://gitlab.com/pushrocks/smartfile/issues"
+    "url": "https://gitlab.com/push.rocks/smartfile/issues"
   },
-  "homepage": "https://gitlab.com/pushrocks/smartfile",
+  "homepage": "https://gitlab.com/push.rocks/smartfile#readme",
   "dependencies": {
-    "@pushrocks/lik": "^6.0.0",
-    "@pushrocks/smartdelay": "^2.0.13",
-    "@pushrocks/smartfile-interfaces": "^1.0.7",
-    "@pushrocks/smarthash": "^2.1.10",
-    "@pushrocks/smartjson": "^4.0.6",
-    "@pushrocks/smartmime": "^1.0.5",
-    "@pushrocks/smartpath": "^5.0.5",
-    "@pushrocks/smartpromise": "^3.1.6",
-    "@pushrocks/smartrequest": "^1.1.56",
-    "@pushrocks/smartstream": "^2.0.1",
-    "@pushrocks/streamfunction": "^4.0.4",
-    "@types/fs-extra": "^9.0.13",
-    "@types/glob": "^7.2.0",
-    "@types/js-yaml": "^4.0.5",
-    "fs-extra": "^10.1.0",
-    "glob": "^8.0.3",
+    "@push.rocks/lik": "^6.0.5",
+    "@push.rocks/smartdelay": "^3.0.5",
+    "@push.rocks/smartfile-interfaces": "^1.0.7",
+    "@push.rocks/smarthash": "^3.0.4",
+    "@push.rocks/smartjson": "^5.0.10",
+    "@push.rocks/smartmime": "^1.0.5",
+    "@push.rocks/smartpath": "^5.0.11",
+    "@push.rocks/smartpromise": "^4.0.2",
+    "@push.rocks/smartrequest": "^2.0.20",
+    "@push.rocks/smartstream": "^3.0.7",
+    "@types/fs-extra": "^11.0.3",
+    "@types/glob": "^8.1.0",
+    "@types/js-yaml": "^4.0.8",
+    "fs-extra": "^11.1.1",
+    "glob": "^10.3.10",
     "js-yaml": "^4.1.0"
   },
   "devDependencies": {
-    "@gitzone/tsbuild": "^2.1.63",
-    "@gitzone/tsrun": "^1.2.35",
-    "@gitzone/tstest": "^1.0.71",
-    "@pushrocks/tapbundle": "^5.0.3",
-    "@types/node": "^17.0.40",
-    "gulp-function": "^2.2.14"
+    "@git.zone/tsbuild": "^2.1.70",
+    "@git.zone/tsrun": "^1.2.46",
+    "@git.zone/tstest": "^1.0.81",
+    "@push.rocks/tapbundle": "^5.0.15",
+    "@types/node": "^20.8.10"
   },
   "files": [
     "ts/**/*",
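
The rename from the `@pushrocks` scope to `@push.rocks` (and from `@gitzone` to `@git.zone` for the tooling) means downstream consumers have to update both their package.json dependency and their import specifiers. A minimal sketch of the consumer-side change, assuming an ESM project that previously depended on `@pushrocks/smartfile` (the file path in the example is hypothetical):

```typescript
// before (v10.x, old scope) — hypothetical consumer code
// import * as smartfile from '@pushrocks/smartfile';

// after (v11.0.0, new scope)
import * as smartfile from '@push.rocks/smartfile';

// the API surface stays reachable through the same namespace import
const exists = await smartfile.fs.fileExists('./package.json');
console.log(exists);
```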

pnpm-lock.yaml (generated, new file, 5583 lines) — File diff suppressed because it is too large

readme.md (30 changed lines)
@@ -1,27 +1,26 @@
-# @pushrocks/smartfile
+# @push.rocks/smartfile
 smart ways to work with files in nodejs
 
 ## Availabililty and Links
-* [npmjs.org (npm package)](https://www.npmjs.com/package/@pushrocks/smartfile)
-* [gitlab.com (source)](https://gitlab.com/pushrocks/smartfile)
-* [github.com (source mirror)](https://github.com/pushrocks/smartfile)
-* [docs (typedoc)](https://pushrocks.gitlab.io/smartfile/)
+* [npmjs.org (npm package)](https://www.npmjs.com/package/@push.rocks/smartfile)
+* [gitlab.com (source)](https://gitlab.com/push.rocks/smartfile)
+* [github.com (source mirror)](https://github.com/push.rocks/smartfile)
+* [docs (typedoc)](https://push.rocks.gitlab.io/smartfile/)
 
 ## Status for master
 
 Status Category | Status Badge
 -- | --
 GitLab Pipelines | [](https://lossless.cloud)
 GitLab Pipline Test Coverage | [](https://lossless.cloud)
 npm | [](https://lossless.cloud)
 Snyk | [](https://lossless.cloud)
 TypeScript Support | [](https://lossless.cloud)
 node Support | [](https://nodejs.org/dist/latest-v10.x/docs/api/)
 Code Style | [](https://lossless.cloud)
 PackagePhobia (total standalone install weight) | [](https://lossless.cloud)
 PackagePhobia (package size on registry) | [](https://lossless.cloud)
 BundlePhobia (total size when bundled) | [](https://lossless.cloud)
-Platform support | [](https://lossless.cloud) [](https://lossless.cloud)
 
 ## Usage
 
@@ -45,7 +44,6 @@ We are always happy for code contributions. If you are not the code contributing
 
 For further information read the linked docs at the top of this readme.
 
-> MIT licensed | **©** [Lossless GmbH](https://lossless.gmbh)
+## Legal
+> MIT licensed | **©** [Task Venture Capital GmbH](https://task.vc)
 | By using this npm module you agree to our [privacy policy](https://lossless.gmbH/privacy)
-
-[](https://maintainedby.lossless.com)

test/test.streamfile.ts (new file, 67 lines)
@@ -0,0 +1,67 @@
+import * as path from 'path';
+import { expect, tap } from '@push.rocks/tapbundle';
+import * as smartfile from '../ts/index.js'; // adjust the import path as needed
+
+// Test assets path
+const testAssetsPath = './test/testassets/';
+
+// ---------------------------
+// StreamFile tests
+// ---------------------------
+
+tap.test('StreamFile.fromPath should create a StreamFile from a file path', async () => {
+  const streamFile = await smartfile.StreamFile.fromPath(path.join(testAssetsPath, 'mytest.json'));
+  expect(streamFile).toBeInstanceOf(smartfile.StreamFile);
+  const contentBuffer = await streamFile.getContentAsBuffer();
+  expect(contentBuffer).toBeInstanceOf(Buffer);
+});
+
+tap.test('StreamFile.fromUrl should create a StreamFile from a URL', async () => {
+  const streamFile = await smartfile.StreamFile.fromUrl('http://example.com/somefile.json');
+  expect(streamFile).toBeInstanceOf(smartfile.StreamFile);
+});
+
+tap.test('StreamFile.fromBuffer should create a StreamFile from a Buffer', async () => {
+  const buffer = Buffer.from('Some content');
+  const streamFile = smartfile.StreamFile.fromBuffer(buffer, 'bufferfile.txt');
+  expect(streamFile).toBeInstanceOf(smartfile.StreamFile);
+});
+
+tap.test('StreamFile should write the stream to disk', async () => {
+  const streamFile = await smartfile.StreamFile.fromPath(path.join(testAssetsPath, 'mytest.json'));
+  await streamFile.writeToDisk(path.join(testAssetsPath, 'temp', 'mytest.json'));
+  // Verify the file was written
+  expect(
+    // We'll use the fileExists method from your smartfile library
+    // Replace with the actual method you use to check file existence
+    await smartfile.fs.fileExists(path.join(testAssetsPath, 'temp', 'mytest.json'))
+  ).toBeTrue();
+});
+
+tap.test('StreamFile should write to a directory', async () => {
+  const streamFile = await smartfile.StreamFile.fromPath(path.join(testAssetsPath, 'mytest.json'));
+  await streamFile.writeToDir(path.join(testAssetsPath, 'temp'));
+  // Verify the file was written
+  expect(
+    await smartfile.fs.fileExists(path.join(testAssetsPath, 'temp', 'mytest.json'))
+  ).toBeTrue();
+});
+
+tap.test('StreamFile should return content as a buffer', async () => {
+  const streamFile = await smartfile.StreamFile.fromPath(path.join(testAssetsPath, 'mytest.json'));
+  const contentBuffer = await streamFile.getContentAsBuffer();
+  expect(contentBuffer).toBeInstanceOf(Buffer);
+  // Further checks on the content can be added here if necessary
+});
+
+tap.test('StreamFile should return content as a string', async () => {
+  const streamFile = await smartfile.StreamFile.fromPath(path.join(testAssetsPath, 'mytest.json'));
+  const contentString = await streamFile.getContentAsString();
+  expect(typeof contentString).toBeTypeofString();
+  // Verify the content matches what's expected
+  // This assumes the file contains a JSON object with a key 'key1' with value 'this works'
+  expect(JSON.parse(contentString).key1).toEqual('this works');
+});
+
+// Start the test sequence
+tap.start();

test/test.ts (26 changed lines)
@@ -1,7 +1,7 @@
 import * as smartfile from '../ts/index.js';
 import * as path from 'path';
 
-import { expect, tap } from '@pushrocks/tapbundle';
+import { expect, tap } from '@push.rocks/tapbundle';
 
 // ---------------------------
 // smartfile.fs
@@ -38,9 +38,7 @@ tap.test('.fs.listFolders() -> should get the file type from a string', async ()
 tap.test('.fs.listFilesSync() -> should get the file type from a string', async () => {
   expect(smartfile.fs.listFilesSync('./test/testassets/')).toContain('mytest.json');
   expect(smartfile.fs.listFilesSync('./test/testassets/')).not.toContain('notExistentFile');
-  expect(smartfile.fs.listFilesSync('./test/testassets/', /mytest\.json/)).toContain(
-    'mytest.json'
-  );
+  expect(smartfile.fs.listFilesSync('./test/testassets/', /mytest\.json/)).toContain('mytest.json');
   expect(smartfile.fs.listFilesSync('./test/testassets/', /mytests.json/)).not.toContain(
     'mytest.json'
   );
@@ -66,7 +64,7 @@ tap.test('.fs.fileTreeToObject -> should read a file tree into an Object', async
     path.resolve('./test/testassets/'),
     '**/*.txt'
   );
-  expect(fileArrayArg[0]).toBeInstanceOf(smartfile.Smartfile);
+  expect(fileArrayArg[0]).toBeInstanceOf(smartfile.SmartFile);
   expect(fileArrayArg[0].contents.toString()).toEqual(fileArrayArg[0].contentBuffer.toString());
 });
 
@@ -133,9 +131,7 @@ tap.test('.fs.toObjectSync() -> should read an .json file to an object', async (
 });
 
 tap.test('.fs.toStringSync() -> should read a file to a string', async () => {
-  expect(smartfile.fs.toStringSync('./test/testassets/mytest.txt')).toEqual(
-    'Some TestString &&%$'
-  );
+  expect(smartfile.fs.toStringSync('./test/testassets/mytest.txt')).toEqual('Some TestString &&%$');
 });
 
 // ---------------------------
@@ -179,7 +175,7 @@ tap.test('.Smartfile -> should produce vinyl compatible files', async () => {
     './test/testassets/testfolder/**/*'
   );
   const localSmartfile = smartfileArray[0];
-  expect(localSmartfile).toBeInstanceOf(smartfile.Smartfile);
+  expect(localSmartfile).toBeInstanceOf(smartfile.SmartFile);
   expect(localSmartfile.contents).toBeInstanceOf(Buffer);
   // tslint:disable-next-line:no-unused-expression
   expect(localSmartfile.isBuffer()).toBeTrue();
@@ -206,13 +202,21 @@ tap.test('should output a smartfile array to disk', async () => {
 tap.test('should create, store and retrieve valid smartfiles', async () => {
   const fileString = 'hi there';
   const filePath = './test/testassets/utf8.txt';
-  const smartfileInstance = await smartfile.Smartfile.fromString(filePath, fileString, 'utf8');
+  const smartfileInstance = await smartfile.SmartFile.fromString(filePath, fileString, 'utf8');
   smartfileInstance.write();
-  const smartfileInstance2 = await smartfile.Smartfile.fromFilePath(filePath);
+  const smartfileInstance2 = await smartfile.SmartFile.fromFilePath(filePath);
   const retrievedString = smartfileInstance.contents.toString();
   expect(retrievedString).toEqual(fileString);
 });
 
+tap.test('should get a hash', async () => {
+  const fileString = 'hi there';
+  const filePath = './test/testassets/utf8.txt';
+  const smartfileInstance = await smartfile.SmartFile.fromString(filePath, fileString, 'utf8');
+  const hash = await smartfileInstance.getHash();
+  console.log(hash);
+});
+
 tap.test('should wait for file to be ready', async () => {
   await smartfile.fs.waitForFileToBeReady('./test/testassets/mytest.json');
 });
 

@@ -1,4 +1,4 @@
-import { tap, expect } from '@pushrocks/tapbundle';
+import { tap, expect } from '@push.rocks/tapbundle';
 
 import * as smartfile from '../ts/index.js';
 

test/testassets/temp/mytest.json (new file, 8 lines)
@@ -0,0 +1,8 @@
+{
+  "key1": "this works",
+  "key2": "this works too",
+  "key3": {
+    "nestedkey1": "hello"
+  }
+}
+

test/testassets/temp/test/testassets/mytest.json (new file, 8 lines)
@@ -0,0 +1,8 @@
+{
+  "key1": "this works",
+  "key2": "this works too",
+  "key3": {
+    "nestedkey1": "hello"
+  }
+}
+

@@ -2,7 +2,7 @@
  * autocreated commitinfo by @pushrocks/commitinfo
  */
 export const commitinfo = {
-  name: '@pushrocks/smartfile',
-  version: '10.0.1',
+  name: '@push.rocks/smartfile',
+  version: '11.0.0',
   description: 'offers smart ways to work with files in nodejs'
 }

@@ -1,6 +1,6 @@
 import * as plugins from './smartfile.plugins.js';
-import * as fs from './smartfile.fs.js';
-import * as memory from './smartfile.memory.js';
+import * as fs from './fs.js';
+import * as memory from './memory.js';
 
 export interface ISmartfileConstructorOptions {
   path: string;
@@ -9,10 +9,9 @@ export interface ISmartfileConstructorOptions {
 }
 
 /**
- * class Smartfile
- * -> is vinyl file compatible
+ * an vinyl file compatible in memory file class
  */
-export class Smartfile extends plugins.smartjson.Smartjson {
+export class SmartFile extends plugins.smartjson.Smartjson {
   // ======
   // STATIC
   // ======
@@ -24,7 +23,7 @@ export class Smartfile extends plugins.smartjson.Smartjson {
   public static async fromFilePath(filePath: string, baseArg: string = process.cwd()) {
     filePath = plugins.path.resolve(filePath);
     const fileBuffer = fs.toBufferSync(filePath);
-    const smartfile = new Smartfile({
+    const smartfile = new SmartFile({
       contentBuffer: fileBuffer,
       base: baseArg,
       path: plugins.path.relative(baseArg, filePath),
@@ -37,7 +36,7 @@ export class Smartfile extends plugins.smartjson.Smartjson {
     contentBufferArg: Buffer,
     baseArg: string = process.cwd()
   ) {
-    const smartfile = new Smartfile({
+    const smartfile = new SmartFile({
       contentBuffer: contentBufferArg,
       base: baseArg,
       path: plugins.path.relative(baseArg, filePath),
@@ -52,7 +51,7 @@ export class Smartfile extends plugins.smartjson.Smartjson {
     encodingArg: 'utf8' | 'binary',
     baseArg = process.cwd()
   ) {
-    const smartfile = new Smartfile({
+    const smartfile = new SmartFile({
       contentBuffer: Buffer.from(contentStringArg, encodingArg),
       base: baseArg,
       path: plugins.path.relative(baseArg, filePath),
@@ -62,7 +61,34 @@ export class Smartfile extends plugins.smartjson.Smartjson {
   }
 
   public static async fromFoldedJson(foldedJsonArg: string) {
-    return new Smartfile(plugins.smartjson.parse(foldedJsonArg));
+    return new SmartFile(plugins.smartjson.parse(foldedJsonArg));
+  }
+
+  /**
+   * creates a Smartfile from a ReadableStream
+   * @param stream a readable stream that provides file content
+   * @param filePath the file path to associate with the content
+   * @param baseArg the base path to use for the file
+   */
+  public static async fromStream(
+    stream: plugins.stream.Readable,
+    filePath: string,
+    baseArg: string = process.cwd()
+  ): Promise<SmartFile> {
+    return new Promise<SmartFile>((resolve, reject) => {
+      const chunks: Buffer[] = [];
+      stream.on('data', (chunk) => chunks.push(Buffer.from(chunk)));
+      stream.on('error', (error) => reject(error));
+      stream.on('end', () => {
+        const contentBuffer = Buffer.concat(chunks);
+        const smartfile = new SmartFile({
+          contentBuffer: contentBuffer,
+          base: baseArg,
+          path: plugins.path.relative(baseArg, filePath),
+        });
+        resolve(smartfile);
+      });
+    });
   }
 
   // ========
@@ -128,7 +154,7 @@ export class Smartfile extends plugins.smartjson.Smartjson {
    * @param contentString
    */
   public setContentsFromString(contentString: string, encodingArg: 'utf8' | 'binary' = 'utf8') {
-    this.contents = new Buffer(contentString, encodingArg);
+    this.contents = Buffer.from(contentString, encodingArg);
   }
 
   /**
@@ -237,9 +263,39 @@ export class Smartfile extends plugins.smartjson.Smartjson {
     return false;
   }
 
+  public async getHash(typeArg: 'path' | 'content' | 'all' = 'all') {
+    const pathHash = await plugins.smarthash.sha256FromString(this.path);
+    const contentHash = await plugins.smarthash.sha256FromBuffer(this.contentBuffer);
+    const combinedHash = await plugins.smarthash.sha256FromString(pathHash + contentHash);
+    switch (typeArg) {
+      case 'path':
+        return pathHash;
+      case 'content':
+        return contentHash;
+      case 'all':
+      default:
+        return combinedHash;
+    }
+  }
+
   // update things
   public updateFileName(fileNameArg: string) {
     const oldFileName = this.parsedPath.base;
     this.path = this.path.replace(new RegExp(oldFileName + '$'), fileNameArg);
   }
 
+  public async editContentAsString(editFuncArg: (fileStringArg: string) => Promise<string>) {
+    const newFileString = await editFuncArg(this.contentBuffer.toString());
+    this.contentBuffer = Buffer.from(newFileString);
+  }
+
+  /**
+   * Returns a ReadableStream from the file's content buffer
+   */
+  public getStream(): plugins.stream.Readable {
+    const stream = new plugins.stream.Readable();
+    stream.push(this.contentBuffer); // Push the content buffer to the stream
+    stream.push(null); // Push null to signify the end of the stream (EOF)
+    return stream;
+  }
 }
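
Two of the additions above, `SmartFile.fromStream()` and `getHash()`, are easiest to read together: the first buffers a Node.js readable stream into an in-memory SmartFile, the second hashes the path, the content, or both. A minimal usage sketch under assumptions not stated in the diff — that the package is consumed as `@push.rocks/smartfile` and that `./some/file.txt` exists (both hypothetical):

```typescript
import * as fs from 'fs';
import { SmartFile } from '@push.rocks/smartfile'; // assumed published entry point

// collect a readable stream into an in-memory SmartFile
const readable = fs.createReadStream('./some/file.txt'); // hypothetical path
const smartFile = await SmartFile.fromStream(readable, './some/file.txt');

// hash over path, content, or both (default 'all'), per the switch in getHash()
const contentHash = await smartFile.getHash('content');
const combinedHash = await smartFile.getHash();
console.log(contentHash, combinedHash);
```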

ts/classes.streamfile.ts (new file, 152 lines)
@@ -0,0 +1,152 @@
+import * as plugins from './smartfile.plugins.js';
+import * as smartfileFs from './fs.js';
+import * as smartfileFsStream from './fsstream.js';
+import { Readable } from 'stream';
+
+type TStreamSource = (streamFile: StreamFile) => Promise<Readable>;
+
+/**
+ * The StreamFile class represents a file as a stream.
+ * It allows creating streams from a file path, a URL, or a buffer.
+ */
+export class StreamFile {
+  // INSTANCE
+  relativeFilePath?: string;
+  private streamSource: TStreamSource;
+
+  // enable stream based multi use
+  private cachedStreamBuffer?: Buffer;
+  public multiUse: boolean;
+  public used: boolean = false;
+
+  private constructor(streamSource: TStreamSource, relativeFilePath?: string) {
+    this.streamSource = streamSource;
+    this.relativeFilePath = relativeFilePath;
+  }
+
+  // STATIC
+  public static async fromPath(filePath: string): Promise<StreamFile> {
+    const streamSource: TStreamSource = async (stremFileArg) => smartfileFsStream.createReadStream(filePath);
+    const streamFile = new StreamFile(streamSource, filePath);
+    streamFile.multiUse = true;
+    return streamFile;
+  }
+
+  public static async fromUrl(url: string): Promise<StreamFile> {
+    const streamSource: TStreamSource = async (streamFileArg) => plugins.smartrequest.getStream(url); // Replace with actual plugin method
+    const streamFile = new StreamFile(streamSource);
+    streamFile.multiUse = true;
+    return streamFile;
+  }
+
+  public static fromBuffer(buffer: Buffer, relativeFilePath?: string): StreamFile {
+    const streamSource: TStreamSource = async (streamFileArg) => {
+      const stream = new Readable();
+      stream.push(buffer);
+      stream.push(null); // End of stream
+      return stream;
+    };
+    const streamFile = new StreamFile(streamSource, relativeFilePath);
+    streamFile.multiUse = true;
+    return streamFile;
+  }
+
+  /**
+   * Creates a StreamFile from an existing Readable stream with an option for multiple uses.
+   * @param stream A Node.js Readable stream.
+   * @param relativeFilePath Optional file path for the stream.
+   * @param multiUse If true, the stream can be read multiple times, caching its content.
+   * @returns A StreamFile instance.
+   */
+  public static fromStream(stream: Readable, relativeFilePath?: string, multiUse: boolean = false): StreamFile {
+    const streamSource: TStreamSource = (streamFileArg) => {
+      if (streamFileArg.multiUse) {
+        // If multi-use is enabled and we have cached content, create a new readable stream from the buffer
+        const bufferedStream = new Readable();
+        bufferedStream.push(streamFileArg.cachedStreamBuffer);
+        bufferedStream.push(null); // No more data to push
+        return Promise.resolve(bufferedStream);
+      } else {
+        return Promise.resolve(stream);
+      }
+    };
+
+    const streamFile = new StreamFile(streamSource, relativeFilePath);
+    streamFile.multiUse = multiUse;
+
+    // If multi-use is enabled, cache the stream when it's first read
+    if (multiUse) {
+      const chunks: Buffer[] = [];
+      stream.on('data', (chunk) => chunks.push(Buffer.from(chunk)));
+      stream.on('end', () => {
+        streamFile.cachedStreamBuffer = Buffer.concat(chunks);
+      });
+      // It's important to handle errors that may occur during streaming
+      stream.on('error', (err) => {
+        console.error('Error while caching stream:', err);
+      });
+    }
+
+    return streamFile;
+  }
+
+  // METHODS
+
+  private checkMultiUse() {
+    if (!this.multiUse && this.used) {
+      throw new Error('This stream can only be used once.');
+    }
+    this.used = true;
+  }
+
+  /**
+   * Creates a new readable stream from the source.
+   */
+  public async createReadStream(): Promise<Readable> {
+    return this.streamSource(this);
+  }
+
+  /**
+   * Writes the stream to the disk at the specified path.
+   * @param filePathArg The file path where the stream should be written.
+   */
+  public async writeToDisk(filePathArg: string): Promise<void> {
+    this.checkMultiUse();
+    const readStream = await this.createReadStream();
+    const writeStream = smartfileFsStream.createWriteStream(filePathArg);
+
+    return new Promise((resolve, reject) => {
+      readStream.pipe(writeStream);
+      readStream.on('error', reject);
+      writeStream.on('error', reject);
+      writeStream.on('finish', resolve);
+    });
+  }
+
+  public async writeToDir(dirPathArg: string) {
+    this.checkMultiUse();
+    const filePath = plugins.path.join(dirPathArg, this.relativeFilePath);
+    await smartfileFs.ensureDir(plugins.path.parse(filePath).dir);
+    return this.writeToDisk(filePath);
+  }
+
+  public async getContentAsBuffer() {
+    this.checkMultiUse();
+    const done = plugins.smartpromise.defer<Buffer>();
+    const readStream = await this.createReadStream();
+    const chunks: Buffer[] = [];
+    readStream.on('data', (chunk) => chunks.push(Buffer.from(chunk)));
+    readStream.on('error', done.reject);
+    readStream.on('end', () => {
+      const contentBuffer = Buffer.concat(chunks);
+      done.resolve(contentBuffer);
+    });
+    return done.promise;
+  }
+
+  public async getContentAsString(formatArg: 'utf8' | 'binary' = 'utf8') {
+    const contentBuffer = await this.getContentAsBuffer();
+    return contentBuffer.toString(formatArg);
+  }
+}
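
Taken together, the static constructors and instance methods of the new StreamFile class suggest a usage pattern roughly like the sketch below. The import assumes the published `@push.rocks/smartfile` entry point re-exports StreamFile (as ts/index.ts does); the file paths are hypothetical:

```typescript
import { StreamFile } from '@push.rocks/smartfile';

// fromPath marks the instance multiUse, so the underlying file can be streamed repeatedly
const streamFile = await StreamFile.fromPath('./test/testassets/mytest.json');

// first use: buffer the content into memory
const asBuffer = await streamFile.getContentAsBuffer();

// second use: pipe the same source to a new location on disk
await streamFile.writeToDisk('./test/testassets/temp/mytest.json');

// relativeFilePath is what writeToDir() uses to place the file inside a target directory
const fromBuffer = StreamFile.fromBuffer(Buffer.from('hello'), 'hello.txt');
await fromBuffer.writeToDir('./test/testassets/temp');

console.log(asBuffer.byteLength);
```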

@@ -1,11 +1,21 @@
-import { Smartfile } from './smartfile.classes.smartfile.js';
+import { SmartFile } from './classes.smartfile.js';
 import * as plugins from './smartfile.plugins.js';
-import * as fs from './smartfile.fs.js';
+import * as fs from './fs.js';
 
+
+export interface IVirtualDirectoryConstructorOptions {
+  mode: ''
+}
+
 /**
  * a virtual directory exposes a fs api
  */
 export class VirtualDirectory {
+
+  consstructor(options = {}) {
+
+  }
+
   // STATIC
   public static async fromFsDirPath(pathArg: string): Promise<VirtualDirectory> {
     const newVirtualDir = new VirtualDirectory();
@@ -18,17 +28,17 @@ export class VirtualDirectory {
   ): Promise<VirtualDirectory> {
     const newVirtualDir = new VirtualDirectory();
     for (const fileArg of virtualDirTransferableObjectArg.files) {
-      newVirtualDir.addSmartfiles([Smartfile.enfoldFromJson(fileArg) as Smartfile]);
+      newVirtualDir.addSmartfiles([SmartFile.enfoldFromJson(fileArg) as SmartFile]);
     }
     return newVirtualDir;
   }
 
   // INSTANCE
-  public smartfileArray: Smartfile[] = [];
+  public smartfileArray: SmartFile[] = [];
 
   constructor() {}
 
-  public addSmartfiles(smartfileArrayArg: Smartfile[]) {
+  public addSmartfiles(smartfileArrayArg: SmartFile[]) {
     this.smartfileArray = this.smartfileArray.concat(smartfileArrayArg);
   }
 
@@ -47,7 +57,7 @@ export class VirtualDirectory {
   }
 
   public async saveToDisk(dirArg: string) {
-    console.log(`writing VirtualDirectory with ${this.smartfileArray.length} to directory:
+    console.log(`writing VirtualDirectory with ${this.smartfileArray.length} files to directory:
 --> ${dirArg}`);
     for (const smartfileArg of this.smartfileArray) {
       const filePath = await smartfileArg.writeToDir(dirArg);
@@ -56,6 +66,22 @@ export class VirtualDirectory {
     }
   }
 
-  // TODO implement root shifting to get subdirectories as new virtual directories
-  // TODO implement root shifting to combine VirtualDirecotries in a parent virtual directory
+  public async shiftToSubdirectory(subDir: string): Promise<VirtualDirectory> {
+    const newVirtualDir = new VirtualDirectory();
+    for (const file of this.smartfileArray) {
+      if (file.path.startsWith(subDir)) {
+        const adjustedFilePath = plugins.path.relative(subDir, file.path);
+        file.path = adjustedFilePath;
+        newVirtualDir.addSmartfiles([file]);
+      }
+    }
+    return newVirtualDir;
+  }
+
+  public async addVirtualDirectory(virtualDir: VirtualDirectory, newRoot: string): Promise<void> {
+    for (const file of virtualDir.smartfileArray) {
+      file.path = plugins.path.join(newRoot, file.path);
+    }
+    this.addSmartfiles(virtualDir.smartfileArray);
+  }
 }
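
The two methods added at the bottom of the class, `shiftToSubdirectory()` and `addVirtualDirectory()`, complement each other: one narrows a VirtualDirectory to a subtree and re-roots the file paths, the other grafts another directory in under a new root. A minimal sketch of how they might be combined, assuming directories `./src` and `./assets` exist and that the class is consumed through `@push.rocks/smartfile` (all hypothetical):

```typescript
import { VirtualDirectory } from '@push.rocks/smartfile';

// load two directory trees into memory
const srcDir = await VirtualDirectory.fromFsDirPath('./src');
const assetsDir = await VirtualDirectory.fromFsDirPath('./assets');

// keep only the files under 'components/' and strip that prefix from their paths
const componentsOnly = await srcDir.shiftToSubdirectory('components');

// mount the assets tree under 'static/' inside the narrowed directory
await componentsOnly.addVirtualDirectory(assetsDir, 'static');

// write the combined tree back out
await componentsOnly.saveToDisk('./dist_virtual');
```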

@@ -1,9 +1,9 @@
 import * as plugins from './smartfile.plugins.js';
-import * as interpreter from './smartfile.interpreter.js';
+import * as interpreter from './interpreter.js';
 
-import { Smartfile } from './smartfile.classes.smartfile.js';
+import { SmartFile } from './classes.smartfile.js';
 
-import * as memory from './smartfile.memory.js';
+import * as memory from './memory.js';
 /*===============================================================
 ============================ Checks =============================
 ===============================================================*/
@@ -40,7 +40,18 @@ export const fileExists = async (filePath): Promise<boolean> => {
 /**
  * Checks if given path points to an existing directory
  */
-export const isDirectory = (pathArg): boolean => {
+export const isDirectory = (pathArg: string): boolean => {
+  try {
+    return plugins.fsExtra.statSync(pathArg).isDirectory();
+  } catch (err) {
+    return false;
+  }
+};
+
+/**
+ * Checks if given path points to an existing directory
+ */
+export const isDirectorySync = (pathArg: string): boolean => {
   try {
     return plugins.fsExtra.statSync(pathArg).isDirectory();
   } catch (err) {
@@ -210,11 +221,23 @@ export const toBufferSync = (filePath: string): Buffer => {
   return plugins.fsExtra.readFileSync(filePath);
 };
 
+/**
+ * Creates a Readable Stream from a file path.
+ * @param filePath The path to the file.
+ * @returns {fs.ReadStream}
+ */
+export const toReadStream = (filePath: string): plugins.fs.ReadStream => {
+  if (!fileExistsSync(filePath)) {
+    throw new Error(`File does not exist at path: ${filePath}`);
+  }
+  return plugins.fsExtra.createReadStream(filePath);
+};
+
 export const fileTreeToHash = async (dirPathArg: string, miniMatchFilter: string) => {
   const fileTreeObject = await fileTreeToObject(dirPathArg, miniMatchFilter);
   let combinedString = '';
   for (const smartfile of fileTreeObject) {
-    combinedString += smartfile.contentBuffer.toString();
+    combinedString += await smartfile.getHash();
   }
   const hash = await plugins.smarthash.sha256FromString(combinedString);
   return hash;
@@ -235,7 +258,7 @@ export const fileTreeToObject = async (dirPathArg: string, miniMatchFilter: stri
   }
 
   const fileTree = await listFileTree(dirPath, miniMatchFilter);
-  const smartfileArray: Smartfile[] = [];
+  const smartfileArray: SmartFile[] = [];
   for (const filePath of fileTree) {
     const readPath = ((): string => {
       if (!plugins.path.isAbsolute(filePath)) {
@@ -244,12 +267,12 @@ export const fileTreeToObject = async (dirPathArg: string, miniMatchFilter: stri
         return filePath;
       }
     })();
-    const fileContentString = toStringSync(readPath);
+    const fileBuffer = plugins.fs.readFileSync(readPath);
 
     // push a read file as Smartfile
     smartfileArray.push(
-      new Smartfile({
-        contentBuffer: Buffer.from(fileContentString),
+      new SmartFile({
+        contentBuffer: fileBuffer,
         base: dirPath,
         path: filePath,
       })
@@ -341,8 +364,6 @@ export const listFileTree = async (
   miniMatchFilter: string,
   absolutePathsBool: boolean = false
 ): Promise<string[]> => {
-  const done = plugins.smartpromise.defer<string[]>();
-
   // handle absolute miniMatchFilter
   let dirPath: string;
   if (plugins.path.isAbsolute(miniMatchFilter)) {
@@ -356,15 +377,8 @@ export const listFileTree = async (
     nodir: true,
     dot: true,
   };
-  plugins.glob(miniMatchFilter, options, (err, files: string[]) => {
-    if (err) {
-      console.log(err);
-      done.reject(err);
-    }
-    done.resolve(files);
-  });
-
-  let fileList = await done.promise;
+  let fileList = await plugins.glob.glob(miniMatchFilter, options);
   if (absolutePathsBool) {
     fileList = fileList.map((filePath) => {
       return plugins.path.resolve(plugins.path.join(dirPath, filePath));
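
Two behavioural changes in this file stand out: `fileTreeToHash()` now hashes per file via `SmartFile.getHash()` instead of concatenating raw contents, and `listFileTree()` switched from the callback-style glob API to the promise-based `glob.glob()` of glob v10, while the new `toReadStream()` helper simply guards `createReadStream` with an existence check. A short sketch of the exported helpers, with hypothetical paths and assuming the `fs` namespace export from ts/index.ts:

```typescript
import * as smartfile from '@push.rocks/smartfile';

// stream a file instead of buffering it all at once
const readStream = smartfile.fs.toReadStream('./test/testassets/mytest.json');
readStream.pipe(process.stdout);

// hash a whole tree of TypeScript sources (minimatch filter, as in listFileTree)
const treeHash = await smartfile.fs.fileTreeToHash('./ts', '**/*.ts');
console.log(treeHash);
```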

ts/fsstream.ts (new file, 39 lines)
@@ -0,0 +1,39 @@
+/*
+This file contains logic for streaming things from and to the filesystem
+*/
+import * as plugins from './smartfile.plugins.js';
+
+export const createReadStream = (pathArg: string) => {
+  return plugins.fs.createReadStream(pathArg);
+};
+
+export const createWriteStream = (pathArg: string) => {
+  return plugins.fs.createWriteStream(pathArg);
+};
+
+export const processFile = async (
+  filePath: string,
+  asyncFunc: (fileStream: plugins.stream.Readable) => Promise<void>
+): Promise<void> => {
+  return new Promise((resolve, reject) => {
+    const fileStream = createReadStream(filePath);
+    asyncFunc(fileStream).then(resolve).catch(reject);
+  });
+}
+
+export const processDirectory = async (
+  directoryPath: string,
+  asyncFunc: (fileStream: plugins.stream.Readable) => Promise<void>
+): Promise<void> => {
+  const files = plugins.fs.readdirSync(directoryPath, { withFileTypes: true });
+
+  for (const file of files) {
+    const fullPath = plugins.path.join(directoryPath, file.name);
+
+    if (file.isDirectory()) {
+      await processDirectory(fullPath, asyncFunc); // Recursively call processDirectory for directories
+    } else if (file.isFile()) {
+      await processFile(fullPath, asyncFunc); // Call async function with the file stream and wait for it
+    }
+  }
+};
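
The new `processFile()` / `processDirectory()` pair walks a directory recursively and hands each file to an async callback as a readable stream, so large trees can be processed without loading whole files into memory. A minimal sketch, assuming the module is reached through the `fsStream` export from ts/index.ts and that `./logs` is a real directory (hypothetical):

```typescript
import { createHash } from 'crypto';
import * as smartfile from '@push.rocks/smartfile';

// hash every file under ./logs without buffering whole files
await smartfile.fsStream.processDirectory('./logs', async (fileStream) => {
  const hash = createHash('sha256');
  for await (const chunk of fileStream) {
    hash.update(chunk);
  }
  console.log(hash.digest('hex'));
});
```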

ts/index.ts (13 changed lines)
@@ -1,11 +1,12 @@
 import * as plugins from './smartfile.plugins.js';
-import * as fsMod from './smartfile.fs.js';
-import * as fsStreamMod from './smartfile.fsstream.js';
-import * as interpreterMod from './smartfile.interpreter.js';
-import * as memoryMod from './smartfile.memory.js';
+import * as fsMod from './fs.js';
+import * as fsStreamMod from './fsstream.js';
+import * as interpreterMod from './interpreter.js';
+import * as memoryMod from './memory.js';
 
-export * from './smartfile.classes.smartfile.js';
-export * from './smartfile.classes.virtualdirectory.js';
+export * from './classes.smartfile.js';
+export * from './classes.streamfile.js';
+export * from './classes.virtualdirectory.js';
 
 export const fs = fsMod;
 export const fsStream = fsStreamMod;
|
@ -1,7 +1,8 @@
|
|||||||
import * as plugins from './smartfile.plugins.js';
|
import * as plugins from './smartfile.plugins.js';
|
||||||
import { Smartfile } from './smartfile.classes.smartfile.js';
|
import { SmartFile } from './classes.smartfile.js';
|
||||||
import * as smartfileFs from './smartfile.fs.js';
|
import * as smartfileFs from './fs.js';
|
||||||
import * as interpreter from './smartfile.interpreter.js';
|
import * as interpreter from './interpreter.js';
|
||||||
|
import type { StreamFile } from './classes.streamfile.js';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* converts file to Object
|
* converts file to Object
|
||||||
@ -24,7 +25,7 @@ export interface IToFsOptions {
|
|||||||
* @param fileBaseArg
|
* @param fileBaseArg
|
||||||
*/
|
*/
|
||||||
export let toFs = async (
|
export let toFs = async (
|
||||||
fileContentArg: string | Buffer | Smartfile,
|
fileContentArg: string | Buffer | SmartFile | StreamFile,
|
||||||
filePathArg: string,
|
filePathArg: string,
|
||||||
optionsArg: IToFsOptions = {}
|
optionsArg: IToFsOptions = {}
|
||||||
) => {
|
) => {
|
||||||
@ -41,7 +42,7 @@ export let toFs = async (
|
|||||||
let filePath: string = filePathArg;
|
let filePath: string = filePathArg;
|
||||||
|
|
||||||
// handle Smartfile
|
// handle Smartfile
|
||||||
if (fileContentArg instanceof Smartfile) {
|
if (fileContentArg instanceof SmartFile) {
|
||||||
fileContent = fileContentArg.contentBuffer;
|
fileContent = fileContentArg.contentBuffer;
|
||||||
// handle options
|
// handle options
|
||||||
if (optionsArg.respectRelative) {
|
if (optionsArg.respectRelative) {
|
||||||
@ -83,7 +84,7 @@ export const toFsSync = (fileArg: string, filePathArg: string) => {
|
|||||||
plugins.fsExtra.writeFileSync(filePath, fileString, { encoding: 'utf8' });
|
plugins.fsExtra.writeFileSync(filePath, fileString, { encoding: 'utf8' });
|
||||||
};
|
};
|
||||||
|
|
||||||
export let smartfileArrayToFs = async (smartfileArrayArg: Smartfile[], dirArg: string) => {
|
export let smartfileArrayToFs = async (smartfileArrayArg: SmartFile[], dirArg: string) => {
|
||||||
await smartfileFs.ensureDir(dirArg);
|
await smartfileFs.ensureDir(dirArg);
|
||||||
for (const smartfile of smartfileArrayArg) {
|
for (const smartfile of smartfileArrayArg) {
|
||||||
await toFs(smartfile, dirArg, {
|
await toFs(smartfile, dirArg, {
|

@@ -1,17 +0,0 @@
-/*
-This file contains logic for streaming things from and to the filesystem
-*/
-import * as plugins from './smartfile.plugins.js';
-
-export const createReadStream = (pathArg: string) => {
-  return plugins.fs.createReadStream(pathArg);
-};
-
-export const createWriteStream = (pathArg: string) => {
-  return plugins.fs.createWriteStream(pathArg);
-};
-
-
-export const streamDirectory = async (dirPathArg: string) => {
-
-}

@@ -1,20 +1,21 @@
 // node native scope
 import * as fs from 'fs';
 import * as path from 'path';
+import * as stream from 'stream';
 
-export { fs, path };
+export { fs, path, stream };
 
 // @pushrocks scope
-import * as lik from '@pushrocks/lik';
-import * as smartfileInterfaces from '@pushrocks/smartfile-interfaces';
-import * as smartdelay from '@pushrocks/smartdelay';
-import * as smarthash from '@pushrocks/smarthash';
-import * as smartjson from '@pushrocks/smartjson';
-import * as smartmime from '@pushrocks/smartmime';
-import * as smartpath from '@pushrocks/smartpath';
-import * as smartpromise from '@pushrocks/smartpromise';
-import * as smartrequest from '@pushrocks/smartrequest';
-import * as smartstream from '@pushrocks/smartstream';
+import * as lik from '@push.rocks/lik';
+import * as smartfileInterfaces from '@push.rocks/smartfile-interfaces';
+import * as smartdelay from '@push.rocks/smartdelay';
+import * as smarthash from '@push.rocks/smarthash';
+import * as smartjson from '@push.rocks/smartjson';
+import * as smartmime from '@push.rocks/smartmime';
+import * as smartpath from '@push.rocks/smartpath';
+import * as smartpromise from '@push.rocks/smartpromise';
+import * as smartrequest from '@push.rocks/smartrequest';
+import * as smartstream from '@push.rocks/smartstream';
 
 export {
   lik,
@@ -26,12 +27,12 @@ export {
   smartpath,
   smartpromise,
   smartrequest,
-  smartstream
+  smartstream,
 };
 
 // third party scope
 import fsExtra from 'fs-extra';
-import glob from 'glob';
+import * as glob from 'glob';
 import yaml from 'js-yaml';
 
 export { fsExtra, glob, yaml };

@@ -1,8 +1,11 @@
 {
   "compilerOptions": {
     "experimentalDecorators": true,
+    "useDefineForClassFields": false,
+    "target": "ES2022",
+    "module": "ES2022",
+    "moduleResolution": "nodenext",
     "esModuleInterop": true,
-    "target": "ES2017",
-    "moduleResolution": "node"
+    "verbatimModuleSyntax": true,
   }
 }