Compare commits

...

105 Commits

Author SHA1 Message Date
5a9e1b5798 11.0.5 2024-04-01 17:46:40 +02:00
b1ec86ee2d fix(core): update 2024-04-01 17:46:40 +02:00
a1353170f6 update npmextra.json: githost 2024-03-30 21:47:12 +01:00
3ff6de201d 11.0.4 2023-11-24 19:29:00 +01:00
f5c106b2ca fix(core): update 2023-11-24 19:28:59 +01:00
d3c26d0d46 11.0.3 2023-11-24 19:15:41 +01:00
9935fe2d3c fix(core): update 2023-11-24 19:15:41 +01:00
3b05aab39b 11.0.2 2023-11-07 21:32:00 +01:00
53be2eb59d fix(core): update 2023-11-07 21:32:00 +01:00
c92a0dddbd 11.0.1 2023-11-07 14:09:49 +01:00
27403a73b5 fix(core): update 2023-11-07 14:09:48 +01:00
b925e5e662 11.0.0 2023-11-06 11:15:12 +01:00
98a5d2c94d BREAKING CHANGE(core): update 2023-11-06 11:15:11 +01:00
0e735cba20 10.0.40 2023-11-04 20:54:14 +01:00
f815457801 fix(core): update 2023-11-04 20:54:13 +01:00
f7e47ae354 10.0.39 2023-11-04 20:43:55 +01:00
684e893801 fix(core): update 2023-11-04 20:43:54 +01:00
d4b381d33d 10.0.38 2023-11-04 20:14:21 +01:00
291a11aa60 fix(core): update 2023-11-04 20:14:20 +01:00
ca592afec9 update 2023-11-04 20:07:43 +01:00
8b07197224 10.0.37 2023-11-03 02:31:57 +01:00
b60fd15ec6 fix(core): update 2023-11-03 02:31:57 +01:00
853eccc780 10.0.36 2023-11-03 02:24:37 +01:00
c26aff85b5 fix(core): update 2023-11-03 02:24:36 +01:00
321e4d9dea 10.0.35 2023-11-03 01:25:38 +01:00
3d2789857c fix(core): update 2023-11-03 01:25:37 +01:00
07b88a078d 10.0.34 2023-11-03 00:41:06 +01:00
6fee0028d8 fix(core): update 2023-11-03 00:41:05 +01:00
629c52f9bc 10.0.33 2023-10-12 02:21:40 +02:00
fd056c29e9 fix(core): update 2023-10-12 02:21:39 +02:00
36c456b509 10.0.32 2023-09-22 17:05:35 +02:00
16f8c25557 fix(core): update 2023-09-22 17:05:35 +02:00
219e070ba2 10.0.31 2023-08-31 18:45:24 +02:00
ee97e1d88b fix(core): update 2023-08-31 18:45:23 +02:00
279db74568 10.0.30 2023-08-23 10:58:38 +02:00
b84c504f11 fix(core): update 2023-08-23 10:58:38 +02:00
7b3194cc13 10.0.29 2023-08-23 09:38:49 +02:00
e1e821efec fix(core): update 2023-08-23 09:38:49 +02:00
6b613d1b8a 10.0.28 2023-07-12 10:00:40 +02:00
70f1c58a82 fix(core): update 2023-07-12 10:00:40 +02:00
5df76ca94b 10.0.27 2023-07-10 23:07:51 +02:00
32cfda3c90 fix(core): update 2023-07-10 23:07:50 +02:00
dd521398ea switch to new org scheme 2023-07-10 02:55:52 +02:00
038e6cc33d 10.0.26 2023-07-08 16:24:53 +02:00
2fc37d6892 fix(core): update 2023-07-08 16:24:53 +02:00
3c1eb1ab70 10.0.25 2023-06-25 19:01:11 +02:00
5296e8859b fix(core): update 2023-06-25 19:01:10 +02:00
160e0ae451 10.0.24 2023-06-25 18:21:13 +02:00
373c6538ae fix(core): update 2023-06-25 18:21:12 +02:00
7a1476e106 10.0.23 2023-06-25 18:06:56 +02:00
b2a2035f00 fix(core): update 2023-06-25 18:06:56 +02:00
03e4f03035 10.0.22 2023-06-25 17:53:44 +02:00
d74bbb2b12 fix(core): update 2023-06-25 17:53:43 +02:00
22cfe1f5cb 10.0.21 2023-06-25 17:47:42 +02:00
5fc2c4586d fix(core): update 2023-06-25 17:47:42 +02:00
6ab81fb323 10.0.20 2023-06-25 13:55:04 +02:00
58ec27a1a0 fix(core): update 2023-06-25 13:55:03 +02:00
88811646b7 10.0.19 2023-06-25 01:36:06 +02:00
412bb52eee fix(core): update 2023-06-25 01:36:05 +02:00
b04750ecbd 10.0.18 2023-06-25 01:34:41 +02:00
0c99475888 fix(core): update 2023-06-25 01:34:40 +02:00
86317def88 10.0.17 2023-06-24 20:36:53 +02:00
0a5af0ba96 fix(core): update 2023-06-24 20:36:52 +02:00
cf73ff4a54 10.0.16 2023-06-24 11:20:50 +02:00
788897e765 fix(core): update 2023-06-24 11:20:50 +02:00
7fa3894f6e 10.0.15 2023-06-24 01:26:08 +02:00
afdd654664 fix(core): update 2023-06-24 01:26:08 +02:00
8277e0ca6d 10.0.14 2023-06-23 18:49:56 +02:00
0892c87a68 fix(core): update 2023-06-23 18:49:56 +02:00
841ba2e14d 10.0.13 2023-06-23 18:46:43 +02:00
5b2953bf02 fix(core): update 2023-06-23 18:46:42 +02:00
d3457fd65b 10.0.12 2023-06-23 18:43:50 +02:00
3bf1eafe6b fix(core): update 2023-06-23 18:43:49 +02:00
04a9b992d7 10.0.11 2023-06-23 18:39:02 +02:00
6206b55deb fix(core): update 2023-06-23 18:39:01 +02:00
6c05bf6ae3 10.0.10 2023-06-23 18:11:04 +02:00
371074afc1 fix(core): update 2023-06-23 18:11:04 +02:00
e0e665fe6d 10.0.9 2023-06-23 18:08:16 +02:00
5483202972 fix(core): update 2023-06-23 18:08:15 +02:00
24b3458888 10.0.8 2023-06-23 16:44:45 +02:00
cffa47ac3d fix(core): update 2023-06-23 16:44:44 +02:00
9dadf3f78f 10.0.7 2023-01-09 15:34:06 +01:00
b35a671fe9 fix(core): update 2023-01-09 15:34:05 +01:00
0ca38c109e 10.0.6 2023-01-09 15:32:37 +01:00
4e2321e1ee fix(core): update 2023-01-09 15:32:37 +01:00
a640ab3d7b 10.0.5 2022-09-05 00:21:43 +02:00
37d6d56287 fix(core): update 2022-09-05 00:21:42 +02:00
443a026502 10.0.4 2022-07-24 23:11:41 +02:00
9644c5b7e3 fix(core): update 2022-07-24 23:11:41 +02:00
25faa8c697 10.0.3 2022-07-24 23:04:52 +02:00
982387aaa3 fix(core): update 2022-07-24 23:04:51 +02:00
4a11f50efe 10.0.2 2022-06-09 19:27:00 +02:00
0ddec29392 fix(core): update 2022-06-09 19:26:59 +02:00
df484d54e8 10.0.1 2022-06-07 15:50:48 +02:00
f637c20241 fix(core): update 2022-06-07 15:50:47 +02:00
ed0c1a9181 10.0.0 2022-06-07 15:43:28 +02:00
0e22999f69 BREAKING CHANGE(core): switch to esm 2022-06-07 15:43:28 +02:00
38f001ab23 9.0.7 2022-06-07 15:11:22 +02:00
d1429c5a41 fix(core): update 2022-06-07 15:11:21 +02:00
403a0f4fae 9.0.6 2022-03-11 09:46:54 +01:00
91ade8a4d4 fix(updated repo structure): update 2022-03-11 09:46:54 +01:00
1a571bba90 9.0.5 2021-12-22 19:08:54 +01:00
bcb66b2ccf fix(core): update 2021-12-22 19:08:53 +01:00
0000984e4b 9.0.4 2021-12-20 15:11:22 +01:00
b391e54083 fix(core): update 2021-12-20 15:11:21 +01:00
28 changed files with 6823 additions and 27378 deletions


@ -0,0 +1,66 @@
name: Default (not tags)

on:
  push:
    tags-ignore:
      - '**'

env:
  IMAGE: registry.gitlab.com/hosttoday/ht-docker-node:npmci
  NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@gitea.lossless.digital/${{gitea.repository}}.git
  NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
  NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
  NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
  NPMCI_URL_CLOUDLY: ${{secrets.NPMCI_URL_CLOUDLY}}

jobs:
  security:
    runs-on: ubuntu-latest
    continue-on-error: true
    container:
      image: ${{ env.IMAGE }}
    steps:
      - uses: actions/checkout@v3

      - name: Install pnpm and npmci
        run: |
          pnpm install -g pnpm
          pnpm install -g @shipzone/npmci

      - name: Run npm prepare
        run: npmci npm prepare

      - name: Audit production dependencies
        run: |
          npmci command npm config set registry https://registry.npmjs.org
          npmci command pnpm audit --audit-level=high --prod
        continue-on-error: true

      - name: Audit development dependencies
        run: |
          npmci command npm config set registry https://registry.npmjs.org
          npmci command pnpm audit --audit-level=high --dev
        continue-on-error: true

  test:
    if: ${{ always() }}
    needs: security
    runs-on: ubuntu-latest
    container:
      image: ${{ env.IMAGE }}
    steps:
      - uses: actions/checkout@v3

      - name: Test stable
        run: |
          npmci node install stable
          npmci npm install
          npmci npm test

      - name: Test build
        run: |
          npmci node install stable
          npmci npm install
          npmci npm build


@ -0,0 +1,124 @@
name: Default (tags)

on:
  push:
    tags:
      - '*'

env:
  IMAGE: registry.gitlab.com/hosttoday/ht-docker-node:npmci
  NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@gitea.lossless.digital/${{gitea.repository}}.git
  NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
  NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
  NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
  NPMCI_URL_CLOUDLY: ${{secrets.NPMCI_URL_CLOUDLY}}

jobs:
  security:
    runs-on: ubuntu-latest
    continue-on-error: true
    container:
      image: ${{ env.IMAGE }}
    steps:
      - uses: actions/checkout@v3

      - name: Prepare
        run: |
          pnpm install -g pnpm
          pnpm install -g @shipzone/npmci
          npmci npm prepare

      - name: Audit production dependencies
        run: |
          npmci command npm config set registry https://registry.npmjs.org
          npmci command pnpm audit --audit-level=high --prod
        continue-on-error: true

      - name: Audit development dependencies
        run: |
          npmci command npm config set registry https://registry.npmjs.org
          npmci command pnpm audit --audit-level=high --dev
        continue-on-error: true

  test:
    if: ${{ always() }}
    needs: security
    runs-on: ubuntu-latest
    container:
      image: ${{ env.IMAGE }}
    steps:
      - uses: actions/checkout@v3

      - name: Prepare
        run: |
          pnpm install -g pnpm
          pnpm install -g @shipzone/npmci
          npmci npm prepare

      - name: Test stable
        run: |
          npmci node install stable
          npmci npm install
          npmci npm test

      - name: Test build
        run: |
          npmci node install stable
          npmci npm install
          npmci npm build

  release:
    needs: test
    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
    runs-on: ubuntu-latest
    container:
      image: ${{ env.IMAGE }}
    steps:
      - uses: actions/checkout@v3

      - name: Prepare
        run: |
          pnpm install -g pnpm
          pnpm install -g @shipzone/npmci
          npmci npm prepare

      - name: Release
        run: |
          npmci node install stable
          npmci npm publish

  metadata:
    needs: test
    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
    runs-on: ubuntu-latest
    container:
      image: ${{ env.IMAGE }}
    continue-on-error: true
    steps:
      - uses: actions/checkout@v3

      - name: Prepare
        run: |
          pnpm install -g pnpm
          pnpm install -g @shipzone/npmci
          npmci npm prepare

      - name: Code quality
        run: |
          npmci command npm install -g typescript
          npmci npm install

      - name: Trigger
        run: npmci trigger

      - name: Build docs and upload artifacts
        run: |
          npmci node install stable
          npmci npm install
          pnpm install -g @git.zone/tsdoc
          npmci command tsdoc
        continue-on-error: true


@ -1,137 +0,0 @@
# gitzone ci_default
image: registry.gitlab.com/hosttoday/ht-docker-node:npmci

cache:
  paths:
    - .npmci_cache/
  key: '$CI_BUILD_STAGE'

stages:
  - security
  - test
  - release
  - metadata

# ====================
# security stage
# ====================
mirror:
  stage: security
  script:
    - npmci git mirror
  only:
    - tags
  tags:
    - lossless
    - docker
    - notpriv

auditProductionDependencies:
  image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
  stage: security
  script:
    - npmci npm prepare
    - npmci command npm install --production --ignore-scripts
    - npmci command npm config set registry https://registry.npmjs.org
    - npmci command npm audit --audit-level=high --only=prod --production
  tags:
    - docker

auditDevDependencies:
  image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
  stage: security
  script:
    - npmci npm prepare
    - npmci command npm install --ignore-scripts
    - npmci command npm config set registry https://registry.npmjs.org
    - npmci command npm audit --audit-level=high --only=dev
  tags:
    - docker
  allow_failure: true

# ====================
# test stage
# ====================
testStable:
  stage: test
  script:
    - npmci npm prepare
    - npmci node install stable
    - npmci npm install
    - npmci npm test
  coverage: /\d+.?\d+?\%\s*coverage/
  tags:
    - docker

testBuild:
  stage: test
  script:
    - npmci npm prepare
    - npmci node install stable
    - npmci npm install
    - npmci command npm run build
  coverage: /\d+.?\d+?\%\s*coverage/
  tags:
    - docker

release:
  stage: release
  script:
    - npmci node install stable
    - npmci npm publish
  only:
    - tags
  tags:
    - lossless
    - docker
    - notpriv

# ====================
# metadata stage
# ====================
codequality:
  stage: metadata
  allow_failure: true
  only:
    - tags
  script:
    - npmci command npm install -g tslint typescript
    - npmci npm prepare
    - npmci npm install
    - npmci command "tslint -c tslint.json ./ts/**/*.ts"
  tags:
    - lossless
    - docker
    - priv

trigger:
  stage: metadata
  script:
    - npmci trigger
  only:
    - tags
  tags:
    - lossless
    - docker
    - notpriv

pages:
  stage: metadata
  script:
    - npmci node install lts
    - npmci command npm install -g @gitzone/tsdoc
    - npmci npm prepare
    - npmci npm install
    - npmci command tsdoc
  tags:
    - lossless
    - docker
    - notpriv
  only:
    - tags
  artifacts:
    expire_in: 1 week
    paths:
      - public
  allow_failure: true

.snyk (4 changed lines)

@ -1,4 +0,0 @@
# Snyk (https://snyk.io) policy file, patches or ignores known vulnerabilities.
version: v1.13.1
ignore: {}
patch: {}

.vscode/launch.json (vendored, 24 changed lines)

@ -2,28 +2,10 @@
"version": "0.2.0",
"configurations": [
{
"name": "current file",
"type": "node",
"command": "npm test",
"name": "Run npm test",
"request": "launch",
"args": [
"${relativeFile}"
],
"runtimeArgs": ["-r", "@gitzone/tsrun"],
"cwd": "${workspaceRoot}",
"protocol": "inspector",
"internalConsoleOptions": "openOnSessionStart"
},
{
"name": "test.ts",
"type": "node",
"request": "launch",
"args": [
"test/test.ts"
],
"runtimeArgs": ["-r", "@gitzone/tsrun"],
"cwd": "${workspaceRoot}",
"protocol": "inspector",
"internalConsoleOptions": "openOnSessionStart"
"type": "node-terminal"
}
]
}


@ -1,8 +1,4 @@
{
"npmts": {
"mode": "default",
"coverageTreshold": 70
},
"npmdocker": {},
"npmci": {
"npmGlobalTools": [],
@ -11,11 +7,11 @@
"gitzone": {
"projectType": "npm",
"module": {
"githost": "gitlab.com",
"gitscope": "pushrocks",
"githost": "code.foss.global",
"gitscope": "push.rocks",
"gitrepo": "smartfile",
"shortDescription": "smart ways to work with files in nodejs",
"npmPackagename": "@pushrocks/smartfile",
"description": "smart ways to work with files in nodejs",
"npmPackagename": "@push.rocks/smartfile",
"license": "MIT"
}
}

package-lock.json (generated, 26989 changed lines)

File diff suppressed because it is too large.


@ -1,17 +1,19 @@
{
"name": "@pushrocks/smartfile",
"name": "@push.rocks/smartfile",
"private": false,
"version": "9.0.3",
"version": "11.0.5",
"description": "offers smart ways to work with files in nodejs",
"main": "dist_ts/index.js",
"typings": "dist_ts/index.d.ts",
"type": "module",
"scripts": {
"test": "(tstest test/)",
"build": "(tsbuild --web)"
"build": "(tsbuild --web --allowimplicitany)",
"buildDocs": "tsdoc"
},
"repository": {
"type": "git",
"url": "https://gitlab.com/pushrocks/smartfile.git"
"url": "git+https://gitlab.com/push.rocks/smartfile.git"
},
"keywords": [
"filesystem",
@ -20,34 +22,33 @@
"author": "Lossless GmbH <hello@lossless.com> (https://lossless.com)",
"license": "MIT",
"bugs": {
"url": "https://gitlab.com/pushrocks/smartfile/issues"
"url": "https://gitlab.com/push.rocks/smartfile/issues"
},
"homepage": "https://gitlab.com/pushrocks/smartfile",
"homepage": "https://gitlab.com/push.rocks/smartfile#readme",
"dependencies": {
"@pushrocks/lik": "^5.0.0",
"@pushrocks/smartfile-interfaces": "^1.0.7",
"@pushrocks/smarthash": "^2.1.10",
"@pushrocks/smartjson": "^4.0.6",
"@pushrocks/smartmime": "^1.0.5",
"@pushrocks/smartpath": "^4.0.3",
"@pushrocks/smartpromise": "^3.1.6",
"@pushrocks/smartrequest": "^1.1.52",
"@types/fs-extra": "^9.0.13",
"@types/glob": "^7.2.0",
"@types/js-yaml": "^4.0.5",
"fs-extra": "^10.0.0",
"glob": "^7.2.0",
"@push.rocks/lik": "^6.0.12",
"@push.rocks/smartdelay": "^3.0.5",
"@push.rocks/smartfile-interfaces": "^1.0.7",
"@push.rocks/smarthash": "^3.0.4",
"@push.rocks/smartjson": "^5.0.10",
"@push.rocks/smartmime": "^1.0.5",
"@push.rocks/smartpath": "^5.0.11",
"@push.rocks/smartpromise": "^4.0.2",
"@push.rocks/smartrequest": "^2.0.21",
"@push.rocks/smartstream": "^3.0.30",
"@types/fs-extra": "^11.0.4",
"@types/glob": "^8.1.0",
"@types/js-yaml": "^4.0.9",
"fs-extra": "^11.1.1",
"glob": "^10.3.10",
"js-yaml": "^4.1.0"
},
"devDependencies": {
"@gitzone/tsbuild": "^2.1.28",
"@gitzone/tsrun": "^1.2.18",
"@gitzone/tstest": "^1.0.60",
"@pushrocks/tapbundle": "^3.2.14",
"@types/node": "^16.11.11",
"gulp-function": "^2.2.14",
"tslint": "^6.1.3",
"tslint-config-prettier": "^1.18.0"
"@git.zone/tsbuild": "^2.1.70",
"@git.zone/tsrun": "^1.2.46",
"@git.zone/tstest": "^1.0.84",
"@push.rocks/tapbundle": "^5.0.15",
"@types/node": "^20.10.0"
},
"files": [
"ts/**/*",

pnpm-lock.yaml (generated, new file, 5735 lines)

File diff suppressed because it is too large.

readme.md (151 changed lines)

@ -1,51 +1,126 @@
# @pushrocks/smartfile
smart ways to work with files in nodejs

## Availability and Links
* [npmjs.org (npm package)](https://www.npmjs.com/package/@pushrocks/smartfile)
* [gitlab.com (source)](https://gitlab.com/pushrocks/smartfile)
* [github.com (source mirror)](https://github.com/pushrocks/smartfile)
* [docs (typedoc)](https://pushrocks.gitlab.io/smartfile/)

## Status for master
Status Category | Status Badge
-- | --
GitLab Pipelines | [![pipeline status](https://gitlab.com/pushrocks/smartfile/badges/master/pipeline.svg)](https://lossless.cloud)
GitLab Pipeline Test Coverage | [![coverage report](https://gitlab.com/pushrocks/smartfile/badges/master/coverage.svg)](https://lossless.cloud)
npm | [![npm downloads per month](https://badgen.net/npm/dy/@pushrocks/smartfile)](https://lossless.cloud)
Snyk | [![Known Vulnerabilities](https://badgen.net/snyk/pushrocks/smartfile)](https://lossless.cloud)
TypeScript Support | [![TypeScript](https://badgen.net/badge/TypeScript/>=%203.x/blue?icon=typescript)](https://lossless.cloud)
node Support | [![node](https://img.shields.io/badge/node->=%2010.x.x-blue.svg)](https://nodejs.org/dist/latest-v10.x/docs/api/)
Code Style | [![Code Style](https://badgen.net/badge/style/prettier/purple)](https://lossless.cloud)
PackagePhobia (total standalone install weight) | [![PackagePhobia](https://badgen.net/packagephobia/install/@pushrocks/smartfile)](https://lossless.cloud)
PackagePhobia (package size on registry) | [![PackagePhobia](https://badgen.net/packagephobia/publish/@pushrocks/smartfile)](https://lossless.cloud)
BundlePhobia (total size when bundled) | [![BundlePhobia](https://badgen.net/bundlephobia/minzip/@pushrocks/smartfile)](https://lossless.cloud)
Platform support | [![Supports Windows 10](https://badgen.net/badge/supports%20Windows%2010/yes/green?icon=windows)](https://lossless.cloud) [![Supports Mac OS X](https://badgen.net/badge/supports%20Mac%20OS%20X/yes/green?icon=apple)](https://lossless.cloud)

smartfile is an approach of being one tool to handle files in diverse environments.

### Smartfile Sections
smartfile thinks in sections:

| section | description |
| ----------- | ---------------------------------------------------------------------------- |
| fs | (object) gets data from fs to somewhere |
| memory | gets data from memory to somewhere |
| remote | gets data from remote locations to somewhere |
| interpreter | (object) handles yaml and json |
| smartfile | (class) a virtual representation of a file, alternative to vinyl file format |

## Contribution
We are always happy for code contributions. If you are not the code contributing type that is ok. Still, maintaining Open Source repositories takes considerable time and thought. If you like the quality of what we do and our modules are useful to you we would appreciate a little monthly contribution: You can [contribute one time](https://lossless.link/contribute-onetime) or [contribute monthly](https://lossless.link/contribute). :)

For further information read the linked docs at the top of this readme.

> MIT licensed | **&copy;** [Lossless GmbH](https://lossless.gmbh)
| By using this npm module you agree to our [privacy policy](https://lossless.gmbH/privacy)

[![repo-footer](https://lossless.gitlab.io/publicrelations/repofooter.svg)](https://maintainedby.lossless.com)

# SmartFile
> SmartFile offers smart ways to work with files in nodejs.

## Install
To install SmartFile, use npm or Yarn as follows:

```
npm install @push.rocks/smartfile --save
```

Or:

```
yarn add @push.rocks/smartfile
```

## Usage

SmartFile is a comprehensive toolkit for file manipulation in Node.js. It provides functionalities for working with the filesystem, in-memory operations, streaming, and handling virtual directories. Below, you will find examples showcasing how to utilize these functionalities effectively.

### Basic File Operations

For reading and writing files, SmartFile provides synchronous and asynchronous methods. Here's how you can use them:

#### Async Write to File

```typescript
import { memory } from '@push.rocks/smartfile';

const myData: string = 'Hello, SmartFile!';

// Writing string data to a file asynchronously
memory.toFs(myData, './data/hello.txt');
```

#### Sync Write to File
```typescript
import { memory } from '@push.rocks/smartfile';
const myData: string = 'Hello, World!';
// Writing string data to a file synchronously
memory.toFsSync(myData, './data/helloSync.txt');
```
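
#### Reading Files

Reading works through the `fs` section. The calls below mirror the ones exercised in this change's `test/test.ts` (`toStringSync`, `toObjectSync`, `fileExists`); the file paths are made up for illustration.

```typescript
import * as smartfile from '@push.rocks/smartfile';

(async () => {
  // Read a file into a string synchronously
  const text: string = smartfile.fs.toStringSync('./data/hello.txt');

  // Parse a JSON or YAML file straight into a plain object
  const config = smartfile.fs.toObjectSync('./data/config.json');

  // Check for existence asynchronously (per the tests, the promise rejects if the path is missing)
  const exists = await smartfile.fs.fileExists('./data/hello.txt');

  console.log(text, config, exists);
})();
```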
### Working with Streams
Streaming files to and from the filesystem is made easy with SmartFile. Here's an example:
#### Creating Read and Write Streams
```typescript
import { fsStream } from '@push.rocks/smartfile';
import * as fs from 'fs';
// Creating a read stream
const readStream = fsStream.createReadStream('./data/readme.txt');
// Creating a write stream
const writeStream = fsStream.createWriteStream('./data/copy.txt');
// Piping the read stream to the write stream
readStream.pipe(writeStream);
```
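
#### Using StreamFile

The `StreamFile` class added in this change wraps the same idea in a reusable object; the sketch below follows the patterns from the accompanying `test/test.streamfile.ts` (file paths are illustrative only).

```typescript
import * as smartfile from '@push.rocks/smartfile';

(async () => {
  // Create a StreamFile backed by a file on disk
  const streamFile = await smartfile.StreamFile.fromPath('./data/readme.txt');

  // Consume it as a buffer or as a string (fromPath marks it multi-use, so both calls are fine)
  const asBuffer = await streamFile.getContentAsBuffer();
  const asString = await streamFile.getContentAsString();

  // Or stream it back out, reusing its stored relative path inside the target directory
  await streamFile.writeToDir('./data/copies');

  console.log(asBuffer.length, asString.length);
})();
```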
### Dealing with Virtual Directories
Virtual directories allow you to group and manipulate files as if they were in a filesystem structure without actually writing them to disk.
```typescript
import { VirtualDirectory } from '@push.rocks/smartfile';
(async () => {
// Creating a virtual directory from the file system
const virtualDir = await VirtualDirectory.fromFsDirPath('./myDirectory');
// Adding files from another virtual directory
const anotherVirtualDir = await VirtualDirectory.fromFsDirPath('./anotherDirectory');
await virtualDir.addVirtualDirectory(anotherVirtualDir, 'merged');
// Saving the virtual directory to disk
await virtualDir.saveToDisk('./outputDirectory');
})();
```
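
This change also adds `shiftToSubdirectory` and `addVirtualDirectory` to `VirtualDirectory`. A minimal sketch of re-rooting a subtree (directory names are hypothetical):

```typescript
import { VirtualDirectory } from '@push.rocks/smartfile';

(async () => {
  const virtualDir = await VirtualDirectory.fromFsDirPath('./myDirectory');

  // Extract everything under "docs/" into its own VirtualDirectory,
  // with file paths rewritten relative to that subdirectory
  const docsOnly = await virtualDir.shiftToSubdirectory('docs');

  await docsOnly.saveToDisk('./outputDocs');
})();
```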
### Advanced File Manipulation
SmartFile also allows for more advanced file manipulation techniques through the `SmartFile` class.
```typescript
import { SmartFile } from '@push.rocks/smartfile';
(async () => {
// Create a SmartFile instance from a file path
const smartFile = await SmartFile.fromFilePath('./data/example.txt');
// Edit the file content
await smartFile.editContentAsString(async (currentContent: string) => {
return currentContent.toUpperCase();
});
// Write the changes back to disk
await smartFile.write();
})();
```
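
`SmartFile` instances also gained a `getHash` helper in this change; it returns a sha256 over the path, the content, or both combined (the default). The file path below is illustrative.

```typescript
import { SmartFile } from '@push.rocks/smartfile';

(async () => {
  const smartFile = await SmartFile.fromFilePath('./data/example.txt');

  const pathHash = await smartFile.getHash('path');       // hash of the relative path
  const contentHash = await smartFile.getHash('content'); // hash of the content buffer
  const combinedHash = await smartFile.getHash();         // 'all': hash of both hashes combined

  console.log({ pathHash, contentHash, combinedHash });
})();
```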
### Conversion and Interpretation
You can easily convert file contents to objects or interpret file types for further processing:
```typescript
import { memory } from '@push.rocks/smartfile';
(async () => {
const fileString: string = await fs.promises.readFile('./data/example.json', 'utf8');
const fileObject = memory.toObject(fileString, 'json');
console.log(fileObject);
// Proceed with the object...
})();
```
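
The `interpreter` section can also tell you which format a given path will be parsed as; this mirrors the assertion in `test/test.ts`:

```typescript
import * as smartfile from '@push.rocks/smartfile';

// Derive the parser type from a file extension
const fileType = smartfile.interpreter.filetype('./somefolder/data.json');
console.log(fileType); // 'json'
```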
SmartFile simplifies handling files in a Node.js environment, providing a concise, promise-based API for various file operations, stream handling, and in-memory file manipulation. Whether you're dealing with physical files on the disk, manipulating file streams, or managing virtual files and directories, SmartFile has got you covered.
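
One more capability exercised by the test suite but not shown above is reading a whole directory tree into in-memory `SmartFile` instances via a minimatch filter; a hedged sketch (the directory and glob are made up):

```typescript
import * as smartfile from '@push.rocks/smartfile';

(async () => {
  // Read every .txt file under ./myDirectory into SmartFile objects
  const files = await smartfile.fs.fileTreeToObject('./myDirectory', '**/*.txt');

  for (const file of files) {
    console.log(file.path, file.contentBuffer.length);
  }
})();
```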
## Information on Licensing
SmartFile is licensed under the MIT License. This permissive license is short and to the point. It lets people do anything they want with your code as long as they provide attribution back to you and don't hold you liable.

test/test.streamfile.ts (new file, 67 lines)

@ -0,0 +1,67 @@
import * as path from 'path';
import { expect, tap } from '@push.rocks/tapbundle';
import * as smartfile from '../ts/index.js'; // adjust the import path as needed
// Test assets path
const testAssetsPath = './test/testassets/';
// ---------------------------
// StreamFile tests
// ---------------------------
tap.test('StreamFile.fromPath should create a StreamFile from a file path', async () => {
const streamFile = await smartfile.StreamFile.fromPath(path.join(testAssetsPath, 'mytest.json'));
expect(streamFile).toBeInstanceOf(smartfile.StreamFile);
const contentBuffer = await streamFile.getContentAsBuffer();
expect(contentBuffer).toBeInstanceOf(Buffer);
});
tap.test('StreamFile.fromUrl should create a StreamFile from a URL', async () => {
const streamFile = await smartfile.StreamFile.fromUrl('http://example.com/somefile.json');
expect(streamFile).toBeInstanceOf(smartfile.StreamFile);
});
tap.test('StreamFile.fromBuffer should create a StreamFile from a Buffer', async () => {
const buffer = Buffer.from('Some content');
const streamFile = smartfile.StreamFile.fromBuffer(buffer, 'bufferfile.txt');
expect(streamFile).toBeInstanceOf(smartfile.StreamFile);
});
tap.test('StreamFile should write the stream to disk', async () => {
const streamFile = await smartfile.StreamFile.fromPath(path.join(testAssetsPath, 'mytest.json'));
await streamFile.writeToDisk(path.join(testAssetsPath, 'temp', 'mytest.json'));
// Verify the file was written
expect(
// We'll use the fileExists method from your smartfile library
// Replace with the actual method you use to check file existence
await smartfile.fs.fileExists(path.join(testAssetsPath, 'temp', 'mytest.json'))
).toBeTrue();
});
tap.test('StreamFile should write to a directory', async () => {
const streamFile = await smartfile.StreamFile.fromPath(path.join(testAssetsPath, 'mytest.json'));
await streamFile.writeToDir(path.join(testAssetsPath, 'temp'));
// Verify the file was written
expect(
await smartfile.fs.fileExists(path.join(testAssetsPath, 'temp', 'mytest.json'))
).toBeTrue();
});
tap.test('StreamFile should return content as a buffer', async () => {
const streamFile = await smartfile.StreamFile.fromPath(path.join(testAssetsPath, 'mytest.json'));
const contentBuffer = await streamFile.getContentAsBuffer();
expect(contentBuffer).toBeInstanceOf(Buffer);
// Further checks on the content can be added here if necessary
});
tap.test('StreamFile should return content as a string', async () => {
const streamFile = await smartfile.StreamFile.fromPath(path.join(testAssetsPath, 'mytest.json'));
const contentString = await streamFile.getContentAsString();
expect(typeof contentString).toBeTypeofString();
// Verify the content matches what's expected
// This assumes the file contains a JSON object with a key 'key1' with value 'this works'
expect(JSON.parse(contentString).key1).toEqual('this works');
});
// Start the test sequence
tap.start();


@ -1,7 +1,7 @@
import * as smartfile from '../ts/index';
import * as smartfile from '../ts/index.js';
import * as path from 'path';
import { expect, tap } from '@pushrocks/tapbundle';
import { expect, tap } from '@push.rocks/tapbundle';
// ---------------------------
// smartfile.fs
@ -9,47 +9,45 @@ import { expect, tap } from '@pushrocks/tapbundle';
tap.test('.fs.fileExistsSync -> should return an accurate boolean', async () => {
// tslint:disable-next-line: no-unused-expression
expect(smartfile.fs.fileExistsSync('./test/testassets/mytest.json')).to.be.true;
expect(smartfile.fs.fileExistsSync('./test/testassets/mytest.json')).toBeTrue();
// tslint:disable-next-line: no-unused-expression
expect(smartfile.fs.fileExistsSync('./test/testassets/notthere.json')).be.false;
expect(smartfile.fs.fileExistsSync('./test/testassets/notthere.json')).toBeFalse();
});
tap.test('.fs.fileExists -> should resolve or reject a promise', async () => {
expect(smartfile.fs.fileExists('./test/testassets/mytest.json')).to.be.instanceof(Promise);
expect(smartfile.fs.fileExists('./test/testassets/mytest.json')).toBeInstanceOf(Promise);
await smartfile.fs.fileExists('./test/testassets/mytest.json');
await smartfile.fs.fileExists('./test/testassets/notthere.json').catch((err) => {
return expect(err.message).to.equal(
return expect(err.message).toEqual(
"ENOENT: no such file or directory, access './test/testassets/notthere.json'"
);
});
});
tap.test('.fs.listFoldersSync() -> should get the file type from a string', async () => {
expect(smartfile.fs.listFoldersSync('./test/testassets/')).to.include('testfolder');
expect(smartfile.fs.listFoldersSync('./test/testassets/')).to.not.include('notExistentFolder');
expect(smartfile.fs.listFoldersSync('./test/testassets/')).toContain('testfolder');
expect(smartfile.fs.listFoldersSync('./test/testassets/')).not.toContain('notExistentFolder');
});
tap.test('.fs.listFolders() -> should get the file type from a string', async () => {
const folderArrayArg = await smartfile.fs.listFolders('./test/testassets/');
expect(folderArrayArg).to.include('testfolder');
expect(folderArrayArg).to.not.include('notExistentFolder');
expect(folderArrayArg).toContain('testfolder');
expect(folderArrayArg).not.toContain('notExistentFolder');
});
tap.test('.fs.listFilesSync() -> should get the file type from a string', async () => {
expect(smartfile.fs.listFilesSync('./test/testassets/')).to.include('mytest.json');
expect(smartfile.fs.listFilesSync('./test/testassets/')).to.not.include('notExistentFile');
expect(smartfile.fs.listFilesSync('./test/testassets/', /mytest\.json/)).to.include(
'mytest.json'
);
expect(smartfile.fs.listFilesSync('./test/testassets/', /mytests.json/)).to.not.include(
expect(smartfile.fs.listFilesSync('./test/testassets/')).toContain('mytest.json');
expect(smartfile.fs.listFilesSync('./test/testassets/')).not.toContain('notExistentFile');
expect(smartfile.fs.listFilesSync('./test/testassets/', /mytest\.json/)).toContain('mytest.json');
expect(smartfile.fs.listFilesSync('./test/testassets/', /mytests.json/)).not.toContain(
'mytest.json'
);
});
tap.test('.fs.listFiles() -> should get the file type from a string', async () => {
const folderArrayArg = await smartfile.fs.listFiles('./test/testassets/');
expect(folderArrayArg).to.include('mytest.json');
expect(folderArrayArg).to.not.include('notExistentFile');
expect(folderArrayArg).toContain('mytest.json');
expect(folderArrayArg).not.toContain('notExistentFile');
});
tap.test('.fs.listFileTree() -> should get a file tree', async () => {
@ -57,8 +55,8 @@ tap.test('.fs.listFileTree() -> should get a file tree', async () => {
path.resolve('./test/testassets/'),
'**/*.txt'
);
expect(folderArrayArg).to.include('testfolder/testfile1.txt');
expect(folderArrayArg).to.not.include('mytest.json');
expect(folderArrayArg).toContain('testfolder/testfile1.txt');
expect(folderArrayArg).not.toContain('mytest.json');
});
tap.test('.fs.fileTreeToObject -> should read a file tree into an Object', async () => {
@ -66,8 +64,8 @@ tap.test('.fs.fileTreeToObject -> should read a file tree into an Object', async
path.resolve('./test/testassets/'),
'**/*.txt'
);
expect(fileArrayArg[0]).to.be.instanceof(smartfile.Smartfile);
expect(fileArrayArg[0].contents.toString()).to.equal(fileArrayArg[0].contentBuffer.toString());
expect(fileArrayArg[0]).toBeInstanceOf(smartfile.SmartFile);
expect(fileArrayArg[0].contents.toString()).toEqual(fileArrayArg[0].contentBuffer.toString());
});
tap.test('.fs.copy() -> should copy a directory', async () => {
@ -93,15 +91,15 @@ tap.test('.fs.remove -> should remove single files', async () => {
tap.test('.fs.removeSync -> should remove single files synchronouly', async () => {
smartfile.fs.removeSync('./test/testassets/temp/testfile1.txt');
expect(smartfile.fs.fileExistsSync('./test/testassets/temp/testfile1.txt')).to.be.false;
expect(smartfile.fs.fileExistsSync('./test/testassets/temp/testfile1.txt')).toBeFalse();
});
tap.test('.fs.removeMany -> should remove and array of files', async () => {
smartfile.fs
.removeMany(['./test/testassets/temp/testfile1.txt', './test/testassets/temp/testfile2.txt'])
.then(() => {
expect(smartfile.fs.fileExistsSync('./test/testassets/temp/testfile1.txt')).to.be.false;
expect(smartfile.fs.fileExistsSync('./test/testassets/temp/testfile2.txt')).to.be.false;
expect(smartfile.fs.fileExistsSync('./test/testassets/temp/testfile1.txt')).toBeFalse();
expect(smartfile.fs.fileExistsSync('./test/testassets/temp/testfile2.txt')).toBeFalse();
});
});
@ -110,14 +108,14 @@ tap.test('.fs.removeManySync -> should remove and array of single files synchron
'./test/testassets/temp/testfile1.txt',
'./test/testassets/temp/testfile2.txt',
]);
expect(smartfile.fs.fileExistsSync('./test/testassets/temp/testfile1.txt')).to.be.false;
expect(smartfile.fs.fileExistsSync('./test/testassets/temp/testfile2.txt')).to.be.false;
expect(smartfile.fs.fileExistsSync('./test/testassets/temp/testfile1.txt')).toBeFalse();
expect(smartfile.fs.fileExistsSync('./test/testassets/temp/testfile2.txt')).toBeFalse();
});
tap.test('.fs.toObjectSync() -> should read an ' + '.yaml' + ' file to an object', async () => {
tap.test('.fs.toObjectSync() -> should read an .yaml file to an object', async () => {
const testData = smartfile.fs.toObjectSync('./test/testassets/mytest.yaml');
expect(testData).to.include({ key1: 'this works' });
expect(testData).to.include({ key2: 'this works too' });
expect(testData.key1).toEqual('this works');
expect(testData.key2).toEqual('this works too');
});
tap.test(
'.fs.toObjectSync() -> should state unknown file type for unknown file types',
@ -126,16 +124,14 @@ tap.test(
}
);
tap.test('.fs.toObjectSync() -> should read an ' + '.json' + ' file to an object', async () => {
tap.test('.fs.toObjectSync() -> should read an .json file to an object', async () => {
const testData = smartfile.fs.toObjectSync('./test/testassets/mytest.json');
expect(testData).to.include({ key1: 'this works' });
expect(testData).to.include({ key2: 'this works too' });
expect(testData.key1).toEqual('this works');
expect(testData.key2).toEqual('this works too');
});
tap.test('.fs.toStringSync() -> should read a file to a string', async () => {
expect(smartfile.fs.toStringSync('./test/testassets/mytest.txt')).to.equal(
'Some TestString &&%$'
);
expect(smartfile.fs.toStringSync('./test/testassets/mytest.txt')).toEqual('Some TestString &&%$');
});
// ---------------------------
@ -143,7 +139,7 @@ tap.test('.fs.toStringSync() -> should read a file to a string', async () => {
// ---------------------------
tap.test('.interpreter.filetype() -> should get the file type from a string', async () => {
expect(smartfile.interpreter.filetype('./somefolder/data.json')).equal('json');
expect(smartfile.interpreter.filetype('./somefolder/data.json')).toEqual('json');
});
// ---------------------------
@ -179,14 +175,14 @@ tap.test('.Smartfile -> should produce vinyl compatible files', async () => {
'./test/testassets/testfolder/**/*'
);
const localSmartfile = smartfileArray[0];
expect(localSmartfile).to.be.instanceof(smartfile.Smartfile);
expect(localSmartfile.contents).to.be.instanceof(Buffer);
expect(localSmartfile).toBeInstanceOf(smartfile.SmartFile);
expect(localSmartfile.contents).toBeInstanceOf(Buffer);
// tslint:disable-next-line:no-unused-expression
expect(localSmartfile.isBuffer()).to.be.true;
expect(localSmartfile.isBuffer()).toBeTrue();
// tslint:disable-next-line:no-unused-expression
expect(localSmartfile.isDirectory()).to.be.false;
expect(localSmartfile.isDirectory()).toBeFalse();
// tslint:disable-next-line:no-unused-expression
expect(localSmartfile.isNull()).to.be.false;
expect(localSmartfile.isNull()).toBeFalse();
});
tap.test('should output a smartfile array to disk', async () => {
@ -206,11 +202,23 @@ tap.test('should output a smartfile array to disk', async () => {
tap.test('should create, store and retrieve valid smartfiles', async () => {
const fileString = 'hi there';
const filePath = './test/testassets/utf8.txt';
const smartfileInstance = await smartfile.Smartfile.fromString(filePath, fileString, 'utf8');
const smartfileInstance = await smartfile.SmartFile.fromString(filePath, fileString, 'utf8');
smartfileInstance.write();
const smartfileInstance2 = await smartfile.Smartfile.fromFilePath(filePath);
const smartfileInstance2 = await smartfile.SmartFile.fromFilePath(filePath);
const retrievedString = smartfileInstance.contents.toString();
expect(retrievedString).to.equal(fileString);
expect(retrievedString).toEqual(fileString);
});
tap.test('should get a hash', async () => {
const fileString = 'hi there';
const filePath = './test/testassets/utf8.txt';
const smartfileInstance = await smartfile.SmartFile.fromString(filePath, fileString, 'utf8');
const hash = await smartfileInstance.getHash();
console.log(hash);
});
tap.test('should wait for file to be ready', async () => {
await smartfile.fs.waitForFileToBeReady('./test/testassets/mytest.json');
});
tap.start();


@ -1,10 +1,10 @@
import { tap, expect } from '@pushrocks/tapbundle';
import { tap, expect } from '@push.rocks/tapbundle';
import * as smartfile from '../ts';
import * as smartfile from '../ts/index.js';
tap.test('should create a virtualdirectory', async () => {
const virtualDir = await smartfile.VirtualDirectory.fromFsDirPath('./test/testassets/testfolder');
expect(virtualDir.smartfileArray.length).to.equal(4);
expect(virtualDir.smartfileArray.length).toEqual(4);
});
tap.test('should write to a directory', async () => {
@ -12,4 +12,4 @@ tap.test('should write to a directory', async () => {
virtualDir.saveToDisk('./test/testassets/test');
});
tap.start();
tap.start();


@ -0,0 +1,8 @@
{
"key1": "this works",
"key2": "this works too",
"key3": {
"nestedkey1": "hello"
}
}


@ -0,0 +1,8 @@
{
"key1": "this works",
"key2": "this works too",
"key3": {
"nestedkey1": "hello"
}
}

ts/00_commitinfo_data.ts (new file, 8 lines)

@ -0,0 +1,8 @@
/**
* autocreated commitinfo by @pushrocks/commitinfo
*/
export const commitinfo = {
name: '@push.rocks/smartfile',
version: '11.0.5',
description: 'offers smart ways to work with files in nodejs'
}


@ -1,6 +1,6 @@
import * as plugins from './smartfile.plugins';
import * as fs from './smartfile.fs';
import * as memory from './smartfile.memory';
import * as plugins from './smartfile.plugins.js';
import * as fs from './fs.js';
import * as memory from './memory.js';
export interface ISmartfileConstructorOptions {
path: string;
@ -9,10 +9,9 @@ export interface ISmartfileConstructorOptions {
}
/**
* class Smartfile
* -> is vinyl file compatible
* an vinyl file compatible in memory file class
*/
export class Smartfile extends plugins.smartjson.Smartjson {
export class SmartFile extends plugins.smartjson.Smartjson {
// ======
// STATIC
// ======
@ -24,7 +23,7 @@ export class Smartfile extends plugins.smartjson.Smartjson {
public static async fromFilePath(filePath: string, baseArg: string = process.cwd()) {
filePath = plugins.path.resolve(filePath);
const fileBuffer = fs.toBufferSync(filePath);
const smartfile = new Smartfile({
const smartfile = new SmartFile({
contentBuffer: fileBuffer,
base: baseArg,
path: plugins.path.relative(baseArg, filePath),
@ -37,7 +36,7 @@ export class Smartfile extends plugins.smartjson.Smartjson {
contentBufferArg: Buffer,
baseArg: string = process.cwd()
) {
const smartfile = new Smartfile({
const smartfile = new SmartFile({
contentBuffer: contentBufferArg,
base: baseArg,
path: plugins.path.relative(baseArg, filePath),
@ -52,7 +51,7 @@ export class Smartfile extends plugins.smartjson.Smartjson {
encodingArg: 'utf8' | 'binary',
baseArg = process.cwd()
) {
const smartfile = new Smartfile({
const smartfile = new SmartFile({
contentBuffer: Buffer.from(contentStringArg, encodingArg),
base: baseArg,
path: plugins.path.relative(baseArg, filePath),
@ -62,7 +61,40 @@ export class Smartfile extends plugins.smartjson.Smartjson {
}
public static async fromFoldedJson(foldedJsonArg: string) {
return new Smartfile(plugins.smartjson.parse(foldedJsonArg));
return new SmartFile(plugins.smartjson.parse(foldedJsonArg));
}
/**
* creates a Smartfile from a ReadableStream
* @param stream a readable stream that provides file content
* @param filePath the file path to associate with the content
* @param baseArg the base path to use for the file
*/
public static async fromStream(
stream: plugins.stream.Readable,
filePath: string,
baseArg: string = process.cwd()
): Promise<SmartFile> {
return new Promise<SmartFile>((resolve, reject) => {
const chunks: Buffer[] = [];
stream.on('data', (chunk) => chunks.push(Buffer.from(chunk)));
stream.on('error', (error) => reject(error));
stream.on('end', () => {
const contentBuffer = Buffer.concat(chunks);
const smartfile = new SmartFile({
contentBuffer: contentBuffer,
base: baseArg,
path: plugins.path.relative(baseArg, filePath),
});
resolve(smartfile);
});
});
}
public static async fromUrl (urlArg: string) {
const response = await plugins.smartrequest.getBinary(urlArg);
const smartfile = await SmartFile.fromBuffer(urlArg, response.body);
return smartfile;
}
// ========
@ -77,10 +109,10 @@ export class Smartfile extends plugins.smartjson.Smartjson {
/**
* a parsed path
*/
public get parsedPath (): plugins.path.ParsedPath {
public get parsedPath(): plugins.path.ParsedPath {
return plugins.path.parse(this.path);
};
public get absolutePath () {
}
public get absolutePath() {
return plugins.path.join(this.base, this.path);
}
@ -128,7 +160,7 @@ export class Smartfile extends plugins.smartjson.Smartjson {
* @param contentString
*/
public setContentsFromString(contentString: string, encodingArg: 'utf8' | 'binary' = 'utf8') {
this.contents = new Buffer(contentString, encodingArg);
this.contents = Buffer.from(contentString, encodingArg);
}
/**
@ -158,7 +190,7 @@ export class Smartfile extends plugins.smartjson.Smartjson {
* @returns
*/
public async writeToDir(dirPathArg: string) {
dirPathArg = plugins.smartpath.transform.toAbsolute(dirPathArg);
dirPathArg = plugins.smartpath.transform.toAbsolute(dirPathArg) as string;
const filePath = plugins.path.join(dirPathArg, this.path);
await memory.toFs(this.contentBuffer, filePath);
return filePath;
@ -237,9 +269,39 @@ export class Smartfile extends plugins.smartjson.Smartjson {
return false;
}
public async getHash(typeArg: 'path' | 'content' | 'all' = 'all') {
const pathHash = await plugins.smarthash.sha256FromString(this.path);
const contentHash = await plugins.smarthash.sha256FromBuffer(this.contentBuffer);
const combinedHash = await plugins.smarthash.sha256FromString(pathHash + contentHash);
switch (typeArg) {
case 'path':
return pathHash;
case 'content':
return contentHash;
case 'all':
default:
return combinedHash;
}
}
// update things
public updateFileName(fileNameArg: string) {
const oldFileName = this.parsedPath.base;
this.path = this.path.replace(new RegExp(oldFileName + '$'), fileNameArg);
}
public async editContentAsString(editFuncArg: (fileStringArg: string) => Promise<string>) {
const newFileString = await editFuncArg(this.contentBuffer.toString());
this.contentBuffer = Buffer.from(newFileString);
}
/**
* Returns a ReadableStream from the file's content buffer
*/
public getStream(): plugins.stream.Readable {
const stream = new plugins.stream.Readable();
stream.push(this.contentBuffer); // Push the content buffer to the stream
stream.push(null); // Push null to signify the end of the stream (EOF)
return stream;
}
}

ts/classes.streamfile.ts (new file, 152 lines)

@ -0,0 +1,152 @@
import * as plugins from './smartfile.plugins.js';
import * as smartfileFs from './fs.js';
import * as smartfileFsStream from './fsstream.js';
import { Readable } from 'stream';
type TStreamSource = (streamFile: StreamFile) => Promise<Readable>;
/**
* The StreamFile class represents a file as a stream.
* It allows creating streams from a file path, a URL, or a buffer.
*/
export class StreamFile {
// INSTANCE
relativeFilePath?: string;
private streamSource: TStreamSource;
// enable stream based multi use
private cachedStreamBuffer?: Buffer;
public multiUse: boolean;
public used: boolean = false;
private constructor(streamSource: TStreamSource, relativeFilePath?: string) {
this.streamSource = streamSource;
this.relativeFilePath = relativeFilePath;
}
// STATIC
public static async fromPath(filePath: string): Promise<StreamFile> {
const streamSource: TStreamSource = async (stremFileArg) => smartfileFsStream.createReadStream(filePath);
const streamFile = new StreamFile(streamSource, filePath);
streamFile.multiUse = true;
return streamFile;
}
public static async fromUrl(url: string): Promise<StreamFile> {
const streamSource: TStreamSource = async (streamFileArg) => plugins.smartrequest.getStream(url); // Replace with actual plugin method
const streamFile = new StreamFile(streamSource);
streamFile.multiUse = true;
return streamFile;
}
public static fromBuffer(buffer: Buffer, relativeFilePath?: string): StreamFile {
const streamSource: TStreamSource = async (streamFileArg) => {
const stream = new Readable();
stream.push(buffer);
stream.push(null); // End of stream
return stream;
};
const streamFile = new StreamFile(streamSource, relativeFilePath);
streamFile.multiUse = true;
return streamFile;
}
/**
* Creates a StreamFile from an existing Readable stream with an option for multiple uses.
* @param stream A Node.js Readable stream.
* @param relativeFilePath Optional file path for the stream.
* @param multiUse If true, the stream can be read multiple times, caching its content.
* @returns A StreamFile instance.
*/
public static fromStream(stream: Readable, relativeFilePath?: string, multiUse: boolean = false): StreamFile {
const streamSource: TStreamSource = (streamFileArg) => {
if (streamFileArg.multiUse) {
// If multi-use is enabled and we have cached content, create a new readable stream from the buffer
const bufferedStream = new Readable();
bufferedStream.push(streamFileArg.cachedStreamBuffer);
bufferedStream.push(null); // No more data to push
return Promise.resolve(bufferedStream);
} else {
return Promise.resolve(stream);
}
};
const streamFile = new StreamFile(streamSource, relativeFilePath);
streamFile.multiUse = multiUse;
// If multi-use is enabled, cache the stream when it's first read
if (multiUse) {
const chunks: Buffer[] = [];
stream.on('data', (chunk) => chunks.push(Buffer.from(chunk)));
stream.on('end', () => {
streamFile.cachedStreamBuffer = Buffer.concat(chunks);
});
// It's important to handle errors that may occur during streaming
stream.on('error', (err) => {
console.error('Error while caching stream:', err);
});
}
return streamFile;
}
// METHODS
private checkMultiUse() {
if (!this.multiUse && this.used) {
throw new Error('This stream can only be used once.');
}
this.used = true;
}
/**
* Creates a new readable stream from the source.
*/
public async createReadStream(): Promise<Readable> {
return this.streamSource(this);
}
/**
* Writes the stream to the disk at the specified path.
* @param filePathArg The file path where the stream should be written.
*/
public async writeToDisk(filePathArg: string): Promise<void> {
this.checkMultiUse();
const readStream = await this.createReadStream();
const writeStream = smartfileFsStream.createWriteStream(filePathArg);
return new Promise((resolve, reject) => {
readStream.pipe(writeStream);
readStream.on('error', reject);
writeStream.on('error', reject);
writeStream.on('finish', resolve);
});
}
public async writeToDir(dirPathArg: string) {
this.checkMultiUse();
const filePath = plugins.path.join(dirPathArg, this.relativeFilePath);
await smartfileFs.ensureDir(plugins.path.parse(filePath).dir);
return this.writeToDisk(filePath);
}
public async getContentAsBuffer() {
this.checkMultiUse();
const done = plugins.smartpromise.defer<Buffer>();
const readStream = await this.createReadStream();
const chunks: Buffer[] = [];
readStream.on('data', (chunk) => chunks.push(Buffer.from(chunk)));
readStream.on('error', done.reject);
readStream.on('end', () => {
const contentBuffer = Buffer.concat(chunks);
done.resolve(contentBuffer);
});
return done.promise;
}
public async getContentAsString(formatArg: 'utf8' | 'binary' = 'utf8') {
const contentBuffer = await this.getContentAsBuffer();
return contentBuffer.toString(formatArg);
}
}


@ -1,11 +1,21 @@
import { Smartfile } from './smartfile.classes.smartfile';
import * as plugins from './smartfile.plugins';
import * as fs from './smartfile.fs';
import { SmartFile } from './classes.smartfile.js';
import * as plugins from './smartfile.plugins.js';
import * as fs from './fs.js';
export interface IVirtualDirectoryConstructorOptions {
mode: ''
}
/**
* a virtual directory exposes a fs api
*/
export class VirtualDirectory {
consstructor(options = {}) {
}
// STATIC
public static async fromFsDirPath(pathArg: string): Promise<VirtualDirectory> {
const newVirtualDir = new VirtualDirectory();
@ -18,17 +28,17 @@ export class VirtualDirectory {
): Promise<VirtualDirectory> {
const newVirtualDir = new VirtualDirectory();
for (const fileArg of virtualDirTransferableObjectArg.files) {
newVirtualDir.addSmartfiles([Smartfile.enfoldFromJson(fileArg) as Smartfile]);
newVirtualDir.addSmartfiles([SmartFile.enfoldFromJson(fileArg) as SmartFile]);
}
return newVirtualDir;
}
// INSTANCE
public smartfileArray: Smartfile[] = [];
public smartfileArray: SmartFile[] = [];
constructor() {}
public addSmartfiles(smartfileArrayArg: Smartfile[]) {
public addSmartfiles(smartfileArrayArg: SmartFile[]) {
this.smartfileArray = this.smartfileArray.concat(smartfileArrayArg);
}
@ -42,12 +52,12 @@ export class VirtualDirectory {
public async toVirtualDirTransferableObject(): Promise<plugins.smartfileInterfaces.VirtualDirTransferableObject> {
return {
files: this.smartfileArray.map(smartfileArg => smartfileArg.foldToJson())
files: this.smartfileArray.map((smartfileArg) => smartfileArg.foldToJson()),
};
}
public async saveToDisk(dirArg: string) {
console.log(`writing VirtualDirectory with ${this.smartfileArray.length} to directory:
console.log(`writing VirtualDirectory with ${this.smartfileArray.length} files to directory:
--> ${dirArg}`);
for (const smartfileArg of this.smartfileArray) {
const filePath = await smartfileArg.writeToDir(dirArg);
@ -56,6 +66,22 @@ export class VirtualDirectory {
}
}
// TODO implement root shifting to get subdirectories as new virtual directories
// TODO implement root shifting to combine VirtualDirecotries in a parent virtual directory
public async shiftToSubdirectory(subDir: string): Promise<VirtualDirectory> {
const newVirtualDir = new VirtualDirectory();
for (const file of this.smartfileArray) {
if (file.path.startsWith(subDir)) {
const adjustedFilePath = plugins.path.relative(subDir, file.path);
file.path = adjustedFilePath;
newVirtualDir.addSmartfiles([file]);
}
}
return newVirtualDir;
}
public async addVirtualDirectory(virtualDir: VirtualDirectory, newRoot: string): Promise<void> {
for (const file of virtualDir.smartfileArray) {
file.path = plugins.path.join(newRoot, file.path);
}
this.addSmartfiles(virtualDir.smartfileArray);
}
}


@ -1,9 +1,9 @@
import * as plugins from './smartfile.plugins';
import * as interpreter from './smartfile.interpreter';
import * as plugins from './smartfile.plugins.js';
import * as interpreter from './interpreter.js';
import { Smartfile } from './smartfile.classes.smartfile';
import { SmartFile } from './classes.smartfile.js';
import * as memory from './smartfile.memory';
import * as memory from './memory.js';
/*===============================================================
============================ Checks =============================
===============================================================*/
@ -40,7 +40,18 @@ export const fileExists = async (filePath): Promise<boolean> => {
/**
* Checks if given path points to an existing directory
*/
export const isDirectory = (pathArg): boolean => {
export const isDirectory = (pathArg: string): boolean => {
try {
return plugins.fsExtra.statSync(pathArg).isDirectory();
} catch (err) {
return false;
}
};
/**
* Checks if given path points to an existing directory
*/
export const isDirectorySync = (pathArg: string): boolean => {
try {
return plugins.fsExtra.statSync(pathArg).isDirectory();
} catch (err) {
@ -210,11 +221,23 @@ export const toBufferSync = (filePath: string): Buffer => {
return plugins.fsExtra.readFileSync(filePath);
};
/**
* Creates a Readable Stream from a file path.
* @param filePath The path to the file.
* @returns {fs.ReadStream}
*/
export const toReadStream = (filePath: string): plugins.fs.ReadStream => {
if (!fileExistsSync(filePath)) {
throw new Error(`File does not exist at path: ${filePath}`);
}
return plugins.fsExtra.createReadStream(filePath);
};
export const fileTreeToHash = async (dirPathArg: string, miniMatchFilter: string) => {
const fileTreeObject = await fileTreeToObject(dirPathArg, miniMatchFilter);
let combinedString = '';
for (const smartfile of fileTreeObject) {
combinedString += smartfile.contentBuffer.toString();
combinedString += await smartfile.getHash();
}
const hash = await plugins.smarthash.sha256FromString(combinedString);
return hash;
@ -231,11 +254,11 @@ export const fileTreeToObject = async (dirPathArg: string, miniMatchFilter: stri
if (plugins.path.isAbsolute(miniMatchFilter)) {
dirPath = '/';
} else {
dirPath = plugins.smartpath.transform.toAbsolute(dirPathArg);
dirPath = plugins.smartpath.transform.toAbsolute(dirPathArg) as string;
}
const fileTree = await listFileTree(dirPath, miniMatchFilter);
const smartfileArray: Smartfile[] = [];
const smartfileArray: SmartFile[] = [];
for (const filePath of fileTree) {
const readPath = ((): string => {
if (!plugins.path.isAbsolute(filePath)) {
@ -244,12 +267,12 @@ export const fileTreeToObject = async (dirPathArg: string, miniMatchFilter: stri
return filePath;
}
})();
const fileContentString = toStringSync(readPath);
const fileBuffer = plugins.fs.readFileSync(readPath);
// push a read file as Smartfile
smartfileArray.push(
new Smartfile({
contentBuffer: Buffer.from(fileContentString),
new SmartFile({
contentBuffer: fileBuffer,
base: dirPath,
path: filePath,
})
@ -341,8 +364,6 @@ export const listFileTree = async (
miniMatchFilter: string,
absolutePathsBool: boolean = false
): Promise<string[]> => {
const done = plugins.smartpromise.defer<string[]>();
// handle absolute miniMatchFilter
let dirPath: string;
if (plugins.path.isAbsolute(miniMatchFilter)) {
@ -356,15 +377,8 @@ export const listFileTree = async (
nodir: true,
dot: true,
};
plugins.glob(miniMatchFilter, options, (err, files: string[]) => {
if (err) {
console.log(err);
done.reject(err);
}
done.resolve(files);
});
let fileList = await done.promise;
let fileList = await plugins.glob.glob(miniMatchFilter, options);
if (absolutePathsBool) {
fileList = fileList.map((filePath) => {
return plugins.path.resolve(plugins.path.join(dirPath, filePath));
@ -375,13 +389,58 @@ export const listFileTree = async (
};
/**
* checks wether a file is ready for processing
* Watches for file stability before resolving the promise.
*/
export const waitForFileToBeReady = async (filePathArg: string) => {
const limitedArray = new plugins.lik.LimitedArray<string>(3);
if(!plugins.path.isAbsolute(filePathArg)) {
filePathArg = plugins.path.resolve(filePathArg);
};
const stats = await plugins.fsExtra.stat(filePathArg);
stats.size
}
export const waitForFileToBeReady = (filePathArg: string): Promise<void> => {
return new Promise(async (resolve, reject) => {
let lastFileSize = -1;
let fileIsStable = false;
const checkFileStability = async () => {
let currentFileSize: number;
const deferred = plugins.smartpromise.defer();
plugins.fs.stat(filePathArg, (err, stats) => {
if (err) {
fileIsStable = true;
watcher.close();
reject(err);
return;
}
currentFileSize = stats.size;
deferred.resolve();
});
await deferred.promise;
if (currentFileSize === lastFileSize) {
fileIsStable = true;
await plugins.smartdelay.delayFor(100);
resolve();
}
lastFileSize = currentFileSize;
};
const watcher = plugins.fs.watch(filePathArg, (eventType, filename) => {
if (eventType === 'change') {
checkFileStability();
}
});
watcher.on('error', (error) => {
watcher.close();
reject(error);
});
while (!fileIsStable) {
await checkFileStability();
if (!fileIsStable) {
await plugins.smartdelay.delayFor(5000);
}
}
watcher.close();
});
};

ts/fsstream.ts (new file, 195 lines)

@ -0,0 +1,195 @@
/*
This file contains logic for streaming things from and to the filesystem
*/
import * as plugins from './smartfile.plugins.js';
export const createReadStream = (pathArg: string) => {
return plugins.fs.createReadStream(pathArg);
};
export const createWriteStream = (pathArg: string) => {
return plugins.fs.createWriteStream(pathArg);
};
export const processFile = async (
filePath: string,
asyncFunc: (fileStream: plugins.stream.Readable) => Promise<void>
): Promise<void> => {
return new Promise((resolve, reject) => {
const fileStream = createReadStream(filePath);
asyncFunc(fileStream).then(resolve).catch(reject);
});
}
export const processDirectory = async (
directoryPath: string,
asyncFunc: (fileStream: plugins.stream.Readable) => Promise<void>
): Promise<void> => {
const files = plugins.fs.readdirSync(directoryPath, { withFileTypes: true });
for (const file of files) {
const fullPath = plugins.path.join(directoryPath, file.name);
if (file.isDirectory()) {
await processDirectory(fullPath, asyncFunc); // Recursively call processDirectory for directories
} else if (file.isFile()) {
await processFile(fullPath, asyncFunc); // Call async function with the file stream and wait for it
}
}
};
/**
* Checks if a file is ready to be streamed (exists and is not empty).
*/
export const isFileReadyForStreaming = async (filePathArg: string): Promise<boolean> => {
try {
const stats = await plugins.fs.promises.stat(filePathArg);
return stats.size > 0;
} catch (error) {
if (error.code === 'ENOENT') { // File does not exist
return false;
}
throw error; // Rethrow other unexpected errors
}
};
/**
* Waits for a file to be ready for streaming (exists and is not empty).
*/
export const waitForFileToBeReadyForStreaming = (filePathArg: string): Promise<void> => {
return new Promise((resolve, reject) => {
// Normalize and resolve the file path
const filePath = plugins.path.resolve(filePathArg);
// Function to check file stats
const checkFile = (resolve: () => void, reject: (reason: any) => void) => {
plugins.fs.stat(filePath, (err, stats) => {
if (err) {
if (err.code === 'ENOENT') {
// File not found, wait and try again
return;
}
// Some other error occurred
return reject(err);
}
if (stats.size > 0) {
// File exists and is not empty, resolve the promise
resolve();
}
});
};
// Set up file watcher
const watcher = plugins.fs.watch(filePath, { persistent: false }, (eventType) => {
if (eventType === 'change' || eventType === 'rename') {
checkFile(resolve, reject);
}
});
// Check file immediately in case it's already ready
checkFile(resolve, reject);
// Error handling
watcher.on('error', (error) => {
watcher.close();
reject(error);
});
});
};
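
A short sketch combining the streaming-readiness helpers above, assuming both are exported from this module; the incoming path is made up:

import { waitForFileToBeReadyForStreaming, createReadStream } from './fsstream.js';

const streamWhenReady = async () => {
  // Per the helper's contract, this resolves once the file exists and has a non-zero size.
  await waitForFileToBeReadyForStreaming('./incoming/report.csv'); // hypothetical path
  createReadStream('./incoming/report.csv').pipe(process.stdout);
};

streamWhenReady().catch(console.error);
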
class SmartReadStream extends plugins.stream.Readable {
private watcher: plugins.fs.FSWatcher | null = null;
private lastReadSize: number = 0;
private endTimeout: NodeJS.Timeout | null = null;
private filePath: string;
private endDelay: number;
private reading: boolean = false;
constructor(filePath: string, endDelay = 60000, opts?: plugins.stream.ReadableOptions) {
super(opts);
this.filePath = filePath;
this.endDelay = endDelay;
}
private startWatching(): void {
this.watcher = plugins.fs.watch(this.filePath, (eventType) => {
if (eventType === 'change') {
this.resetEndTimeout();
}
});
this.watcher.on('error', (error) => {
this.cleanup();
this.emit('error', error);
});
}
private resetEndTimeout(): void {
if (this.endTimeout) clearTimeout(this.endTimeout);
this.endTimeout = setTimeout(() => this.checkForEnd(), this.endDelay);
}
private checkForEnd(): void {
plugins.fs.stat(this.filePath, (err, stats) => {
if (err) {
this.emit('error', err);
return;
}
if (this.lastReadSize === stats.size) {
this.push(null); // Signal the end of the stream
this.cleanup();
} else {
this.lastReadSize = stats.size;
this.resetEndTimeout();
if (!this.reading) {
// We only want to continue reading if we were previously waiting for more data
this.reading = true;
this._read(10000); // Try to read more data
}
}
});
}
private cleanup(): void {
if (this.endTimeout) clearTimeout(this.endTimeout);
if (this.watcher) this.watcher.close();
}
_read(size: number): void {
this.reading = true;
const chunkSize = Math.min(size, 16384); // Read in chunks of 16KB
const buffer = Buffer.alloc(chunkSize);
plugins.fs.open(this.filePath, 'r', (err, fd) => {
if (err) {
this.emit('error', err);
return;
}
plugins.fs.read(fd, buffer, 0, chunkSize, this.lastReadSize, (err, bytesRead, buffer) => {
if (err) {
this.emit('error', err);
return;
}
if (bytesRead > 0) {
this.lastReadSize += bytesRead;
this.push(buffer.slice(0, bytesRead)); // Push the data onto the stream
} else {
this.reading = false; // No more data to read for now
this.resetEndTimeout();
}
plugins.fs.close(fd, (err) => {
if (err) {
this.emit('error', err);
}
});
});
});
}
_destroy(error: Error | null, callback: (error: Error | null) => void): void {
this.cleanup();
callback(error);
}
}
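
A consumption sketch for SmartReadStream; the class is not marked export in this hunk, so the import shape is an assumption, and the log path and shorter end delay are illustrative:

import * as fs from 'fs';
import { SmartReadStream } from './fsstream.js'; // assumes the class ends up exported

// Follow a file that is still being written; the stream is intended to end
// only after its size has stayed unchanged for the configured endDelay.
const liveLog = new SmartReadStream('./app.log', 10000); // hypothetical path, 10s end delay
liveLog.pipe(fs.createWriteStream('./app.log.copy'));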

View File

@@ -1,12 +1,14 @@
import * as plugins from './smartfile.plugins';
import * as fsMod from './smartfile.fs';
import * as interpreterMod from './smartfile.interpreter';
import * as memoryMod from './smartfile.memory';
import * as plugins from './smartfile.plugins.js';
import * as fsMod from './fs.js';
import * as fsStreamMod from './fsstream.js';
import * as interpreterMod from './interpreter.js';
import * as memoryMod from './memory.js';
export { Smartfile, ISmartfileConstructorOptions } from './smartfile.classes.smartfile';
export { VirtualDirectory } from './smartfile.classes.virtualdirectory';
export * from './classes.smartfile.js';
export * from './classes.streamfile.js';
export * from './classes.virtualdirectory.js';
export let fs = fsMod;
export let interpreter = interpreterMod;
export let memory = memoryMod;
export const fs = fsMod;
export const fsStream = fsStreamMod;
export const interpreter = interpreterMod;
export const memory = memoryMod;
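
With the re-exports above, consumers would presumably reach the new helpers through the fs and fsStream namespaces; the package name is taken from the @push.rocks scope used elsewhere in this diff and may differ:

import * as smartfile from '@push.rocks/smartfile'; // assumed package name

const copyWhenStable = async () => {
  // waitForFileToBeReady comes from fs.js, the stream factories from fsstream.js.
  await smartfile.fs.waitForFileToBeReady('./in.bin'); // hypothetical paths
  smartfile.fsStream
    .createReadStream('./in.bin')
    .pipe(smartfile.fsStream.createWriteStream('./out.bin'));
};

copyWhenStable().catch(console.error);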

View File

@@ -1,4 +1,4 @@
import * as plugins from './smartfile.plugins';
import * as plugins from './smartfile.plugins.js';
export let filetype = (pathArg: string): string => {
const extName = plugins.path.extname(pathArg);

View File

@@ -1,7 +1,8 @@
import * as plugins from './smartfile.plugins';
import { Smartfile } from './smartfile.classes.smartfile';
import * as smartfileFs from './smartfile.fs';
import * as interpreter from './smartfile.interpreter';
import * as plugins from './smartfile.plugins.js';
import { SmartFile } from './classes.smartfile.js';
import * as smartfileFs from './fs.js';
import * as interpreter from './interpreter.js';
import type { StreamFile } from './classes.streamfile.js';
/**
* converts file to Object
@@ -24,7 +25,7 @@ export interface IToFsOptions {
* @param fileBaseArg
*/
export let toFs = async (
fileContentArg: string | Buffer | Smartfile,
fileContentArg: string | Buffer | SmartFile | StreamFile,
filePathArg: string,
optionsArg: IToFsOptions = {}
) => {
@@ -41,7 +42,7 @@ export let toFs = async (
let filePath: string = filePathArg;
// handle Smartfile
if (fileContentArg instanceof Smartfile) {
if (fileContentArg instanceof SmartFile) {
fileContent = fileContentArg.contentBuffer;
// handle options
if (optionsArg.respectRelative) {
@@ -83,7 +84,7 @@ export const toFsSync = (fileArg: string, filePathArg: string) => {
plugins.fsExtra.writeFileSync(filePath, fileString, { encoding: 'utf8' });
};
export let smartfileArrayToFs = async (smartfileArrayArg: Smartfile[], dirArg: string) => {
export let smartfileArrayToFs = async (smartfileArrayArg: SmartFile[], dirArg: string) => {
await smartfileFs.ensureDir(dirArg);
for (const smartfile of smartfileArrayArg) {
await toFs(smartfile, dirArg, {
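
A sketch of toFs with the widened input type; the string and Buffer inputs shown here come directly from the signature above, and the target paths are hypothetical:

import * as memory from './memory.js';

const writeExamples = async () => {
  // Plain contents are still accepted alongside SmartFile and StreamFile instances.
  await memory.toFs('hello world', './hello.txt');
  await memory.toFs(Buffer.from([0x01, 0x02]), './bytes.bin');
};

writeExamples().catch(console.error);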

View File

@@ -1,24 +1,38 @@
// node native scope
import * as fs from 'fs';
import * as path from 'path';
import * as stream from 'stream';
export { fs, path };
export { fs, path, stream };
// @pushrocks scope
import * as lik from '@pushrocks/lik';
import * as smartfileInterfaces from '@pushrocks/smartfile-interfaces';
import * as smarthash from '@pushrocks/smarthash';
import * as smartjson from '@pushrocks/smartjson';
import * as smartmime from '@pushrocks/smartmime';
import * as smartpath from '@pushrocks/smartpath';
import * as smartpromise from '@pushrocks/smartpromise';
import * as smartrequest from '@pushrocks/smartrequest';
import * as lik from '@push.rocks/lik';
import * as smartfileInterfaces from '@push.rocks/smartfile-interfaces';
import * as smartdelay from '@push.rocks/smartdelay';
import * as smarthash from '@push.rocks/smarthash';
import * as smartjson from '@push.rocks/smartjson';
import * as smartmime from '@push.rocks/smartmime';
import * as smartpath from '@push.rocks/smartpath';
import * as smartpromise from '@push.rocks/smartpromise';
import * as smartrequest from '@push.rocks/smartrequest';
import * as smartstream from '@push.rocks/smartstream';
export { lik, smartfileInterfaces, smarthash, smartjson, smartmime, smartpath, smartpromise, smartrequest };
export {
lik,
smartfileInterfaces,
smartdelay,
smarthash,
smartjson,
smartmime,
smartpath,
smartpromise,
smartrequest,
smartstream,
};
// third party scope
import * as fsExtra from 'fs-extra';
import glob from 'glob';
import fsExtra from 'fs-extra';
import * as glob from 'glob';
import yaml from 'js-yaml';
export { fsExtra, glob, yaml };

View File

@@ -1,8 +1,11 @@
{
"compilerOptions": {
"experimentalDecorators": true,
"useDefineForClassFields": false,
"target": "ES2022",
"module": "ES2022",
"moduleResolution": "nodenext",
"esModuleInterop": true,
"target": "ES2017",
"moduleResolution": "node"
"verbatimModuleSyntax": true,
}
}
}
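
Under module ES2022 with nodenext resolution, relative ESM imports must carry explicit .js extensions even though the sources are .ts files, which is why the import lines throughout this diff gain a .js suffix; a minimal illustration:

// Resolves under "moduleResolution": "nodenext"; the on-disk source is fs.ts.
import * as fsMod from './fs.js';
// Without the extension the same import would fail to resolve:
// import * as fsModBroken from './fs';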

View File

@@ -1,17 +0,0 @@
{
"extends": ["tslint:latest", "tslint-config-prettier"],
"rules": {
"semicolon": [true, "always"],
"no-console": false,
"ordered-imports": false,
"object-literal-sort-keys": false,
"member-ordering": {
"options":{
"order": [
"static-method"
]
}
}
},
"defaultSeverity": "warning"
}