Compare commits


82 Commits

SHA1 Message Date
b925e5e662 11.0.0 2023-11-06 11:15:12 +01:00
98a5d2c94d BREAKING CHANGE(core): update 2023-11-06 11:15:11 +01:00
0e735cba20 10.0.40 2023-11-04 20:54:14 +01:00
f815457801 fix(core): update 2023-11-04 20:54:13 +01:00
f7e47ae354 10.0.39 2023-11-04 20:43:55 +01:00
684e893801 fix(core): update 2023-11-04 20:43:54 +01:00
d4b381d33d 10.0.38 2023-11-04 20:14:21 +01:00
291a11aa60 fix(core): update 2023-11-04 20:14:20 +01:00
ca592afec9 update 2023-11-04 20:07:43 +01:00
8b07197224 10.0.37 2023-11-03 02:31:57 +01:00
b60fd15ec6 fix(core): update 2023-11-03 02:31:57 +01:00
853eccc780 10.0.36 2023-11-03 02:24:37 +01:00
c26aff85b5 fix(core): update 2023-11-03 02:24:36 +01:00
321e4d9dea 10.0.35 2023-11-03 01:25:38 +01:00
3d2789857c fix(core): update 2023-11-03 01:25:37 +01:00
07b88a078d 10.0.34 2023-11-03 00:41:06 +01:00
6fee0028d8 fix(core): update 2023-11-03 00:41:05 +01:00
629c52f9bc 10.0.33 2023-10-12 02:21:40 +02:00
fd056c29e9 fix(core): update 2023-10-12 02:21:39 +02:00
36c456b509 10.0.32 2023-09-22 17:05:35 +02:00
16f8c25557 fix(core): update 2023-09-22 17:05:35 +02:00
219e070ba2 10.0.31 2023-08-31 18:45:24 +02:00
ee97e1d88b fix(core): update 2023-08-31 18:45:23 +02:00
279db74568 10.0.30 2023-08-23 10:58:38 +02:00
b84c504f11 fix(core): update 2023-08-23 10:58:38 +02:00
7b3194cc13 10.0.29 2023-08-23 09:38:49 +02:00
e1e821efec fix(core): update 2023-08-23 09:38:49 +02:00
6b613d1b8a 10.0.28 2023-07-12 10:00:40 +02:00
70f1c58a82 fix(core): update 2023-07-12 10:00:40 +02:00
5df76ca94b 10.0.27 2023-07-10 23:07:51 +02:00
32cfda3c90 fix(core): update 2023-07-10 23:07:50 +02:00
dd521398ea switch to new org scheme 2023-07-10 02:55:52 +02:00
038e6cc33d 10.0.26 2023-07-08 16:24:53 +02:00
2fc37d6892 fix(core): update 2023-07-08 16:24:53 +02:00
3c1eb1ab70 10.0.25 2023-06-25 19:01:11 +02:00
5296e8859b fix(core): update 2023-06-25 19:01:10 +02:00
160e0ae451 10.0.24 2023-06-25 18:21:13 +02:00
373c6538ae fix(core): update 2023-06-25 18:21:12 +02:00
7a1476e106 10.0.23 2023-06-25 18:06:56 +02:00
b2a2035f00 fix(core): update 2023-06-25 18:06:56 +02:00
03e4f03035 10.0.22 2023-06-25 17:53:44 +02:00
d74bbb2b12 fix(core): update 2023-06-25 17:53:43 +02:00
22cfe1f5cb 10.0.21 2023-06-25 17:47:42 +02:00
5fc2c4586d fix(core): update 2023-06-25 17:47:42 +02:00
6ab81fb323 10.0.20 2023-06-25 13:55:04 +02:00
58ec27a1a0 fix(core): update 2023-06-25 13:55:03 +02:00
88811646b7 10.0.19 2023-06-25 01:36:06 +02:00
412bb52eee fix(core): update 2023-06-25 01:36:05 +02:00
b04750ecbd 10.0.18 2023-06-25 01:34:41 +02:00
0c99475888 fix(core): update 2023-06-25 01:34:40 +02:00
86317def88 10.0.17 2023-06-24 20:36:53 +02:00
0a5af0ba96 fix(core): update 2023-06-24 20:36:52 +02:00
cf73ff4a54 10.0.16 2023-06-24 11:20:50 +02:00
788897e765 fix(core): update 2023-06-24 11:20:50 +02:00
7fa3894f6e 10.0.15 2023-06-24 01:26:08 +02:00
afdd654664 fix(core): update 2023-06-24 01:26:08 +02:00
8277e0ca6d 10.0.14 2023-06-23 18:49:56 +02:00
0892c87a68 fix(core): update 2023-06-23 18:49:56 +02:00
841ba2e14d 10.0.13 2023-06-23 18:46:43 +02:00
5b2953bf02 fix(core): update 2023-06-23 18:46:42 +02:00
d3457fd65b 10.0.12 2023-06-23 18:43:50 +02:00
3bf1eafe6b fix(core): update 2023-06-23 18:43:49 +02:00
04a9b992d7 10.0.11 2023-06-23 18:39:02 +02:00
6206b55deb fix(core): update 2023-06-23 18:39:01 +02:00
6c05bf6ae3 10.0.10 2023-06-23 18:11:04 +02:00
371074afc1 fix(core): update 2023-06-23 18:11:04 +02:00
e0e665fe6d 10.0.9 2023-06-23 18:08:16 +02:00
5483202972 fix(core): update 2023-06-23 18:08:15 +02:00
24b3458888 10.0.8 2023-06-23 16:44:45 +02:00
cffa47ac3d fix(core): update 2023-06-23 16:44:44 +02:00
9dadf3f78f 10.0.7 2023-01-09 15:34:06 +01:00
b35a671fe9 fix(core): update 2023-01-09 15:34:05 +01:00
0ca38c109e 10.0.6 2023-01-09 15:32:37 +01:00
4e2321e1ee fix(core): update 2023-01-09 15:32:37 +01:00
a640ab3d7b 10.0.5 2022-09-05 00:21:43 +02:00
37d6d56287 fix(core): update 2022-09-05 00:21:42 +02:00
443a026502 10.0.4 2022-07-24 23:11:41 +02:00
9644c5b7e3 fix(core): update 2022-07-24 23:11:41 +02:00
25faa8c697 10.0.3 2022-07-24 23:04:52 +02:00
982387aaa3 fix(core): update 2022-07-24 23:04:51 +02:00
4a11f50efe 10.0.2 2022-06-09 19:27:00 +02:00
0ddec29392 fix(core): update 2022-06-09 19:26:59 +02:00
25 changed files with 6275 additions and 11600 deletions

View File

@ -0,0 +1,66 @@
name: Default (not tags)
on:
push:
tags-ignore:
- '**'
env:
IMAGE: registry.gitlab.com/hosttoday/ht-docker-node:npmci
NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@gitea.lossless.digital/${{gitea.repository}}.git
NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
NPMCI_URL_CLOUDLY: ${{secrets.NPMCI_URL_CLOUDLY}}
jobs:
security:
runs-on: ubuntu-latest
continue-on-error: true
container:
image: ${{ env.IMAGE }}
steps:
- uses: actions/checkout@v3
- name: Install pnpm and npmci
run: |
pnpm install -g pnpm
pnpm install -g @shipzone/npmci
- name: Run npm prepare
run: npmci npm prepare
- name: Audit production dependencies
run: |
npmci command npm config set registry https://registry.npmjs.org
npmci command pnpm audit --audit-level=high --prod
continue-on-error: true
- name: Audit development dependencies
run: |
npmci command npm config set registry https://registry.npmjs.org
npmci command pnpm audit --audit-level=high --dev
continue-on-error: true
test:
if: ${{ always() }}
needs: security
runs-on: ubuntu-latest
container:
image: ${{ env.IMAGE }}
steps:
- uses: actions/checkout@v3
- name: Test stable
run: |
npmci node install stable
npmci npm install
npmci npm test
- name: Test build
run: |
npmci node install stable
npmci npm install
npmci npm build

View File

@ -0,0 +1,124 @@
name: Default (tags)
on:
push:
tags:
- '*'
env:
IMAGE: registry.gitlab.com/hosttoday/ht-docker-node:npmci
NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@gitea.lossless.digital/${{gitea.repository}}.git
NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
NPMCI_URL_CLOUDLY: ${{secrets.NPMCI_URL_CLOUDLY}}
jobs:
security:
runs-on: ubuntu-latest
continue-on-error: true
container:
image: ${{ env.IMAGE }}
steps:
- uses: actions/checkout@v3
- name: Prepare
run: |
pnpm install -g pnpm
pnpm install -g @shipzone/npmci
npmci npm prepare
- name: Audit production dependencies
run: |
npmci command npm config set registry https://registry.npmjs.org
npmci command pnpm audit --audit-level=high --prod
continue-on-error: true
- name: Audit development dependencies
run: |
npmci command npm config set registry https://registry.npmjs.org
npmci command pnpm audit --audit-level=high --dev
continue-on-error: true
test:
if: ${{ always() }}
needs: security
runs-on: ubuntu-latest
container:
image: ${{ env.IMAGE }}
steps:
- uses: actions/checkout@v3
- name: Prepare
run: |
pnpm install -g pnpm
pnpm install -g @shipzone/npmci
npmci npm prepare
- name: Test stable
run: |
npmci node install stable
npmci npm install
npmci npm test
- name: Test build
run: |
npmci node install stable
npmci npm install
npmci npm build
release:
needs: test
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
runs-on: ubuntu-latest
container:
image: ${{ env.IMAGE }}
steps:
- uses: actions/checkout@v3
- name: Prepare
run: |
pnpm install -g pnpm
pnpm install -g @shipzone/npmci
npmci npm prepare
- name: Release
run: |
npmci node install stable
npmci npm publish
metadata:
needs: test
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
runs-on: ubuntu-latest
container:
image: ${{ env.IMAGE }}
continue-on-error: true
steps:
- uses: actions/checkout@v3
- name: Prepare
run: |
pnpm install -g pnpm
pnpm install -g @shipzone/npmci
npmci npm prepare
- name: Code quality
run: |
npmci command npm install -g typescript
npmci npm install
- name: Trigger
run: npmci trigger
- name: Build docs and upload artifacts
run: |
npmci node install stable
npmci npm install
pnpm install -g @git.zone/tsdoc
npmci command tsdoc
continue-on-error: true

View File

@ -1,141 +0,0 @@
# gitzone ci_default
image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
cache:
paths:
- .npmci_cache/
key: '$CI_BUILD_STAGE'
stages:
- security
- test
- release
- metadata
before_script:
- npm install -g @shipzone/npmci
# ====================
# security stage
# ====================
mirror:
stage: security
script:
- npmci git mirror
only:
- tags
tags:
- lossless
- docker
- notpriv
auditProductionDependencies:
image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
stage: security
script:
- npmci npm prepare
- npmci command npm install --production --ignore-scripts
- npmci command npm config set registry https://registry.npmjs.org
- npmci command npm audit --audit-level=high --only=prod --production
tags:
- docker
allow_failure: true
auditDevDependencies:
image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
stage: security
script:
- npmci npm prepare
- npmci command npm install --ignore-scripts
- npmci command npm config set registry https://registry.npmjs.org
- npmci command npm audit --audit-level=high --only=dev
tags:
- docker
allow_failure: true
# ====================
# test stage
# ====================
testStable:
stage: test
script:
- npmci npm prepare
- npmci node install stable
- npmci npm install
- npmci npm test
coverage: /\d+.?\d+?\%\s*coverage/
tags:
- docker
testBuild:
stage: test
script:
- npmci npm prepare
- npmci node install stable
- npmci npm install
- npmci command npm run build
coverage: /\d+.?\d+?\%\s*coverage/
tags:
- docker
release:
stage: release
script:
- npmci node install stable
- npmci npm publish
only:
- tags
tags:
- lossless
- docker
- notpriv
# ====================
# metadata stage
# ====================
codequality:
stage: metadata
allow_failure: true
only:
- tags
script:
- npmci command npm install -g tslint typescript
- npmci npm prepare
- npmci npm install
- npmci command "tslint -c tslint.json ./ts/**/*.ts"
tags:
- lossless
- docker
- priv
trigger:
stage: metadata
script:
- npmci trigger
only:
- tags
tags:
- lossless
- docker
- notpriv
pages:
stage: metadata
script:
- npmci node install lts
- npmci command npm install -g @gitzone/tsdoc
- npmci npm prepare
- npmci npm install
- npmci command tsdoc
tags:
- lossless
- docker
- notpriv
only:
- tags
artifacts:
expire_in: 1 week
paths:
- public
allow_failure: true

View File

@ -8,10 +8,10 @@
"projectType": "npm",
"module": {
"githost": "gitlab.com",
- "gitscope": "pushrocks",
+ "gitscope": "push.rocks",
"gitrepo": "smartfile",
"description": "smart ways to work with files in nodejs",
- "npmPackagename": "@pushrocks/smartfile",
+ "npmPackagename": "@push.rocks/smartfile",
"license": "MIT"
}
}

package-lock.json (generated): 11317 lines
File diff suppressed because it is too large

View File

@ -1,18 +1,19 @@
{
- "name": "@pushrocks/smartfile",
+ "name": "@push.rocks/smartfile",
"private": false,
- "version": "10.0.1",
+ "version": "11.0.0",
"description": "offers smart ways to work with files in nodejs",
"main": "dist_ts/index.js",
"typings": "dist_ts/index.d.ts",
"type": "module",
"scripts": {
"test": "(tstest test/)",
- "build": "(tsbuild --web --allowimplicitany)"
+ "build": "(tsbuild --web --allowimplicitany)",
+ "buildDocs": "tsdoc"
},
"repository": {
"type": "git",
- "url": "https://gitlab.com/pushrocks/smartfile.git"
+ "url": "git+https://gitlab.com/push.rocks/smartfile.git"
},
"keywords": [
"filesystem",
@ -21,35 +22,33 @@
"author": "Lossless GmbH <hello@lossless.com> (https://lossless.com)",
"license": "MIT",
"bugs": {
- "url": "https://gitlab.com/pushrocks/smartfile/issues"
+ "url": "https://gitlab.com/push.rocks/smartfile/issues"
},
- "homepage": "https://gitlab.com/pushrocks/smartfile",
+ "homepage": "https://gitlab.com/push.rocks/smartfile#readme",
"dependencies": {
- "@pushrocks/lik": "^6.0.0",
- "@pushrocks/smartdelay": "^2.0.13",
- "@pushrocks/smartfile-interfaces": "^1.0.7",
- "@pushrocks/smarthash": "^2.1.10",
- "@pushrocks/smartjson": "^4.0.6",
- "@pushrocks/smartmime": "^1.0.5",
- "@pushrocks/smartpath": "^5.0.5",
- "@pushrocks/smartpromise": "^3.1.6",
- "@pushrocks/smartrequest": "^1.1.56",
- "@pushrocks/smartstream": "^2.0.1",
- "@pushrocks/streamfunction": "^4.0.4",
- "@types/fs-extra": "^9.0.13",
- "@types/glob": "^7.2.0",
- "@types/js-yaml": "^4.0.5",
- "fs-extra": "^10.1.0",
- "glob": "^8.0.3",
+ "@push.rocks/lik": "^6.0.5",
+ "@push.rocks/smartdelay": "^3.0.5",
+ "@push.rocks/smartfile-interfaces": "^1.0.7",
+ "@push.rocks/smarthash": "^3.0.4",
+ "@push.rocks/smartjson": "^5.0.10",
+ "@push.rocks/smartmime": "^1.0.5",
+ "@push.rocks/smartpath": "^5.0.11",
+ "@push.rocks/smartpromise": "^4.0.2",
+ "@push.rocks/smartrequest": "^2.0.20",
+ "@push.rocks/smartstream": "^3.0.7",
+ "@types/fs-extra": "^11.0.3",
+ "@types/glob": "^8.1.0",
+ "@types/js-yaml": "^4.0.8",
+ "fs-extra": "^11.1.1",
+ "glob": "^10.3.10",
"js-yaml": "^4.1.0"
},
"devDependencies": {
- "@gitzone/tsbuild": "^2.1.63",
- "@gitzone/tsrun": "^1.2.35",
- "@gitzone/tstest": "^1.0.71",
- "@pushrocks/tapbundle": "^5.0.3",
- "@types/node": "^17.0.40",
- "gulp-function": "^2.2.14"
+ "@git.zone/tsbuild": "^2.1.70",
+ "@git.zone/tsrun": "^1.2.46",
+ "@git.zone/tstest": "^1.0.81",
+ "@push.rocks/tapbundle": "^5.0.15",
+ "@types/node": "^20.8.10"
},
"files": [
"ts/**/*",

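The package.json changes above amount to a scope migration: the module moves from @pushrocks/smartfile to @push.rocks/smartfile (and the gitzone tooling to @git.zone), and the jump to 11.0.0 marks it as breaking. A minimal migration sketch for consumers, assuming a plain ESM TypeScript project and pnpm as the package manager; any npm client works the same way:

// remove the old scope and install the renamed package
//   pnpm remove @pushrocks/smartfile
//   pnpm add @push.rocks/smartfile

// before (v10.x)
// import * as smartfile from '@pushrocks/smartfile';

// after (v11.x)
import * as smartfile from '@push.rocks/smartfile';

// fs, fsStream and the SmartFile / StreamFile / VirtualDirectory classes are re-exported by ts/index.ts
console.log(Object.keys(smartfile));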
pnpm-lock.yaml (generated, new file): 5583 lines
File diff suppressed because it is too large

View File

@ -1,27 +1,26 @@
# @pushrocks/smartfile
+ # @push.rocks/smartfile
smart ways to work with files in nodejs
## Availabililty and Links
- * [npmjs.org (npm package)](https://www.npmjs.com/package/@pushrocks/smartfile)
- * [gitlab.com (source)](https://gitlab.com/pushrocks/smartfile)
- * [github.com (source mirror)](https://github.com/pushrocks/smartfile)
- * [docs (typedoc)](https://pushrocks.gitlab.io/smartfile/)
+ * [npmjs.org (npm package)](https://www.npmjs.com/package/@push.rocks/smartfile)
+ * [gitlab.com (source)](https://gitlab.com/push.rocks/smartfile)
+ * [github.com (source mirror)](https://github.com/push.rocks/smartfile)
+ * [docs (typedoc)](https://push.rocks.gitlab.io/smartfile/)
## Status for master
Status Category | Status Badge
-- | --
- GitLab Pipelines | [![pipeline status](https://gitlab.com/pushrocks/smartfile/badges/master/pipeline.svg)](https://lossless.cloud)
- GitLab Pipline Test Coverage | [![coverage report](https://gitlab.com/pushrocks/smartfile/badges/master/coverage.svg)](https://lossless.cloud)
- npm | [![npm downloads per month](https://badgen.net/npm/dy/@pushrocks/smartfile)](https://lossless.cloud)
- Snyk | [![Known Vulnerabilities](https://badgen.net/snyk/pushrocks/smartfile)](https://lossless.cloud)
+ GitLab Pipelines | [![pipeline status](https://gitlab.com/push.rocks/smartfile/badges/master/pipeline.svg)](https://lossless.cloud)
+ GitLab Pipline Test Coverage | [![coverage report](https://gitlab.com/push.rocks/smartfile/badges/master/coverage.svg)](https://lossless.cloud)
+ npm | [![npm downloads per month](https://badgen.net/npm/dy/@push.rocks/smartfile)](https://lossless.cloud)
+ Snyk | [![Known Vulnerabilities](https://badgen.net/snyk/push.rocks/smartfile)](https://lossless.cloud)
TypeScript Support | [![TypeScript](https://badgen.net/badge/TypeScript/>=%203.x/blue?icon=typescript)](https://lossless.cloud)
node Support | [![node](https://img.shields.io/badge/node->=%2010.x.x-blue.svg)](https://nodejs.org/dist/latest-v10.x/docs/api/)
Code Style | [![Code Style](https://badgen.net/badge/style/prettier/purple)](https://lossless.cloud)
- PackagePhobia (total standalone install weight) | [![PackagePhobia](https://badgen.net/packagephobia/install/@pushrocks/smartfile)](https://lossless.cloud)
- PackagePhobia (package size on registry) | [![PackagePhobia](https://badgen.net/packagephobia/publish/@pushrocks/smartfile)](https://lossless.cloud)
- BundlePhobia (total size when bundled) | [![BundlePhobia](https://badgen.net/bundlephobia/minzip/@pushrocks/smartfile)](https://lossless.cloud)
+ PackagePhobia (total standalone install weight) | [![PackagePhobia](https://badgen.net/packagephobia/install/@push.rocks/smartfile)](https://lossless.cloud)
+ PackagePhobia (package size on registry) | [![PackagePhobia](https://badgen.net/packagephobia/publish/@push.rocks/smartfile)](https://lossless.cloud)
+ BundlePhobia (total size when bundled) | [![BundlePhobia](https://badgen.net/bundlephobia/minzip/@push.rocks/smartfile)](https://lossless.cloud)
Platform support | [![Supports Windows 10](https://badgen.net/badge/supports%20Windows%2010/yes/green?icon=windows)](https://lossless.cloud) [![Supports Mac OS X](https://badgen.net/badge/supports%20Mac%20OS%20X/yes/green?icon=apple)](https://lossless.cloud)
## Usage
@ -45,7 +44,6 @@ We are always happy for code contributions. If you are not the code contributing
For further information read the linked docs at the top of this readme.
- > MIT licensed | **&copy;** [Lossless GmbH](https://lossless.gmbh)
+ ## Legal
+ > MIT licensed | **&copy;** [Task Venture Capital GmbH](https://task.vc)
| By using this npm module you agree to our [privacy policy](https://lossless.gmbH/privacy)
[![repo-footer](https://lossless.gitlab.io/publicrelations/repofooter.svg)](https://maintainedby.lossless.com)

test/test.streamfile.ts (new file): 67 lines

@ -0,0 +1,67 @@
import * as path from 'path';
import { expect, tap } from '@push.rocks/tapbundle';
import * as smartfile from '../ts/index.js'; // adjust the import path as needed
// Test assets path
const testAssetsPath = './test/testassets/';
// ---------------------------
// StreamFile tests
// ---------------------------
tap.test('StreamFile.fromPath should create a StreamFile from a file path', async () => {
const streamFile = await smartfile.StreamFile.fromPath(path.join(testAssetsPath, 'mytest.json'));
expect(streamFile).toBeInstanceOf(smartfile.StreamFile);
const contentBuffer = await streamFile.getContentAsBuffer();
expect(contentBuffer).toBeInstanceOf(Buffer);
});
tap.test('StreamFile.fromUrl should create a StreamFile from a URL', async () => {
const streamFile = await smartfile.StreamFile.fromUrl('http://example.com/somefile.json');
expect(streamFile).toBeInstanceOf(smartfile.StreamFile);
});
tap.test('StreamFile.fromBuffer should create a StreamFile from a Buffer', async () => {
const buffer = Buffer.from('Some content');
const streamFile = smartfile.StreamFile.fromBuffer(buffer, 'bufferfile.txt');
expect(streamFile).toBeInstanceOf(smartfile.StreamFile);
});
tap.test('StreamFile should write the stream to disk', async () => {
const streamFile = await smartfile.StreamFile.fromPath(path.join(testAssetsPath, 'mytest.json'));
await streamFile.writeToDisk(path.join(testAssetsPath, 'temp', 'mytest.json'));
// Verify the file was written
expect(
// We'll use the fileExists method from your smartfile library
// Replace with the actual method you use to check file existence
await smartfile.fs.fileExists(path.join(testAssetsPath, 'temp', 'mytest.json'))
).toBeTrue();
});
tap.test('StreamFile should write to a directory', async () => {
const streamFile = await smartfile.StreamFile.fromPath(path.join(testAssetsPath, 'mytest.json'));
await streamFile.writeToDir(path.join(testAssetsPath, 'temp'));
// Verify the file was written
expect(
await smartfile.fs.fileExists(path.join(testAssetsPath, 'temp', 'mytest.json'))
).toBeTrue();
});
tap.test('StreamFile should return content as a buffer', async () => {
const streamFile = await smartfile.StreamFile.fromPath(path.join(testAssetsPath, 'mytest.json'));
const contentBuffer = await streamFile.getContentAsBuffer();
expect(contentBuffer).toBeInstanceOf(Buffer);
// Further checks on the content can be added here if necessary
});
tap.test('StreamFile should return content as a string', async () => {
const streamFile = await smartfile.StreamFile.fromPath(path.join(testAssetsPath, 'mytest.json'));
const contentString = await streamFile.getContentAsString();
expect(typeof contentString).toBeTypeofString();
// Verify the content matches what's expected
// This assumes the file contains a JSON object with a key 'key1' with value 'this works'
expect(JSON.parse(contentString).key1).toEqual('this works');
});
// Start the test sequence
tap.start();

View File

@ -1,7 +1,7 @@
import * as smartfile from '../ts/index.js';
import * as path from 'path';
- import { expect, tap } from '@pushrocks/tapbundle';
+ import { expect, tap } from '@push.rocks/tapbundle';
// ---------------------------
// smartfile.fs
@ -38,9 +38,7 @@ tap.test('.fs.listFolders() -> should get the file type from a string', async ()
tap.test('.fs.listFilesSync() -> should get the file type from a string', async () => {
expect(smartfile.fs.listFilesSync('./test/testassets/')).toContain('mytest.json');
expect(smartfile.fs.listFilesSync('./test/testassets/')).not.toContain('notExistentFile');
- expect(smartfile.fs.listFilesSync('./test/testassets/', /mytest\.json/)).toContain(
- 'mytest.json'
- );
+ expect(smartfile.fs.listFilesSync('./test/testassets/', /mytest\.json/)).toContain('mytest.json');
expect(smartfile.fs.listFilesSync('./test/testassets/', /mytests.json/)).not.toContain(
'mytest.json'
);
@ -66,7 +64,7 @@ tap.test('.fs.fileTreeToObject -> should read a file tree into an Object', async
path.resolve('./test/testassets/'),
'**/*.txt'
);
- expect(fileArrayArg[0]).toBeInstanceOf(smartfile.Smartfile);
+ expect(fileArrayArg[0]).toBeInstanceOf(smartfile.SmartFile);
expect(fileArrayArg[0].contents.toString()).toEqual(fileArrayArg[0].contentBuffer.toString());
});
@ -133,9 +131,7 @@ tap.test('.fs.toObjectSync() -> should read an .json file to an object', async (
});
tap.test('.fs.toStringSync() -> should read a file to a string', async () => {
- expect(smartfile.fs.toStringSync('./test/testassets/mytest.txt')).toEqual(
- 'Some TestString &&%$'
- );
+ expect(smartfile.fs.toStringSync('./test/testassets/mytest.txt')).toEqual('Some TestString &&%$');
});
// ---------------------------
@ -179,7 +175,7 @@ tap.test('.Smartfile -> should produce vinyl compatible files', async () => {
'./test/testassets/testfolder/**/*'
);
const localSmartfile = smartfileArray[0];
- expect(localSmartfile).toBeInstanceOf(smartfile.Smartfile);
+ expect(localSmartfile).toBeInstanceOf(smartfile.SmartFile);
expect(localSmartfile.contents).toBeInstanceOf(Buffer);
// tslint:disable-next-line:no-unused-expression
expect(localSmartfile.isBuffer()).toBeTrue();
@ -206,13 +202,21 @@ tap.test('should output a smartfile array to disk', async () => {
tap.test('should create, store and retrieve valid smartfiles', async () => {
const fileString = 'hi there';
const filePath = './test/testassets/utf8.txt';
- const smartfileInstance = await smartfile.Smartfile.fromString(filePath, fileString, 'utf8');
+ const smartfileInstance = await smartfile.SmartFile.fromString(filePath, fileString, 'utf8');
smartfileInstance.write();
- const smartfileInstance2 = await smartfile.Smartfile.fromFilePath(filePath);
+ const smartfileInstance2 = await smartfile.SmartFile.fromFilePath(filePath);
const retrievedString = smartfileInstance.contents.toString();
expect(retrievedString).toEqual(fileString);
});
tap.test('should get a hash', async () => {
const fileString = 'hi there';
const filePath = './test/testassets/utf8.txt';
const smartfileInstance = await smartfile.SmartFile.fromString(filePath, fileString, 'utf8');
const hash = await smartfileInstance.getHash();
console.log(hash);
});
tap.test('should wait for file to be ready', async () => {
await smartfile.fs.waitForFileToBeReady('./test/testassets/mytest.json');
});

View File

@ -1,4 +1,4 @@
- import { tap, expect } from '@pushrocks/tapbundle';
+ import { tap, expect } from '@push.rocks/tapbundle';
import * as smartfile from '../ts/index.js';

View File

@ -0,0 +1,8 @@
{
"key1": "this works",
"key2": "this works too",
"key3": {
"nestedkey1": "hello"
}
}

View File

@ -0,0 +1,8 @@
{
"key1": "this works",
"key2": "this works too",
"key3": {
"nestedkey1": "hello"
}
}

View File

@ -2,7 +2,7 @@
* autocreated commitinfo by @pushrocks/commitinfo
*/
export const commitinfo = {
- name: '@pushrocks/smartfile',
- version: '10.0.1',
+ name: '@push.rocks/smartfile',
+ version: '11.0.0',
description: 'offers smart ways to work with files in nodejs'
}

View File

@ -1,6 +1,6 @@
import * as plugins from './smartfile.plugins.js';
- import * as fs from './smartfile.fs.js';
- import * as memory from './smartfile.memory.js';
+ import * as fs from './fs.js';
+ import * as memory from './memory.js';
export interface ISmartfileConstructorOptions {
path: string;
@ -9,10 +9,9 @@ export interface ISmartfileConstructorOptions {
}
/**
- * class Smartfile
- * -> is vinyl file compatible
+ * an vinyl file compatible in memory file class
*/
- export class Smartfile extends plugins.smartjson.Smartjson {
+ export class SmartFile extends plugins.smartjson.Smartjson {
// ======
// STATIC
// ======
@ -24,7 +23,7 @@ export class Smartfile extends plugins.smartjson.Smartjson {
public static async fromFilePath(filePath: string, baseArg: string = process.cwd()) {
filePath = plugins.path.resolve(filePath);
const fileBuffer = fs.toBufferSync(filePath);
- const smartfile = new Smartfile({
+ const smartfile = new SmartFile({
contentBuffer: fileBuffer,
base: baseArg,
path: plugins.path.relative(baseArg, filePath),
@ -37,7 +36,7 @@ export class Smartfile extends plugins.smartjson.Smartjson {
contentBufferArg: Buffer,
baseArg: string = process.cwd()
) {
- const smartfile = new Smartfile({
+ const smartfile = new SmartFile({
contentBuffer: contentBufferArg,
base: baseArg,
path: plugins.path.relative(baseArg, filePath),
@ -52,7 +51,7 @@ export class Smartfile extends plugins.smartjson.Smartjson {
encodingArg: 'utf8' | 'binary',
baseArg = process.cwd()
) {
- const smartfile = new Smartfile({
+ const smartfile = new SmartFile({
contentBuffer: Buffer.from(contentStringArg, encodingArg),
base: baseArg,
path: plugins.path.relative(baseArg, filePath),
@ -62,7 +61,34 @@ export class Smartfile extends plugins.smartjson.Smartjson {
}
public static async fromFoldedJson(foldedJsonArg: string) {
- return new Smartfile(plugins.smartjson.parse(foldedJsonArg));
+ return new SmartFile(plugins.smartjson.parse(foldedJsonArg));
}
/**
* creates a Smartfile from a ReadableStream
* @param stream a readable stream that provides file content
* @param filePath the file path to associate with the content
* @param baseArg the base path to use for the file
*/
public static async fromStream(
stream: plugins.stream.Readable,
filePath: string,
baseArg: string = process.cwd()
): Promise<SmartFile> {
return new Promise<SmartFile>((resolve, reject) => {
const chunks: Buffer[] = [];
stream.on('data', (chunk) => chunks.push(Buffer.from(chunk)));
stream.on('error', (error) => reject(error));
stream.on('end', () => {
const contentBuffer = Buffer.concat(chunks);
const smartfile = new SmartFile({
contentBuffer: contentBuffer,
base: baseArg,
path: plugins.path.relative(baseArg, filePath),
});
resolve(smartfile);
});
});
}
// ========
@ -128,7 +154,7 @@ export class Smartfile extends plugins.smartjson.Smartjson {
* @param contentString
*/
public setContentsFromString(contentString: string, encodingArg: 'utf8' | 'binary' = 'utf8') {
- this.contents = new Buffer(contentString, encodingArg);
+ this.contents = Buffer.from(contentString, encodingArg);
}
/**
@ -237,9 +263,39 @@ export class Smartfile extends plugins.smartjson.Smartjson {
return false;
}
public async getHash(typeArg: 'path' | 'content' | 'all' = 'all') {
const pathHash = await plugins.smarthash.sha256FromString(this.path);
const contentHash = await plugins.smarthash.sha256FromBuffer(this.contentBuffer);
const combinedHash = await plugins.smarthash.sha256FromString(pathHash + contentHash);
switch (typeArg) {
case 'path':
return pathHash;
case 'content':
return contentHash;
case 'all':
default:
return combinedHash;
}
}
// update things
public updateFileName(fileNameArg: string) {
const oldFileName = this.parsedPath.base;
this.path = this.path.replace(new RegExp(oldFileName + '$'), fileNameArg);
}
public async editContentAsString(editFuncArg: (fileStringArg: string) => Promise<string>) {
const newFileString = await editFuncArg(this.contentBuffer.toString());
this.contentBuffer = Buffer.from(newFileString);
}
/**
* Returns a ReadableStream from the file's content buffer
*/
public getStream(): plugins.stream.Readable {
const stream = new plugins.stream.Readable();
stream.push(this.contentBuffer); // Push the content buffer to the stream
stream.push(null); // Push null to signify the end of the stream (EOF)
return stream;
}
}
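Beyond the Smartfile to SmartFile rename, the class gains stream and hash helpers in this diff: fromStream, getHash, editContentAsString, and getStream. A rough usage sketch based only on the signatures added above; the file path is illustrative:

import * as fs from 'fs';
import { SmartFile } from '@push.rocks/smartfile';

const run = async () => {
  // build a SmartFile by buffering a Readable stream
  const smartFile = await SmartFile.fromStream(
    fs.createReadStream('./test/testassets/mytest.json'),
    './test/testassets/mytest.json'
  );

  // sha256 over the path, the content, or both combined (the default 'all')
  console.log(await smartFile.getHash('content'));

  // transform the contents with an async string editor
  await smartFile.editContentAsString(async (current) => current.toUpperCase());

  // hand the buffer back out as a Readable when a stream consumer needs it
  smartFile.getStream().pipe(process.stdout);
};

run();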

ts/classes.streamfile.ts (new file): 152 lines

@ -0,0 +1,152 @@
import * as plugins from './smartfile.plugins.js';
import * as smartfileFs from './fs.js';
import * as smartfileFsStream from './fsstream.js';
import { Readable } from 'stream';
type TStreamSource = (streamFile: StreamFile) => Promise<Readable>;
/**
* The StreamFile class represents a file as a stream.
* It allows creating streams from a file path, a URL, or a buffer.
*/
export class StreamFile {
// INSTANCE
relativeFilePath?: string;
private streamSource: TStreamSource;
// enable stream based multi use
private cachedStreamBuffer?: Buffer;
public multiUse: boolean;
public used: boolean = false;
private constructor(streamSource: TStreamSource, relativeFilePath?: string) {
this.streamSource = streamSource;
this.relativeFilePath = relativeFilePath;
}
// STATIC
public static async fromPath(filePath: string): Promise<StreamFile> {
const streamSource: TStreamSource = async (stremFileArg) => smartfileFsStream.createReadStream(filePath);
const streamFile = new StreamFile(streamSource, filePath);
streamFile.multiUse = true;
return streamFile;
}
public static async fromUrl(url: string): Promise<StreamFile> {
const streamSource: TStreamSource = async (streamFileArg) => plugins.smartrequest.getStream(url); // Replace with actual plugin method
const streamFile = new StreamFile(streamSource);
streamFile.multiUse = true;
return streamFile;
}
public static fromBuffer(buffer: Buffer, relativeFilePath?: string): StreamFile {
const streamSource: TStreamSource = async (streamFileArg) => {
const stream = new Readable();
stream.push(buffer);
stream.push(null); // End of stream
return stream;
};
const streamFile = new StreamFile(streamSource, relativeFilePath);
streamFile.multiUse = true;
return streamFile;
}
/**
* Creates a StreamFile from an existing Readable stream with an option for multiple uses.
* @param stream A Node.js Readable stream.
* @param relativeFilePath Optional file path for the stream.
* @param multiUse If true, the stream can be read multiple times, caching its content.
* @returns A StreamFile instance.
*/
public static fromStream(stream: Readable, relativeFilePath?: string, multiUse: boolean = false): StreamFile {
const streamSource: TStreamSource = (streamFileArg) => {
if (streamFileArg.multiUse) {
// If multi-use is enabled and we have cached content, create a new readable stream from the buffer
const bufferedStream = new Readable();
bufferedStream.push(streamFileArg.cachedStreamBuffer);
bufferedStream.push(null); // No more data to push
return Promise.resolve(bufferedStream);
} else {
return Promise.resolve(stream);
}
};
const streamFile = new StreamFile(streamSource, relativeFilePath);
streamFile.multiUse = multiUse;
// If multi-use is enabled, cache the stream when it's first read
if (multiUse) {
const chunks: Buffer[] = [];
stream.on('data', (chunk) => chunks.push(Buffer.from(chunk)));
stream.on('end', () => {
streamFile.cachedStreamBuffer = Buffer.concat(chunks);
});
// It's important to handle errors that may occur during streaming
stream.on('error', (err) => {
console.error('Error while caching stream:', err);
});
}
return streamFile;
}
// METHODS
private checkMultiUse() {
if (!this.multiUse && this.used) {
throw new Error('This stream can only be used once.');
}
this.used = true;
}
/**
* Creates a new readable stream from the source.
*/
public async createReadStream(): Promise<Readable> {
return this.streamSource(this);
}
/**
* Writes the stream to the disk at the specified path.
* @param filePathArg The file path where the stream should be written.
*/
public async writeToDisk(filePathArg: string): Promise<void> {
this.checkMultiUse();
const readStream = await this.createReadStream();
const writeStream = smartfileFsStream.createWriteStream(filePathArg);
return new Promise((resolve, reject) => {
readStream.pipe(writeStream);
readStream.on('error', reject);
writeStream.on('error', reject);
writeStream.on('finish', resolve);
});
}
public async writeToDir(dirPathArg: string) {
this.checkMultiUse();
const filePath = plugins.path.join(dirPathArg, this.relativeFilePath);
await smartfileFs.ensureDir(plugins.path.parse(filePath).dir);
return this.writeToDisk(filePath);
}
public async getContentAsBuffer() {
this.checkMultiUse();
const done = plugins.smartpromise.defer<Buffer>();
const readStream = await this.createReadStream();
const chunks: Buffer[] = [];
readStream.on('data', (chunk) => chunks.push(Buffer.from(chunk)));
readStream.on('error', done.reject);
readStream.on('end', () => {
const contentBuffer = Buffer.concat(chunks);
done.resolve(contentBuffer);
});
return done.promise;
}
public async getContentAsString(formatArg: 'utf8' | 'binary' = 'utf8') {
const contentBuffer = await this.getContentAsBuffer();
return contentBuffer.toString(formatArg);
}
}
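StreamFile is a new class in this release: it wraps a lazily created Readable plus an optional relative path, and caches content when multiUse is enabled. A short sketch of the static constructors and consumers defined above; the paths and URL are placeholders:

import { StreamFile } from '@push.rocks/smartfile';

const demo = async () => {
  // from a file on disk; fromPath enables multiUse, so it can be consumed more than once
  const local = await StreamFile.fromPath('./package.json');
  console.log((await local.getContentAsBuffer()).length);
  console.log(await local.getContentAsString());

  // from a buffer, keeping a relative path so writeToDir knows where to place it
  const note = StreamFile.fromBuffer(Buffer.from('hello'), 'notes/hello.txt');
  await note.writeToDir('./output'); // ends up at ./output/notes/hello.txt

  // from a URL, fetched through the smartrequest plugin
  const remote = await StreamFile.fromUrl('https://example.com/file.json');
  await remote.writeToDisk('./output/file.json');
};

demo();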

View File

@ -1,11 +1,21 @@
- import { Smartfile } from './smartfile.classes.smartfile.js';
+ import { SmartFile } from './classes.smartfile.js';
import * as plugins from './smartfile.plugins.js';
- import * as fs from './smartfile.fs.js';
+ import * as fs from './fs.js';
export interface IVirtualDirectoryConstructorOptions {
mode: ''
}
/**
* a virtual directory exposes a fs api
*/
export class VirtualDirectory {
consstructor(options = {}) {
}
// STATIC
public static async fromFsDirPath(pathArg: string): Promise<VirtualDirectory> {
const newVirtualDir = new VirtualDirectory();
@ -18,17 +28,17 @@ export class VirtualDirectory {
): Promise<VirtualDirectory> {
const newVirtualDir = new VirtualDirectory();
for (const fileArg of virtualDirTransferableObjectArg.files) {
- newVirtualDir.addSmartfiles([Smartfile.enfoldFromJson(fileArg) as Smartfile]);
+ newVirtualDir.addSmartfiles([SmartFile.enfoldFromJson(fileArg) as SmartFile]);
}
return newVirtualDir;
}
// INSTANCE
- public smartfileArray: Smartfile[] = [];
+ public smartfileArray: SmartFile[] = [];
constructor() {}
- public addSmartfiles(smartfileArrayArg: Smartfile[]) {
+ public addSmartfiles(smartfileArrayArg: SmartFile[]) {
this.smartfileArray = this.smartfileArray.concat(smartfileArrayArg);
}
@ -47,7 +57,7 @@ export class VirtualDirectory {
}
public async saveToDisk(dirArg: string) {
- console.log(`writing VirtualDirectory with ${this.smartfileArray.length} to directory:
+ console.log(`writing VirtualDirectory with ${this.smartfileArray.length} files to directory:
--> ${dirArg}`);
for (const smartfileArg of this.smartfileArray) {
const filePath = await smartfileArg.writeToDir(dirArg);
@ -56,6 +66,22 @@ export class VirtualDirectory {
}
}
- // TODO implement root shifting to get subdirectories as new virtual directories
- // TODO implement root shifting to combine VirtualDirecotries in a parent virtual directory
public async shiftToSubdirectory(subDir: string): Promise<VirtualDirectory> {
const newVirtualDir = new VirtualDirectory();
for (const file of this.smartfileArray) {
if (file.path.startsWith(subDir)) {
const adjustedFilePath = plugins.path.relative(subDir, file.path);
file.path = adjustedFilePath;
newVirtualDir.addSmartfiles([file]);
}
}
return newVirtualDir;
}
public async addVirtualDirectory(virtualDir: VirtualDirectory, newRoot: string): Promise<void> {
for (const file of virtualDir.smartfileArray) {
file.path = plugins.path.join(newRoot, file.path);
}
this.addSmartfiles(virtualDir.smartfileArray);
}
}
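The two TODO comments about root shifting are replaced by working helpers: shiftToSubdirectory extracts everything below a path prefix into a new VirtualDirectory, and addVirtualDirectory mounts another VirtualDirectory under a new root. A hedged sketch of how they compose; the directory names are made up:

import { VirtualDirectory } from '@push.rocks/smartfile';

const combine = async () => {
  const appDir = await VirtualDirectory.fromFsDirPath('./packages/app');
  const libDir = await VirtualDirectory.fromFsDirPath('./packages/lib');

  // mount libDir below vendor/lib inside appDir
  await appDir.addVirtualDirectory(libDir, 'vendor/lib');

  // pull the vendor subtree back out as its own VirtualDirectory
  const vendorOnly = await appDir.shiftToSubdirectory('vendor');

  await vendorOnly.saveToDisk('./dist/vendor');
};

combine();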

View File

@ -1,9 +1,9 @@
import * as plugins from './smartfile.plugins.js';
- import * as interpreter from './smartfile.interpreter.js';
- import { Smartfile } from './smartfile.classes.smartfile.js';
- import * as memory from './smartfile.memory.js';
+ import * as interpreter from './interpreter.js';
+ import { SmartFile } from './classes.smartfile.js';
+ import * as memory from './memory.js';
/*===============================================================
============================ Checks =============================
===============================================================*/
@ -40,7 +40,18 @@ export const fileExists = async (filePath): Promise<boolean> => {
/**
* Checks if given path points to an existing directory
*/
- export const isDirectory = (pathArg): boolean => {
+ export const isDirectory = (pathArg: string): boolean => {
try {
return plugins.fsExtra.statSync(pathArg).isDirectory();
} catch (err) {
return false;
}
};
/**
* Checks if given path points to an existing directory
*/
export const isDirectorySync = (pathArg: string): boolean => {
try {
return plugins.fsExtra.statSync(pathArg).isDirectory();
} catch (err) {
@ -210,11 +221,23 @@ export const toBufferSync = (filePath: string): Buffer => {
return plugins.fsExtra.readFileSync(filePath);
};
/**
* Creates a Readable Stream from a file path.
* @param filePath The path to the file.
* @returns {fs.ReadStream}
*/
export const toReadStream = (filePath: string): plugins.fs.ReadStream => {
if (!fileExistsSync(filePath)) {
throw new Error(`File does not exist at path: ${filePath}`);
}
return plugins.fsExtra.createReadStream(filePath);
};
export const fileTreeToHash = async (dirPathArg: string, miniMatchFilter: string) => {
const fileTreeObject = await fileTreeToObject(dirPathArg, miniMatchFilter);
let combinedString = '';
for (const smartfile of fileTreeObject) {
- combinedString += smartfile.contentBuffer.toString();
+ combinedString += await smartfile.getHash();
}
const hash = await plugins.smarthash.sha256FromString(combinedString);
return hash;
@ -235,7 +258,7 @@ export const fileTreeToObject = async (dirPathArg: string, miniMatchFilter: stri
}
const fileTree = await listFileTree(dirPath, miniMatchFilter);
- const smartfileArray: Smartfile[] = [];
+ const smartfileArray: SmartFile[] = [];
for (const filePath of fileTree) {
const readPath = ((): string => {
if (!plugins.path.isAbsolute(filePath)) {
@ -244,12 +267,12 @@ export const fileTreeToObject = async (dirPathArg: string, miniMatchFilter: stri
return filePath;
}
})();
- const fileContentString = toStringSync(readPath);
+ const fileBuffer = plugins.fs.readFileSync(readPath);
// push a read file as Smartfile
smartfileArray.push(
- new Smartfile({
- contentBuffer: Buffer.from(fileContentString),
+ new SmartFile({
+ contentBuffer: fileBuffer,
base: dirPath,
path: filePath,
})
@ -341,8 +364,6 @@ export const listFileTree = async (
miniMatchFilter: string,
absolutePathsBool: boolean = false
): Promise<string[]> => {
- const done = plugins.smartpromise.defer<string[]>();
// handle absolute miniMatchFilter
let dirPath: string;
if (plugins.path.isAbsolute(miniMatchFilter)) {
@ -356,15 +377,8 @@ export const listFileTree = async (
nodir: true,
dot: true,
};
- plugins.glob(miniMatchFilter, options, (err, files: string[]) => {
- if (err) {
- console.log(err);
- done.reject(err);
- }
- done.resolve(files);
- });
- let fileList = await done.promise;
+ let fileList = await plugins.glob.glob(miniMatchFilter, options);
if (absolutePathsBool) {
fileList = fileList.map((filePath) => {
return plugins.path.resolve(plugins.path.join(dirPath, filePath));
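Two of the fs changes above are easy to show in isolation: toReadStream now guards against a missing file before returning a stream, and listFileTree resolves through the promise-based glob v10 API instead of the old callback. A small sketch; the paths are placeholders:

import * as smartfile from '@push.rocks/smartfile';

const inspect = async () => {
  // throws immediately if the file does not exist, otherwise pipes it out
  smartfile.fs.toReadStream('./test/testassets/mytest.json').pipe(process.stdout);

  // glob for TypeScript sources, relative by default or absolute on request
  const relativePaths = await smartfile.fs.listFileTree('./ts', '**/*.ts');
  const absolutePaths = await smartfile.fs.listFileTree('./ts', '**/*.ts', true);
  console.log(relativePaths.length, absolutePaths.length);
};

inspect();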

ts/fsstream.ts (new file): 39 lines

@ -0,0 +1,39 @@
/*
This file contains logic for streaming things from and to the filesystem
*/
import * as plugins from './smartfile.plugins.js';
export const createReadStream = (pathArg: string) => {
return plugins.fs.createReadStream(pathArg);
};
export const createWriteStream = (pathArg: string) => {
return plugins.fs.createWriteStream(pathArg);
};
export const processFile = async (
filePath: string,
asyncFunc: (fileStream: plugins.stream.Readable) => Promise<void>
): Promise<void> => {
return new Promise((resolve, reject) => {
const fileStream = createReadStream(filePath);
asyncFunc(fileStream).then(resolve).catch(reject);
});
}
export const processDirectory = async (
directoryPath: string,
asyncFunc: (fileStream: plugins.stream.Readable) => Promise<void>
): Promise<void> => {
const files = plugins.fs.readdirSync(directoryPath, { withFileTypes: true });
for (const file of files) {
const fullPath = plugins.path.join(directoryPath, file.name);
if (file.isDirectory()) {
await processDirectory(fullPath, asyncFunc); // Recursively call processDirectory for directories
} else if (file.isFile()) {
await processFile(fullPath, asyncFunc); // Call async function with the file stream and wait for it
}
}
};
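processFile and processDirectory are small helpers for walking a directory tree and handing every file to an async consumer as a Readable; processDirectory recurses into subdirectories and awaits the callback per file. A usage sketch, assuming the callback fully drains the stream it receives:

import * as smartfile from '@push.rocks/smartfile';

const countBytes = async () => {
  let totalBytes = 0;
  await smartfile.fsStream.processDirectory('./ts', async (fileStream) => {
    // consume the stream completely so the walker can continue with the next file
    for await (const chunk of fileStream) {
      totalBytes += (chunk as Buffer).length;
    }
  });
  console.log(`saw ${totalBytes} bytes`);
};

countBytes();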

View File

@ -1,11 +1,12 @@
import * as plugins from './smartfile.plugins.js';
- import * as fsMod from './smartfile.fs.js';
- import * as fsStreamMod from './smartfile.fsstream.js';
- import * as interpreterMod from './smartfile.interpreter.js';
- import * as memoryMod from './smartfile.memory.js';
- export * from './smartfile.classes.smartfile.js';
- export * from './smartfile.classes.virtualdirectory.js';
+ import * as fsMod from './fs.js';
+ import * as fsStreamMod from './fsstream.js';
+ import * as interpreterMod from './interpreter.js';
+ import * as memoryMod from './memory.js';
+ export * from './classes.smartfile.js';
+ export * from './classes.streamfile.js';
+ export * from './classes.virtualdirectory.js';
export const fs = fsMod;
export const fsStream = fsStreamMod;

View File

@ -1,7 +1,8 @@
import * as plugins from './smartfile.plugins.js';
- import { Smartfile } from './smartfile.classes.smartfile.js';
- import * as smartfileFs from './smartfile.fs.js';
- import * as interpreter from './smartfile.interpreter.js';
+ import { SmartFile } from './classes.smartfile.js';
+ import * as smartfileFs from './fs.js';
+ import * as interpreter from './interpreter.js';
+ import type { StreamFile } from './classes.streamfile.js';
/**
* converts file to Object
@ -24,7 +25,7 @@ export interface IToFsOptions {
* @param fileBaseArg
*/
export let toFs = async (
- fileContentArg: string | Buffer | Smartfile,
+ fileContentArg: string | Buffer | SmartFile | StreamFile,
filePathArg: string,
optionsArg: IToFsOptions = {}
) => {
@ -41,7 +42,7 @@ export let toFs = async (
let filePath: string = filePathArg;
// handle Smartfile
- if (fileContentArg instanceof Smartfile) {
+ if (fileContentArg instanceof SmartFile) {
fileContent = fileContentArg.contentBuffer;
// handle options
if (optionsArg.respectRelative) {
@ -83,7 +84,7 @@ export const toFsSync = (fileArg: string, filePathArg: string) => {
plugins.fsExtra.writeFileSync(filePath, fileString, { encoding: 'utf8' });
};
- export let smartfileArrayToFs = async (smartfileArrayArg: Smartfile[], dirArg: string) => {
+ export let smartfileArrayToFs = async (smartfileArrayArg: SmartFile[], dirArg: string) => {
await smartfileFs.ensureDir(dirArg);
for (const smartfile of smartfileArrayArg) {
await toFs(smartfile, dirArg, {
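toFs now also accepts a StreamFile next to strings, Buffers, and SmartFile instances, and smartfileArrayToFs keeps delegating to it per file. A hedged sketch of the widened signature in use, assuming the memory namespace is still re-exported from the package index as in v10 (that export is not part of this hunk); the output paths are examples:

import * as smartfile from '@push.rocks/smartfile';

const persist = async () => {
  // plain string content straight to a path
  await smartfile.memory.toFs('hello world', './output/hello.txt');

  // a SmartFile written below a target directory, keeping its own relative path
  const smartFile = await smartfile.SmartFile.fromString('notes/readme.txt', 'hi there', 'utf8');
  await smartfile.memory.toFs(smartFile, './output', { respectRelative: true });
};

persist();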

View File

@ -1,17 +0,0 @@
/*
This file contains logic for streaming things from and to the filesystem
*/
import * as plugins from './smartfile.plugins.js';
export const createReadStream = (pathArg: string) => {
return plugins.fs.createReadStream(pathArg);
};
export const createWriteStream = (pathArg: string) => {
return plugins.fs.createWriteStream(pathArg);
};
export const streamDirectory = async (dirPathArg: string) => {
}

View File

@ -1,20 +1,21 @@
// node native scope
import * as fs from 'fs';
import * as path from 'path';
+ import * as stream from 'stream';
- export { fs, path };
+ export { fs, path, stream };
// @pushrocks scope
- import * as lik from '@pushrocks/lik';
- import * as smartfileInterfaces from '@pushrocks/smartfile-interfaces';
- import * as smartdelay from '@pushrocks/smartdelay';
- import * as smarthash from '@pushrocks/smarthash';
- import * as smartjson from '@pushrocks/smartjson';
- import * as smartmime from '@pushrocks/smartmime';
- import * as smartpath from '@pushrocks/smartpath';
- import * as smartpromise from '@pushrocks/smartpromise';
- import * as smartrequest from '@pushrocks/smartrequest';
- import * as smartstream from '@pushrocks/smartstream';
+ import * as lik from '@push.rocks/lik';
+ import * as smartfileInterfaces from '@push.rocks/smartfile-interfaces';
+ import * as smartdelay from '@push.rocks/smartdelay';
+ import * as smarthash from '@push.rocks/smarthash';
+ import * as smartjson from '@push.rocks/smartjson';
+ import * as smartmime from '@push.rocks/smartmime';
+ import * as smartpath from '@push.rocks/smartpath';
+ import * as smartpromise from '@push.rocks/smartpromise';
+ import * as smartrequest from '@push.rocks/smartrequest';
+ import * as smartstream from '@push.rocks/smartstream';
export {
lik,
@ -26,12 +27,12 @@ export {
smartpath,
smartpromise,
smartrequest,
- smartstream
+ smartstream,
};
// third party scope
import fsExtra from 'fs-extra';
- import glob from 'glob';
+ import * as glob from 'glob';
import yaml from 'js-yaml';
export { fsExtra, glob, yaml };

View File

@ -1,8 +1,11 @@
{
"compilerOptions": {
"experimentalDecorators": true,
+ "useDefineForClassFields": false,
+ "target": "ES2022",
+ "module": "ES2022",
+ "moduleResolution": "nodenext",
"esModuleInterop": true,
- "target": "ES2017",
- "moduleResolution": "node"
+ "verbatimModuleSyntax": true,
}
} }