Compare commits

...

33 Commits

SHA1 Message Date
3b05aab39b 11.0.2 2023-11-07 21:32:00 +01:00
53be2eb59d fix(core): update 2023-11-07 21:32:00 +01:00
c92a0dddbd 11.0.1 2023-11-07 14:09:49 +01:00
27403a73b5 fix(core): update 2023-11-07 14:09:48 +01:00
b925e5e662 11.0.0 2023-11-06 11:15:12 +01:00
98a5d2c94d BREAKING CHANGE(core): update 2023-11-06 11:15:11 +01:00
0e735cba20 10.0.40 2023-11-04 20:54:14 +01:00
f815457801 fix(core): update 2023-11-04 20:54:13 +01:00
f7e47ae354 10.0.39 2023-11-04 20:43:55 +01:00
684e893801 fix(core): update 2023-11-04 20:43:54 +01:00
d4b381d33d 10.0.38 2023-11-04 20:14:21 +01:00
291a11aa60 fix(core): update 2023-11-04 20:14:20 +01:00
ca592afec9 update 2023-11-04 20:07:43 +01:00
8b07197224 10.0.37 2023-11-03 02:31:57 +01:00
b60fd15ec6 fix(core): update 2023-11-03 02:31:57 +01:00
853eccc780 10.0.36 2023-11-03 02:24:37 +01:00
c26aff85b5 fix(core): update 2023-11-03 02:24:36 +01:00
321e4d9dea 10.0.35 2023-11-03 01:25:38 +01:00
3d2789857c fix(core): update 2023-11-03 01:25:37 +01:00
07b88a078d 10.0.34 2023-11-03 00:41:06 +01:00
6fee0028d8 fix(core): update 2023-11-03 00:41:05 +01:00
629c52f9bc 10.0.33 2023-10-12 02:21:40 +02:00
fd056c29e9 fix(core): update 2023-10-12 02:21:39 +02:00
36c456b509 10.0.32 2023-09-22 17:05:35 +02:00
16f8c25557 fix(core): update 2023-09-22 17:05:35 +02:00
219e070ba2 10.0.31 2023-08-31 18:45:24 +02:00
ee97e1d88b fix(core): update 2023-08-31 18:45:23 +02:00
279db74568 10.0.30 2023-08-23 10:58:38 +02:00
b84c504f11 fix(core): update 2023-08-23 10:58:38 +02:00
7b3194cc13 10.0.29 2023-08-23 09:38:49 +02:00
e1e821efec fix(core): update 2023-08-23 09:38:49 +02:00
6b613d1b8a 10.0.28 2023-07-12 10:00:40 +02:00
70f1c58a82 fix(core): update 2023-07-12 10:00:40 +02:00
21 changed files with 2667 additions and 1068 deletions

View File

@@ -7,14 +7,13 @@ on:
env:
IMAGE: registry.gitlab.com/hosttoday/ht-docker-node:npmci
NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@gitea.lossless.digital/${{gitea.repository}}.git
NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
NPMCI_URL_CLOUDLY: ${{secrets.NPMCI_URL_CLOUDLY}}
jobs:
security:
runs-on: ubuntu-latest
continue-on-error: true
@@ -22,27 +21,27 @@ jobs:
image: ${{ env.IMAGE }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v3
- name: Install pnpm and npmci
run: |
pnpm install -g pnpm
pnpm install -g @shipzone/npmci
- name: Install pnpm and npmci
run: |
pnpm install -g pnpm
pnpm install -g @shipzone/npmci
- name: Run npm prepare
run: npmci npm prepare
- name: Run npm prepare
run: npmci npm prepare
- name: Audit production dependencies
run: |
npmci command npm config set registry https://registry.npmjs.org
npmci command pnpm audit --audit-level=high --prod
continue-on-error: true
- name: Audit production dependencies
run: |
npmci command npm config set registry https://registry.npmjs.org
npmci command pnpm audit --audit-level=high --prod
continue-on-error: true
- name: Audit development dependencies
run: |
npmci command npm config set registry https://registry.npmjs.org
npmci command pnpm audit --audit-level=high --dev
continue-on-error: true
- name: Audit development dependencies
run: |
npmci command npm config set registry https://registry.npmjs.org
npmci command pnpm audit --audit-level=high --dev
continue-on-error: true
test:
if: ${{ always() }}
@@ -52,16 +51,16 @@ jobs:
image: ${{ env.IMAGE }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v3
- name: Test stable
run: |
npmci node install stable
npmci npm install
npmci npm test
- name: Test stable
run: |
npmci node install stable
npmci npm install
npmci npm test
- name: Test build
run: |
npmci node install stable
npmci npm install
npmci npm build
- name: Test build
run: |
npmci node install stable
npmci npm install
npmci npm build

View File

@@ -7,14 +7,13 @@ on:
env:
IMAGE: registry.gitlab.com/hosttoday/ht-docker-node:npmci
NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@gitea.lossless.digital/${{gitea.repository}}.git
NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
NPMCI_URL_CLOUDLY: ${{secrets.NPMCI_URL_CLOUDLY}}
jobs:
security:
runs-on: ubuntu-latest
continue-on-error: true
@@ -22,27 +21,25 @@ jobs:
image: ${{ env.IMAGE }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v3
- name: Install pnpm and npmci
run: |
pnpm install -g pnpm
pnpm install -g @shipzone/npmci
- name: Prepare
run: |
pnpm install -g pnpm
pnpm install -g @shipzone/npmci
npmci npm prepare
- name: Run npm prepare
run: npmci npm prepare
- name: Audit production dependencies
run: |
npmci command npm config set registry https://registry.npmjs.org
npmci command pnpm audit --audit-level=high --prod
continue-on-error: true
- name: Audit production dependencies
run: |
npmci command npm config set registry https://registry.npmjs.org
npmci command pnpm audit --audit-level=high --prod
continue-on-error: true
- name: Audit development dependencies
run: |
npmci command npm config set registry https://registry.npmjs.org
npmci command pnpm audit --audit-level=high --dev
continue-on-error: true
- name: Audit development dependencies
run: |
npmci command npm config set registry https://registry.npmjs.org
npmci command pnpm audit --audit-level=high --dev
continue-on-error: true
test:
if: ${{ always() }}
@@ -52,19 +49,25 @@ jobs:
image: ${{ env.IMAGE }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v3
- name: Test stable
run: |
npmci node install stable
npmci npm install
npmci npm test
- name: Prepare
run: |
pnpm install -g pnpm
pnpm install -g @shipzone/npmci
npmci npm prepare
- name: Test build
run: |
npmci node install stable
npmci npm install
npmci npm build
- name: Test stable
run: |
npmci node install stable
npmci npm install
npmci npm test
- name: Test build
run: |
npmci node install stable
npmci npm install
npmci npm build
release:
needs: test
@@ -74,12 +77,18 @@ jobs:
image: ${{ env.IMAGE }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v3
- name: Release
run: |
npmci node install stable
npmci npm publish
- name: Prepare
run: |
pnpm install -g pnpm
pnpm install -g @shipzone/npmci
npmci npm prepare
- name: Release
run: |
npmci node install stable
npmci npm publish
metadata:
needs: test
@@ -90,21 +99,26 @@
continue-on-error: true
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v3
- name: Code quality
run: |
npmci command npm install -g typescript
npmci npm prepare
npmci npm install
- name: Prepare
run: |
pnpm install -g pnpm
pnpm install -g @shipzone/npmci
npmci npm prepare
- name: Trigger
run: npmci trigger
- name: Code quality
run: |
npmci command npm install -g typescript
npmci npm install
- name: Build docs
run: |
npmci node install stable
npmci npm install
pnpm install -g @gitzone/tsdoc
npmci command tsdoc
continue-on-error: true
- name: Trigger
run: npmci trigger
- name: Build docs and upload artifacts
run: |
npmci node install stable
npmci npm install
pnpm install -g @git.zone/tsdoc
npmci command tsdoc
continue-on-error: true

View File

@@ -1,7 +1,7 @@
{
"name": "@push.rocks/smartfile",
"private": false,
"version": "10.0.27",
"version": "11.0.2",
"description": "offers smart ways to work with files in nodejs",
"main": "dist_ts/index.js",
"typings": "dist_ts/index.d.ts",
@@ -13,7 +13,7 @@
},
"repository": {
"type": "git",
"url": "https://gitlab.com/pushrocks/smartfile.git"
"url": "git+https://gitlab.com/push.rocks/smartfile.git"
},
"keywords": [
"filesystem",
@@ -22,34 +22,33 @@
"author": "Lossless GmbH <hello@lossless.com> (https://lossless.com)",
"license": "MIT",
"bugs": {
"url": "https://gitlab.com/pushrocks/smartfile/issues"
"url": "https://gitlab.com/push.rocks/smartfile/issues"
},
"homepage": "https://gitlab.com/pushrocks/smartfile",
"homepage": "https://gitlab.com/push.rocks/smartfile#readme",
"dependencies": {
"@pushrocks/lik": "^6.0.2",
"@pushrocks/smartdelay": "^3.0.1",
"@pushrocks/smartfile-interfaces": "^1.0.7",
"@pushrocks/smarthash": "^3.0.2",
"@pushrocks/smartjson": "^5.0.6",
"@pushrocks/smartmime": "^1.0.5",
"@pushrocks/smartpath": "^5.0.5",
"@pushrocks/smartpromise": "^4.0.2",
"@pushrocks/smartrequest": "^2.0.15",
"@pushrocks/smartstream": "^2.0.3",
"@pushrocks/streamfunction": "^4.0.4",
"@types/fs-extra": "^11.0.1",
"@push.rocks/lik": "^6.0.5",
"@push.rocks/smartdelay": "^3.0.5",
"@push.rocks/smartfile-interfaces": "^1.0.7",
"@push.rocks/smarthash": "^3.0.4",
"@push.rocks/smartjson": "^5.0.10",
"@push.rocks/smartmime": "^1.0.5",
"@push.rocks/smartpath": "^5.0.11",
"@push.rocks/smartpromise": "^4.0.2",
"@push.rocks/smartrequest": "^2.0.20",
"@push.rocks/smartstream": "^3.0.7",
"@types/fs-extra": "^11.0.3",
"@types/glob": "^8.1.0",
"@types/js-yaml": "^4.0.5",
"@types/js-yaml": "^4.0.8",
"fs-extra": "^11.1.1",
"glob": "^10.3.3",
"glob": "^10.3.10",
"js-yaml": "^4.1.0"
},
"devDependencies": {
"@gitzone/tsbuild": "^2.1.66",
"@gitzone/tsrun": "^1.2.42",
"@gitzone/tstest": "^1.0.74",
"@pushrocks/tapbundle": "^5.0.8",
"@types/node": "^20.4.1"
"@git.zone/tsbuild": "^2.1.70",
"@git.zone/tsrun": "^1.2.46",
"@git.zone/tstest": "^1.0.81",
"@push.rocks/tapbundle": "^5.0.15",
"@types/node": "^20.8.10"
},
"files": [
"ts/**/*",

pnpm-lock.yaml (generated, 2798 lines changed)

File diff suppressed because it is too large

View File

@@ -1,26 +1,26 @@
# @pushrocks/smartfile
# @push.rocks/smartfile
smart ways to work with files in nodejs
## Availability and Links
* [npmjs.org (npm package)](https://www.npmjs.com/package/@pushrocks/smartfile)
* [gitlab.com (source)](https://gitlab.com/pushrocks/smartfile)
* [github.com (source mirror)](https://github.com/pushrocks/smartfile)
* [docs (typedoc)](https://pushrocks.gitlab.io/smartfile/)
* [npmjs.org (npm package)](https://www.npmjs.com/package/@push.rocks/smartfile)
* [gitlab.com (source)](https://gitlab.com/push.rocks/smartfile)
* [github.com (source mirror)](https://github.com/push.rocks/smartfile)
* [docs (typedoc)](https://push.rocks.gitlab.io/smartfile/)
## Status for master
Status Category | Status Badge
-- | --
GitLab Pipelines | [![pipeline status](https://gitlab.com/pushrocks/smartfile/badges/master/pipeline.svg)](https://lossless.cloud)
GitLab Pipeline Test Coverage | [![coverage report](https://gitlab.com/pushrocks/smartfile/badges/master/coverage.svg)](https://lossless.cloud)
npm | [![npm downloads per month](https://badgen.net/npm/dy/@pushrocks/smartfile)](https://lossless.cloud)
Snyk | [![Known Vulnerabilities](https://badgen.net/snyk/pushrocks/smartfile)](https://lossless.cloud)
GitLab Pipelines | [![pipeline status](https://gitlab.com/push.rocks/smartfile/badges/master/pipeline.svg)](https://lossless.cloud)
GitLab Pipeline Test Coverage | [![coverage report](https://gitlab.com/push.rocks/smartfile/badges/master/coverage.svg)](https://lossless.cloud)
npm | [![npm downloads per month](https://badgen.net/npm/dy/@push.rocks/smartfile)](https://lossless.cloud)
Snyk | [![Known Vulnerabilities](https://badgen.net/snyk/push.rocks/smartfile)](https://lossless.cloud)
TypeScript Support | [![TypeScript](https://badgen.net/badge/TypeScript/>=%203.x/blue?icon=typescript)](https://lossless.cloud)
node Support | [![node](https://img.shields.io/badge/node->=%2010.x.x-blue.svg)](https://nodejs.org/dist/latest-v10.x/docs/api/)
Code Style | [![Code Style](https://badgen.net/badge/style/prettier/purple)](https://lossless.cloud)
PackagePhobia (total standalone install weight) | [![PackagePhobia](https://badgen.net/packagephobia/install/@pushrocks/smartfile)](https://lossless.cloud)
PackagePhobia (package size on registry) | [![PackagePhobia](https://badgen.net/packagephobia/publish/@pushrocks/smartfile)](https://lossless.cloud)
BundlePhobia (total size when bundled) | [![BundlePhobia](https://badgen.net/bundlephobia/minzip/@pushrocks/smartfile)](https://lossless.cloud)
PackagePhobia (total standalone install weight) | [![PackagePhobia](https://badgen.net/packagephobia/install/@push.rocks/smartfile)](https://lossless.cloud)
PackagePhobia (package size on registry) | [![PackagePhobia](https://badgen.net/packagephobia/publish/@push.rocks/smartfile)](https://lossless.cloud)
BundlePhobia (total size when bundled) | [![BundlePhobia](https://badgen.net/bundlephobia/minzip/@push.rocks/smartfile)](https://lossless.cloud)
## Usage
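The hunk ends at the Usage heading, so the README's own example is not reproduced here. As a minimal sketch of the renamed scope in practice (assuming an ESM context with top-level await; the file path is arbitrary):

import * as smartfile from '@push.rocks/smartfile';

// read a file from disk into an in-memory SmartFile instance
const file = await smartfile.SmartFile.fromFilePath('./package.json');
console.log(file.contents.toString());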

test/test.streamfile.ts (new file, 67 lines added)
View File

@@ -0,0 +1,67 @@
import * as path from 'path';
import { expect, tap } from '@push.rocks/tapbundle';
import * as smartfile from '../ts/index.js'; // adjust the import path as needed
// Test assets path
const testAssetsPath = './test/testassets/';
// ---------------------------
// StreamFile tests
// ---------------------------
tap.test('StreamFile.fromPath should create a StreamFile from a file path', async () => {
const streamFile = await smartfile.StreamFile.fromPath(path.join(testAssetsPath, 'mytest.json'));
expect(streamFile).toBeInstanceOf(smartfile.StreamFile);
const contentBuffer = await streamFile.getContentAsBuffer();
expect(contentBuffer).toBeInstanceOf(Buffer);
});
tap.test('StreamFile.fromUrl should create a StreamFile from a URL', async () => {
const streamFile = await smartfile.StreamFile.fromUrl('http://example.com/somefile.json');
expect(streamFile).toBeInstanceOf(smartfile.StreamFile);
});
tap.test('StreamFile.fromBuffer should create a StreamFile from a Buffer', async () => {
const buffer = Buffer.from('Some content');
const streamFile = smartfile.StreamFile.fromBuffer(buffer, 'bufferfile.txt');
expect(streamFile).toBeInstanceOf(smartfile.StreamFile);
});
tap.test('StreamFile should write the stream to disk', async () => {
const streamFile = await smartfile.StreamFile.fromPath(path.join(testAssetsPath, 'mytest.json'));
await streamFile.writeToDisk(path.join(testAssetsPath, 'temp', 'mytest.json'));
// Verify the file was written
expect(
// We'll use the fileExists method from your smartfile library
// Replace with the actual method you use to check file existence
await smartfile.fs.fileExists(path.join(testAssetsPath, 'temp', 'mytest.json'))
).toBeTrue();
});
tap.test('StreamFile should write to a directory', async () => {
const streamFile = await smartfile.StreamFile.fromPath(path.join(testAssetsPath, 'mytest.json'));
await streamFile.writeToDir(path.join(testAssetsPath, 'temp'));
// Verify the file was written
expect(
await smartfile.fs.fileExists(path.join(testAssetsPath, 'temp', 'mytest.json'))
).toBeTrue();
});
tap.test('StreamFile should return content as a buffer', async () => {
const streamFile = await smartfile.StreamFile.fromPath(path.join(testAssetsPath, 'mytest.json'));
const contentBuffer = await streamFile.getContentAsBuffer();
expect(contentBuffer).toBeInstanceOf(Buffer);
// Further checks on the content can be added here if necessary
});
tap.test('StreamFile should return content as a string', async () => {
const streamFile = await smartfile.StreamFile.fromPath(path.join(testAssetsPath, 'mytest.json'));
const contentString = await streamFile.getContentAsString();
expect(typeof contentString).toBeTypeofString();
// Verify the content matches what's expected
// This assumes the file contains a JSON object with a key 'key1' with value 'this works'
expect(JSON.parse(contentString).key1).toEqual('this works');
});
// Start the test sequence
tap.start();

View File

@@ -1,7 +1,7 @@
import * as smartfile from '../ts/index.js';
import * as path from 'path';
import { expect, tap } from '@pushrocks/tapbundle';
import { expect, tap } from '@push.rocks/tapbundle';
// ---------------------------
// smartfile.fs
@@ -64,7 +64,7 @@ tap.test('.fs.fileTreeToObject -> should read a file tree into an Object', async
path.resolve('./test/testassets/'),
'**/*.txt'
);
expect(fileArrayArg[0]).toBeInstanceOf(smartfile.Smartfile);
expect(fileArrayArg[0]).toBeInstanceOf(smartfile.SmartFile);
expect(fileArrayArg[0].contents.toString()).toEqual(fileArrayArg[0].contentBuffer.toString());
});
@@ -175,7 +175,7 @@ tap.test('.Smartfile -> should produce vinyl compatible files', async () => {
'./test/testassets/testfolder/**/*'
);
const localSmartfile = smartfileArray[0];
expect(localSmartfile).toBeInstanceOf(smartfile.Smartfile);
expect(localSmartfile).toBeInstanceOf(smartfile.SmartFile);
expect(localSmartfile.contents).toBeInstanceOf(Buffer);
// tslint:disable-next-line:no-unused-expression
expect(localSmartfile.isBuffer()).toBeTrue();
@@ -202,9 +202,9 @@ tap.test('should output a smartfile array to disk', async () => {
tap.test('should create, store and retrieve valid smartfiles', async () => {
const fileString = 'hi there';
const filePath = './test/testassets/utf8.txt';
const smartfileInstance = await smartfile.Smartfile.fromString(filePath, fileString, 'utf8');
const smartfileInstance = await smartfile.SmartFile.fromString(filePath, fileString, 'utf8');
smartfileInstance.write();
const smartfileInstance2 = await smartfile.Smartfile.fromFilePath(filePath);
const smartfileInstance2 = await smartfile.SmartFile.fromFilePath(filePath);
const retrievedString = smartfileInstance.contents.toString();
expect(retrievedString).toEqual(fileString);
});
@@ -212,7 +212,7 @@ tap.test('should create, store and retrieve valid smartfiles', async () => {
tap.test('should get a hash', async () => {
const fileString = 'hi there';
const filePath = './test/testassets/utf8.txt';
const smartfileInstance = await smartfile.Smartfile.fromString(filePath, fileString, 'utf8');
const smartfileInstance = await smartfile.SmartFile.fromString(filePath, fileString, 'utf8');
const hash = await smartfileInstance.getHash();
console.log(hash);
});

View File

@@ -1,4 +1,4 @@
import { tap, expect } from '@pushrocks/tapbundle';
import { tap, expect } from '@push.rocks/tapbundle';
import * as smartfile from '../ts/index.js';

View File

@@ -0,0 +1,8 @@
{
"key1": "this works",
"key2": "this works too",
"key3": {
"nestedkey1": "hello"
}
}

View File

@@ -0,0 +1,8 @@
{
"key1": "this works",
"key2": "this works too",
"key3": {
"nestedkey1": "hello"
}
}

View File

@@ -3,6 +3,6 @@
*/
export const commitinfo = {
name: '@push.rocks/smartfile',
version: '10.0.27',
version: '11.0.2',
description: 'offers smart ways to work with files in nodejs'
}

View File

@@ -1,6 +1,6 @@
import * as plugins from './smartfile.plugins.js';
import * as fs from './smartfile.fs.js';
import * as memory from './smartfile.memory.js';
import * as fs from './fs.js';
import * as memory from './memory.js';
export interface ISmartfileConstructorOptions {
path: string;
@@ -9,10 +9,9 @@ export interface ISmartfileConstructorOptions {
}
/**
* class Smartfile
* -> is vinyl file compatible
* a vinyl-compatible in-memory file class
*/
export class Smartfile extends plugins.smartjson.Smartjson {
export class SmartFile extends plugins.smartjson.Smartjson {
// ======
// STATIC
// ======
@@ -24,7 +23,7 @@ export class Smartfile extends plugins.smartjson.Smartjson {
public static async fromFilePath(filePath: string, baseArg: string = process.cwd()) {
filePath = plugins.path.resolve(filePath);
const fileBuffer = fs.toBufferSync(filePath);
const smartfile = new Smartfile({
const smartfile = new SmartFile({
contentBuffer: fileBuffer,
base: baseArg,
path: plugins.path.relative(baseArg, filePath),
@@ -37,7 +36,7 @@ export class Smartfile extends plugins.smartjson.Smartjson {
contentBufferArg: Buffer,
baseArg: string = process.cwd()
) {
const smartfile = new Smartfile({
const smartfile = new SmartFile({
contentBuffer: contentBufferArg,
base: baseArg,
path: plugins.path.relative(baseArg, filePath),
@@ -52,7 +51,7 @@ export class Smartfile extends plugins.smartjson.Smartjson {
encodingArg: 'utf8' | 'binary',
baseArg = process.cwd()
) {
const smartfile = new Smartfile({
const smartfile = new SmartFile({
contentBuffer: Buffer.from(contentStringArg, encodingArg),
base: baseArg,
path: plugins.path.relative(baseArg, filePath),
@@ -62,7 +61,34 @@ export class Smartfile extends plugins.smartjson.Smartjson {
}
public static async fromFoldedJson(foldedJsonArg: string) {
return new Smartfile(plugins.smartjson.parse(foldedJsonArg));
return new SmartFile(plugins.smartjson.parse(foldedJsonArg));
}
/**
* creates a Smartfile from a ReadableStream
* @param stream a readable stream that provides file content
* @param filePath the file path to associate with the content
* @param baseArg the base path to use for the file
*/
public static async fromStream(
stream: plugins.stream.Readable,
filePath: string,
baseArg: string = process.cwd()
): Promise<SmartFile> {
return new Promise<SmartFile>((resolve, reject) => {
const chunks: Buffer[] = [];
stream.on('data', (chunk) => chunks.push(Buffer.from(chunk)));
stream.on('error', (error) => reject(error));
stream.on('end', () => {
const contentBuffer = Buffer.concat(chunks);
const smartfile = new SmartFile({
contentBuffer: contentBuffer,
base: baseArg,
path: plugins.path.relative(baseArg, filePath),
});
resolve(smartfile);
});
});
}
// ========
@@ -128,7 +154,7 @@ export class Smartfile extends plugins.smartjson.Smartjson {
* @param contentString
*/
public setContentsFromString(contentString: string, encodingArg: 'utf8' | 'binary' = 'utf8') {
this.contents = new Buffer(contentString, encodingArg);
this.contents = Buffer.from(contentString, encodingArg);
}
/**
@@ -262,4 +288,14 @@ export class Smartfile extends plugins.smartjson.Smartjson {
const newFileString = await editFuncArg(this.contentBuffer.toString());
this.contentBuffer = Buffer.from(newFileString);
}
/**
* Returns a ReadableStream from the file's content buffer
*/
public getStream(): plugins.stream.Readable {
const stream = new plugins.stream.Readable();
stream.push(this.contentBuffer); // Push the content buffer to the stream
stream.push(null); // Push null to signify the end of the stream (EOF)
return stream;
}
}
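A hedged round-trip sketch for the two stream helpers added above, getStream() and fromStream(); the paths are hypothetical and an ESM context with top-level await is assumed:

import { SmartFile } from '@push.rocks/smartfile';

// load a file, expose its content buffer as a Readable, then rebuild a SmartFile from that stream
const original = await SmartFile.fromFilePath('./package.json');
const readable = original.getStream();
const copy = await SmartFile.fromStream(readable, './package.copy.json');
console.log(copy.contentBuffer.equals(original.contentBuffer)); // true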

ts/classes.streamfile.ts (new file, 152 lines added)
View File

@@ -0,0 +1,152 @@
import * as plugins from './smartfile.plugins.js';
import * as smartfileFs from './fs.js';
import * as smartfileFsStream from './fsstream.js';
import { Readable } from 'stream';
type TStreamSource = (streamFile: StreamFile) => Promise<Readable>;
/**
* The StreamFile class represents a file as a stream.
* It allows creating streams from a file path, a URL, or a buffer.
*/
export class StreamFile {
// INSTANCE
relativeFilePath?: string;
private streamSource: TStreamSource;
// enable stream based multi use
private cachedStreamBuffer?: Buffer;
public multiUse: boolean;
public used: boolean = false;
private constructor(streamSource: TStreamSource, relativeFilePath?: string) {
this.streamSource = streamSource;
this.relativeFilePath = relativeFilePath;
}
// STATIC
public static async fromPath(filePath: string): Promise<StreamFile> {
const streamSource: TStreamSource = async (streamFileArg) => smartfileFsStream.createReadStream(filePath);
const streamFile = new StreamFile(streamSource, filePath);
streamFile.multiUse = true;
return streamFile;
}
public static async fromUrl(url: string): Promise<StreamFile> {
const streamSource: TStreamSource = async (streamFileArg) => plugins.smartrequest.getStream(url); // Replace with actual plugin method
const streamFile = new StreamFile(streamSource);
streamFile.multiUse = true;
return streamFile;
}
public static fromBuffer(buffer: Buffer, relativeFilePath?: string): StreamFile {
const streamSource: TStreamSource = async (streamFileArg) => {
const stream = new Readable();
stream.push(buffer);
stream.push(null); // End of stream
return stream;
};
const streamFile = new StreamFile(streamSource, relativeFilePath);
streamFile.multiUse = true;
return streamFile;
}
/**
* Creates a StreamFile from an existing Readable stream with an option for multiple uses.
* @param stream A Node.js Readable stream.
* @param relativeFilePath Optional file path for the stream.
* @param multiUse If true, the stream can be read multiple times, caching its content.
* @returns A StreamFile instance.
*/
public static fromStream(stream: Readable, relativeFilePath?: string, multiUse: boolean = false): StreamFile {
const streamSource: TStreamSource = (streamFileArg) => {
if (streamFileArg.multiUse) {
// If multi-use is enabled and we have cached content, create a new readable stream from the buffer
const bufferedStream = new Readable();
bufferedStream.push(streamFileArg.cachedStreamBuffer);
bufferedStream.push(null); // No more data to push
return Promise.resolve(bufferedStream);
} else {
return Promise.resolve(stream);
}
};
const streamFile = new StreamFile(streamSource, relativeFilePath);
streamFile.multiUse = multiUse;
// If multi-use is enabled, cache the stream when it's first read
if (multiUse) {
const chunks: Buffer[] = [];
stream.on('data', (chunk) => chunks.push(Buffer.from(chunk)));
stream.on('end', () => {
streamFile.cachedStreamBuffer = Buffer.concat(chunks);
});
// It's important to handle errors that may occur during streaming
stream.on('error', (err) => {
console.error('Error while caching stream:', err);
});
}
return streamFile;
}
// METHODS
private checkMultiUse() {
if (!this.multiUse && this.used) {
throw new Error('This stream can only be used once.');
}
this.used = true;
}
/**
* Creates a new readable stream from the source.
*/
public async createReadStream(): Promise<Readable> {
return this.streamSource(this);
}
/**
* Writes the stream to the disk at the specified path.
* @param filePathArg The file path where the stream should be written.
*/
public async writeToDisk(filePathArg: string): Promise<void> {
this.checkMultiUse();
const readStream = await this.createReadStream();
const writeStream = smartfileFsStream.createWriteStream(filePathArg);
return new Promise((resolve, reject) => {
readStream.pipe(writeStream);
readStream.on('error', reject);
writeStream.on('error', reject);
writeStream.on('finish', resolve);
});
}
public async writeToDir(dirPathArg: string) {
this.checkMultiUse();
const filePath = plugins.path.join(dirPathArg, this.relativeFilePath);
await smartfileFs.ensureDir(plugins.path.parse(filePath).dir);
return this.writeToDisk(filePath);
}
public async getContentAsBuffer() {
this.checkMultiUse();
const done = plugins.smartpromise.defer<Buffer>();
const readStream = await this.createReadStream();
const chunks: Buffer[] = [];
readStream.on('data', (chunk) => chunks.push(Buffer.from(chunk)));
readStream.on('error', done.reject);
readStream.on('end', () => {
const contentBuffer = Buffer.concat(chunks);
done.resolve(contentBuffer);
});
return done.promise;
}
public async getContentAsString(formatArg: 'utf8' | 'binary' = 'utf8') {
const contentBuffer = await this.getContentAsBuffer();
return contentBuffer.toString(formatArg);
}
}
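A usage sketch against the StreamFile API defined above; the asset paths mirror the test file earlier in this compare, the target directory is assumed to exist, and top-level await is assumed:

import { StreamFile } from '@push.rocks/smartfile';

// stream a file from disk to a new location without buffering it whole up front
const streamFile = await StreamFile.fromPath('./test/testassets/mytest.json');
await streamFile.writeToDisk('./test/testassets/temp/mytest.json');

// fromPath marks the instance as multiUse, so it may be consumed again afterwards
console.log(await streamFile.getContentAsString());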

View File

@@ -1,11 +1,21 @@
import { Smartfile } from './smartfile.classes.smartfile.js';
import { SmartFile } from './classes.smartfile.js';
import * as plugins from './smartfile.plugins.js';
import * as fs from './smartfile.fs.js';
import * as fs from './fs.js';
export interface IVirtualDirectoryConstructorOptions {
mode: ''
}
/**
* a virtual directory exposes a fs api
*/
export class VirtualDirectory {
consstructor(options = {}) {
}
// STATIC
public static async fromFsDirPath(pathArg: string): Promise<VirtualDirectory> {
const newVirtualDir = new VirtualDirectory();
@@ -18,17 +28,17 @@ export class VirtualDirectory {
): Promise<VirtualDirectory> {
const newVirtualDir = new VirtualDirectory();
for (const fileArg of virtualDirTransferableObjectArg.files) {
newVirtualDir.addSmartfiles([Smartfile.enfoldFromJson(fileArg) as Smartfile]);
newVirtualDir.addSmartfiles([SmartFile.enfoldFromJson(fileArg) as SmartFile]);
}
return newVirtualDir;
}
// INSTANCE
public smartfileArray: Smartfile[] = [];
public smartfileArray: SmartFile[] = [];
constructor() {}
public addSmartfiles(smartfileArrayArg: Smartfile[]) {
public addSmartfiles(smartfileArrayArg: SmartFile[]) {
this.smartfileArray = this.smartfileArray.concat(smartfileArrayArg);
}
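A short sketch using only the members visible in this hunk (the directory path is hypothetical; top-level await assumed):

import * as smartfile from '@push.rocks/smartfile';

// collect a directory tree into memory as SmartFile instances
const vdir = await smartfile.VirtualDirectory.fromFsDirPath('./ts');
console.log(`${vdir.smartfileArray.length} files loaded`);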

View File

@@ -1,9 +1,9 @@
import * as plugins from './smartfile.plugins.js';
import * as interpreter from './smartfile.interpreter.js';
import * as interpreter from './interpreter.js';
import { Smartfile } from './smartfile.classes.smartfile.js';
import { SmartFile } from './classes.smartfile.js';
import * as memory from './smartfile.memory.js';
import * as memory from './memory.js';
/*===============================================================
============================ Checks =============================
===============================================================*/
@@ -40,7 +40,18 @@ export const fileExists = async (filePath): Promise<boolean> => {
/**
* Checks if given path points to an existing directory
*/
export const isDirectory = (pathArg): boolean => {
export const isDirectory = (pathArg: string): boolean => {
try {
return plugins.fsExtra.statSync(pathArg).isDirectory();
} catch (err) {
return false;
}
};
/**
* Checks if given path points to an existing directory
*/
export const isDirectorySync = (pathArg: string): boolean => {
try {
return plugins.fsExtra.statSync(pathArg).isDirectory();
} catch (err) {
@@ -210,6 +221,18 @@ export const toBufferSync = (filePath: string): Buffer => {
return plugins.fsExtra.readFileSync(filePath);
};
/**
* Creates a Readable Stream from a file path.
* @param filePath The path to the file.
* @returns {fs.ReadStream}
*/
export const toReadStream = (filePath: string): plugins.fs.ReadStream => {
if (!fileExistsSync(filePath)) {
throw new Error(`File does not exist at path: ${filePath}`);
}
return plugins.fsExtra.createReadStream(filePath);
};
export const fileTreeToHash = async (dirPathArg: string, miniMatchFilter: string) => {
const fileTreeObject = await fileTreeToObject(dirPathArg, miniMatchFilter);
let combinedString = '';
@@ -235,7 +258,7 @@ export const fileTreeToObject = async (dirPathArg: string, miniMatchFilter: stri
}
const fileTree = await listFileTree(dirPath, miniMatchFilter);
const smartfileArray: Smartfile[] = [];
const smartfileArray: SmartFile[] = [];
for (const filePath of fileTree) {
const readPath = ((): string => {
if (!plugins.path.isAbsolute(filePath)) {
@@ -244,12 +267,12 @@ export const fileTreeToObject = async (dirPathArg: string, miniMatchFilter: stri
return filePath;
}
})();
const fileContentString = toStringSync(readPath);
const fileBuffer = plugins.fs.readFileSync(readPath);
// push a read file as Smartfile
smartfileArray.push(
new Smartfile({
contentBuffer: Buffer.from(fileContentString),
new SmartFile({
contentBuffer: fileBuffer,
base: dirPath,
path: filePath,
})
@@ -366,27 +389,52 @@
};
/**
* checks wether a file is ready for processing
* Watches for file stability before resolving the promise.
*/
export const waitForFileToBeReady = async (filePathArg: string): Promise<void> => {
if (!plugins.path.isAbsolute(filePathArg)) {
filePathArg = plugins.path.resolve(filePathArg);
}
const limitedArray = new plugins.lik.LimitedArray<number>(3);
let fileReady = false;
while (!fileReady) {
const stats = await plugins.fsExtra.stat(filePathArg);
limitedArray.addOne(stats.size);
if (
limitedArray.array.length < 3 ||
!(
limitedArray.array[0] === limitedArray.array[1] &&
limitedArray.array[1] === limitedArray.array[2]
)
) {
await plugins.smartdelay.delayFor(5000);
} else {
fileReady = true;
}
}
export const waitForFileToBeReady = (filePathArg: string): Promise<void> => {
return new Promise((resolve, reject) => {
let lastSize = -1;
let stableCheckTimeout: NodeJS.Timeout | null = null;
const clearStableCheckTimeout = () => {
if (stableCheckTimeout) {
clearTimeout(stableCheckTimeout);
stableCheckTimeout = null;
}
};
const watcher = plugins.fs.watch(filePathArg, (eventType, filename) => {
if (eventType === 'change') {
plugins.fs.stat(filePathArg, (err, stats) => {
if (err) {
watcher.close();
clearStableCheckTimeout();
reject(err);
return;
}
if (stats.size === lastSize) {
clearStableCheckTimeout();
stableCheckTimeout = setTimeout(() => {
watcher.close();
resolve();
}, 5000); // stability duration
} else {
lastSize = stats.size;
}
});
}
});
watcher.on('error', (error) => {
clearStableCheckTimeout();
watcher.close();
reject(error);
});
});
};
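A sketch of how the rewritten helper might be used; the path is hypothetical, and because the new implementation is built on fs.watch it assumes the file already exists and only resolves after at least one further change event followed by the stability window:

import * as smartfile from '@push.rocks/smartfile';

// block until the watched file has stopped growing, then read it
await smartfile.fs.waitForFileToBeReady('./incoming/upload.bin');
const data = smartfile.fs.toBufferSync('./incoming/upload.bin');
console.log(`file is stable: ${data.length} bytes`);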

ts/fsstream.ts (new file, 195 lines added)
View File

@@ -0,0 +1,195 @@
/*
This file contains logic for streaming things from and to the filesystem
*/
import * as plugins from './smartfile.plugins.js';
export const createReadStream = (pathArg: string) => {
return plugins.fs.createReadStream(pathArg);
};
export const createWriteStream = (pathArg: string) => {
return plugins.fs.createWriteStream(pathArg);
};
export const processFile = async (
filePath: string,
asyncFunc: (fileStream: plugins.stream.Readable) => Promise<void>
): Promise<void> => {
return new Promise((resolve, reject) => {
const fileStream = createReadStream(filePath);
asyncFunc(fileStream).then(resolve).catch(reject);
});
}
export const processDirectory = async (
directoryPath: string,
asyncFunc: (fileStream: plugins.stream.Readable) => Promise<void>
): Promise<void> => {
const files = plugins.fs.readdirSync(directoryPath, { withFileTypes: true });
for (const file of files) {
const fullPath = plugins.path.join(directoryPath, file.name);
if (file.isDirectory()) {
await processDirectory(fullPath, asyncFunc); // Recursively call processDirectory for directories
} else if (file.isFile()) {
await processFile(fullPath, asyncFunc); // Call async function with the file stream and wait for it
}
}
};
/**
* Checks if a file is ready to be streamed (exists and is not empty).
*/
export const isFileReadyForStreaming = async (filePathArg: string): Promise<boolean> => {
try {
const stats = await plugins.fs.promises.stat(filePathArg);
return stats.size > 0;
} catch (error) {
if (error.code === 'ENOENT') { // File does not exist
return false;
}
throw error; // Rethrow other unexpected errors
}
};
/**
* Waits for a file to be ready for streaming (exists and is not empty).
*/
export const waitForFileToBeReadyForStreaming = (filePathArg: string): Promise<void> => {
return new Promise((resolve, reject) => {
// Normalize and resolve the file path
const filePath = plugins.path.resolve(filePathArg);
// Function to check file stats
const checkFile = (resolve: () => void, reject: (reason: any) => void) => {
plugins.fs.stat(filePath, (err, stats) => {
if (err) {
if (err.code === 'ENOENT') {
// File not found, wait and try again
return;
}
// Some other error occurred
return reject(err);
}
if (stats.size > 0) {
// File exists and is not empty, resolve the promise
resolve();
}
});
};
// Set up file watcher
const watcher = plugins.fs.watch(filePath, { persistent: false }, (eventType) => {
if (eventType === 'change' || eventType === 'rename') {
checkFile(resolve, reject);
}
});
// Check file immediately in case it's already ready
checkFile(resolve, reject);
// Error handling
watcher.on('error', (error) => {
watcher.close();
reject(error);
});
});
};
class SmartReadStream extends plugins.stream.Readable {
private watcher: plugins.fs.FSWatcher | null = null;
private lastReadSize: number = 0;
private endTimeout: NodeJS.Timeout | null = null;
private filePath: string;
private endDelay: number;
private reading: boolean = false;
constructor(filePath: string, endDelay = 60000, opts?: plugins.stream.ReadableOptions) {
super(opts);
this.filePath = filePath;
this.endDelay = endDelay;
}
private startWatching(): void {
this.watcher = plugins.fs.watch(this.filePath, (eventType) => {
if (eventType === 'change') {
this.resetEndTimeout();
}
});
this.watcher.on('error', (error) => {
this.cleanup();
this.emit('error', error);
});
}
private resetEndTimeout(): void {
if (this.endTimeout) clearTimeout(this.endTimeout);
this.endTimeout = setTimeout(() => this.checkForEnd(), this.endDelay);
}
private checkForEnd(): void {
plugins.fs.stat(this.filePath, (err, stats) => {
if (err) {
this.emit('error', err);
return;
}
if (this.lastReadSize === stats.size) {
this.push(null); // Signal the end of the stream
this.cleanup();
} else {
this.lastReadSize = stats.size;
this.resetEndTimeout();
if (!this.reading) {
// We only want to continue reading if we were previously waiting for more data
this.reading = true;
this._read(10000); // Try to read more data
}
}
});
}
private cleanup(): void {
if (this.endTimeout) clearTimeout(this.endTimeout);
if (this.watcher) this.watcher.close();
}
_read(size: number): void {
this.reading = true;
const chunkSize = Math.min(size, 16384); // Read in chunks of 16KB
const buffer = Buffer.alloc(chunkSize);
plugins.fs.open(this.filePath, 'r', (err, fd) => {
if (err) {
this.emit('error', err);
return;
}
plugins.fs.read(fd, buffer, 0, chunkSize, this.lastReadSize, (err, bytesRead, buffer) => {
if (err) {
this.emit('error', err);
return;
}
if (bytesRead > 0) {
this.lastReadSize += bytesRead;
this.push(buffer.slice(0, bytesRead)); // Push the data onto the stream
} else {
this.reading = false; // No more data to read for now
this.resetEndTimeout();
}
plugins.fs.close(fd, (err) => {
if (err) {
this.emit('error', err);
}
});
});
});
}
_destroy(error: Error | null, callback: (error: Error | null) => void): void {
this.cleanup();
callback(error);
}
}
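A sketch built on the processDirectory helper added above, hashing every file under a directory; the directory path is hypothetical, and note that the callback receives only the stream, not the file name:

import * as crypto from 'crypto';
import * as smartfile from '@push.rocks/smartfile';

await smartfile.fsStream.processDirectory('./ts', async (fileStream) => {
  // Readable streams are async-iterable, so each file can be hashed chunk by chunk
  const hash = crypto.createHash('sha256');
  for await (const chunk of fileStream) {
    hash.update(chunk);
  }
  console.log(hash.digest('hex'));
});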

View File

@@ -1,11 +1,12 @@
import * as plugins from './smartfile.plugins.js';
import * as fsMod from './smartfile.fs.js';
import * as fsStreamMod from './smartfile.fsstream.js';
import * as interpreterMod from './smartfile.interpreter.js';
import * as memoryMod from './smartfile.memory.js';
import * as fsMod from './fs.js';
import * as fsStreamMod from './fsstream.js';
import * as interpreterMod from './interpreter.js';
import * as memoryMod from './memory.js';
export * from './smartfile.classes.smartfile.js';
export * from './smartfile.classes.virtualdirectory.js';
export * from './classes.smartfile.js';
export * from './classes.streamfile.js';
export * from './classes.virtualdirectory.js';
export const fs = fsMod;
export const fsStream = fsStreamMod;
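Taken together, the re-exports above define the package's flattened import surface; a one-line sketch of what consumers can now pull in (package name per the renamed scope):

import { fs, fsStream, SmartFile, StreamFile, VirtualDirectory } from '@push.rocks/smartfile';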

View File

@@ -1,7 +1,8 @@
import * as plugins from './smartfile.plugins.js';
import { Smartfile } from './smartfile.classes.smartfile.js';
import * as smartfileFs from './smartfile.fs.js';
import * as interpreter from './smartfile.interpreter.js';
import { SmartFile } from './classes.smartfile.js';
import * as smartfileFs from './fs.js';
import * as interpreter from './interpreter.js';
import type { StreamFile } from './classes.streamfile.js';
/**
* converts file to Object
@@ -24,7 +25,7 @@ export interface IToFsOptions {
* @param fileBaseArg
*/
export let toFs = async (
fileContentArg: string | Buffer | Smartfile,
fileContentArg: string | Buffer | SmartFile | StreamFile,
filePathArg: string,
optionsArg: IToFsOptions = {}
) => {
@@ -41,7 +42,7 @@ export let toFs = async (
let filePath: string = filePathArg;
// handle Smartfile
if (fileContentArg instanceof Smartfile) {
if (fileContentArg instanceof SmartFile) {
fileContent = fileContentArg.contentBuffer;
// handle options
if (optionsArg.respectRelative) {
@@ -83,7 +84,7 @@ export const toFsSync = (fileArg: string, filePathArg: string) => {
plugins.fsExtra.writeFileSync(filePath, fileString, { encoding: 'utf8' });
};
export let smartfileArrayToFs = async (smartfileArrayArg: Smartfile[], dirArg: string) => {
export let smartfileArrayToFs = async (smartfileArrayArg: SmartFile[], dirArg: string) => {
await smartfileFs.ensureDir(dirArg);
for (const smartfile of smartfileArrayArg) {
await toFs(smartfile, dirArg, {
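The hunk is cut off above, but the widened signature is visible: toFs now accepts SmartFile and StreamFile instances as well as strings and buffers. A hedged sketch (the output path is hypothetical, the target directory is assumed to exist, and the memory export name is assumed from the package index):

import * as smartfile from '@push.rocks/smartfile';

// build a file in memory, then persist it through memory.toFs
const file = await smartfile.SmartFile.fromString('./notes/hello.txt', 'hi there', 'utf8');
await smartfile.memory.toFs(file, './output/hello.txt');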

View File

@@ -1,14 +0,0 @@
/*
This file contains logic for streaming things from and to the filesystem
*/
import * as plugins from './smartfile.plugins.js';
export const createReadStream = (pathArg: string) => {
return plugins.fs.createReadStream(pathArg);
};
export const createWriteStream = (pathArg: string) => {
return plugins.fs.createWriteStream(pathArg);
};
export const streamDirectory = async (dirPathArg: string) => {};

View File

@@ -1,20 +1,21 @@
// node native scope
import * as fs from 'fs';
import * as path from 'path';
import * as stream from 'stream';
export { fs, path };
export { fs, path, stream };
// @pushrocks scope
import * as lik from '@pushrocks/lik';
import * as smartfileInterfaces from '@pushrocks/smartfile-interfaces';
import * as smartdelay from '@pushrocks/smartdelay';
import * as smarthash from '@pushrocks/smarthash';
import * as smartjson from '@pushrocks/smartjson';
import * as smartmime from '@pushrocks/smartmime';
import * as smartpath from '@pushrocks/smartpath';
import * as smartpromise from '@pushrocks/smartpromise';
import * as smartrequest from '@pushrocks/smartrequest';
import * as smartstream from '@pushrocks/smartstream';
import * as lik from '@push.rocks/lik';
import * as smartfileInterfaces from '@push.rocks/smartfile-interfaces';
import * as smartdelay from '@push.rocks/smartdelay';
import * as smarthash from '@push.rocks/smarthash';
import * as smartjson from '@push.rocks/smartjson';
import * as smartmime from '@push.rocks/smartmime';
import * as smartpath from '@push.rocks/smartpath';
import * as smartpromise from '@push.rocks/smartpromise';
import * as smartrequest from '@push.rocks/smartrequest';
import * as smartstream from '@push.rocks/smartstream';
export {
lik,