feat(archive): introduce ts_shared browser-compatible layer, refactor Node-specific tools to wrap/shared implementations, and modernize archive handling

2026-01-01 23:09:06 +00:00
parent 4e3c5a8443
commit 6393527c95
37 changed files with 2850 additions and 5105 deletions


@@ -1,5 +1,17 @@
# Changelog
## 2026-01-01 - 5.1.0 - feat(archive)
introduce ts_shared browser-compatible layer, refactor Node-specific tools to wrap/shared implementations, and modernize archive handling
- Split code into ts_shared (browser-compatible) and ts_web entrypoint; node-specific wrappers remain under ts/
- Switched TAR implementation from tar-stream to modern-tar and replaced stream-based TAR handling with Uint8Array-based pack/unpack
Normalized shared APIs to use Uint8Array (instead of Buffer) for browser compatibility; callers may need to adapt Buffer/Uint8Array usage (breaking change; see the sketch after this changelog excerpt)
- Moved BZIP2, GZIP, ZIP, TAR logic into ts_shared and updated plugins to re-export shared plugins for web builds
- Adjusted classes.smartarchive to consume shared tools and convert between Buffer and Uint8Array where needed
Added package.json exports for "." and "./web", bumped several dependency/devDependency versions, and added the modern-tar dependency
- Updated npmextra.json with new scoped configuration and release registries
- Removed pnpm-workspace.yaml entries and deleted several legacy Node-only files in favor of shared implementations
## 2025-11-25 - 5.0.1 - fix(ziptools,gziptools)
Use fflate synchronous APIs for ZIP and GZIP operations for Deno compatibility; add TEntryFilter type and small docs/tests cleanup
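The Uint8Array normalization above is the breaking part of this release. A minimal adaptation sketch for Node.js callers, assuming 5.0.x code that passed Buffers to the shared ZipTools (the createZip signature matches the shared implementation shown later in this diff):

```ts
import { ZipTools } from '@push.rocks/smartarchive';

async function main() {
  const zipTools = new ZipTools();
  const content: Buffer = Buffer.from('hello world');

  // Buffer extends Uint8Array, so passing one in keeps working;
  // the explicit conversion just makes the new contract visible.
  const zipped: Uint8Array = await zipTools.createZip([
    { archivePath: 'hello.txt', content: new Uint8Array(content) },
  ]);

  // Results are now Uint8Array; convert back where a Buffer is required.
  const zippedBuffer: Buffer = Buffer.from(zipped);
  console.log(zippedBuffer.length);
}

main();
```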

deno.lock (generated, 1532 lines changed)

File diff suppressed because it is too large.

dist_ts/index.d.ts (vendored, 8 lines changed)

@@ -1,8 +1,4 @@
export * from './interfaces.js';
export * from './errors.js';
export * from '../ts_shared/index.js';
export * from './classes.smartarchive.js';
export * from './classes.tartools.js';
export * from './classes.ziptools.js';
export * from './classes.gziptools.js';
export * from './classes.bzip2tools.js';
export * from './classes.archiveanalyzer.js';
export { TarTools } from './classes.tartools.js';


@@ -1,13 +1,9 @@
// Core types and errors
export * from './interfaces.js';
export * from './errors.js';
// Main archive class
// Re-export everything from ts_shared (browser-compatible)
export * from '../ts_shared/index.js';
// Node.js-specific: Main archive class with filesystem support
export * from './classes.smartarchive.js';
// Format-specific tools
export * from './classes.tartools.js';
export * from './classes.ziptools.js';
export * from './classes.gziptools.js';
export * from './classes.bzip2tools.js';
// Archive analysis
// Node.js-specific: Archive analysis with SmartArchive integration
export * from './classes.archiveanalyzer.js';
//# sourceMappingURL=data:application/json;base64,…
// Node.js-specific: Extended TarTools with filesystem support (overrides shared TarTools)
export { TarTools } from './classes.tartools.js';
//# sourceMappingURL=data:application/json;base64,…


@@ -1,34 +0,0 @@
/// <reference types="node" resolution-mode="require"/>
import * as plugins from './smartarchive.plugins.js';
export declare class SmartArchive {
constructor();
/**
* extracts an archive from a given url
*/
extractArchiveFromUrlToFs(urlArg: string, targetDir: string): Promise<void>;
/**
* extracts an archive from a given filePath on disk
* @param filePathArg
* @param targetDirArg
*/
extractArchiveFromFilePathToFs(filePathArg: string, targetDirArg: string): Promise<void>;
/**
* extracts to Observable
* where the Observable is emitting smartfiles
*/
extractArchiveFromBufferToObservable(bufferArg: Buffer): Promise<plugins.smartrx.rxjs.ReplaySubject<plugins.smartfile.Smartfile>>;
extractArchiveWithIntakeAndReplaySubject(): {
intake: plugins.smartstream.StreamIntake<Buffer>;
replaySubject: plugins.smartrx.rxjs.ReplaySubject<plugins.smartfile.Smartfile>;
};
/**
* extracts to Observable
*/
extractArchiveFromUrlToObservable(urlArg: string): Promise<plugins.smartrx.rxjs.ReplaySubject<plugins.smartfile.Smartfile>>;
extractArchiveFromUrlToStream(): Promise<void>;
extractArchiveFromFilePathToStream(): Promise<void>;
extractArchiveFromStreamToStream(): Promise<void>;
packFromStreamToStream(): Promise<void>;
packFromDirPathToStream(): Promise<void>;
packFromDirPathToFs(): Promise<void>;
}

File diff suppressed because one or more lines are too long


@@ -1,2 +0,0 @@
export declare const packageDir: string;
export declare const nogitDir: string;


@@ -1,4 +0,0 @@
import * as plugins from './smartarchive.plugins.js';
export const packageDir = plugins.path.join(plugins.smartpath.get.dirnameFromImportMetaUrl(import.meta.url), '../');
export const nogitDir = plugins.path.join(packageDir, './.nogit');
//# sourceMappingURL=data:application/json;base64,…


@@ -1,14 +0,0 @@
import * as path from 'path';
export { path };
import * as smartfile from '@push.rocks/smartfile';
import * as smartpath from '@push.rocks/smartpath';
import * as smartpromise from '@push.rocks/smartpromise';
import * as smartrequest from '@push.rocks/smartrequest';
import * as smartunique from '@push.rocks/smartunique';
import * as smartstream from '@push.rocks/smartstream';
import * as smartrx from '@push.rocks/smartrx';
export { smartfile, smartpath, smartpromise, smartrequest, smartunique, smartstream, smartrx };
import gunzipMaybe from 'gunzip-maybe';
import tar from 'tar';
import tarStream from 'tar-stream';
export { gunzipMaybe, tar, tarStream };


@@ -1,19 +0,0 @@
// node native scope
import * as path from 'path';
export { path };
// @pushrocks scope
import * as smartfile from '@push.rocks/smartfile';
import * as smartpath from '@push.rocks/smartpath';
import * as smartpromise from '@push.rocks/smartpromise';
import * as smartrequest from '@push.rocks/smartrequest';
import * as smartunique from '@push.rocks/smartunique';
import * as smartstream from '@push.rocks/smartstream';
import * as smartrx from '@push.rocks/smartrx';
export { smartfile, smartpath, smartpromise, smartrequest, smartunique, smartstream, smartrx };
// third party scope
import gunzipMaybe from 'gunzip-maybe';
// @ts-ignore
import tar from 'tar';
import tarStream from 'tar-stream';
export { gunzipMaybe, tar, tarStream };
//# sourceMappingURL=data:application/json;base64,…


@@ -1,9 +1,5 @@
{
"npmci": {
"npmGlobalTools": [],
"npmAccessLevel": "public"
},
"gitzone": {
"@git.zone/cli": {
"projectType": "npm",
"module": {
"githost": "code.foss.global",
@@ -25,9 +21,19 @@
"data analysis",
"file stream"
]
},
"release": {
"registries": [
"https://verdaccio.lossless.digital",
"https://registry.npmjs.org"
],
"accessLevel": "public"
}
},
"tsdoc": {
"@git.zone/tsdoc": {
"legal": "\n## License and Legal Information\n\nThis repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. \n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or if you require further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n"
},
"@ship.zone/szci": {
"npmGlobalTools": []
}
}
}


@@ -5,6 +5,10 @@
"main": "dist_ts/index.js",
"typings": "dist_ts/index.d.ts",
"type": "module",
"exports": {
".": "./dist_ts/index.js",
"./web": "./dist_ts_web/index.js"
},
"scripts": {
"test": "(tstest test/ --verbose)",
"build": "tsbuild --web --allowimplicitany",
@@ -22,31 +26,32 @@
"homepage": "https://code.foss.global/push.rocks/smartarchive#readme",
"dependencies": {
"@push.rocks/smartdelay": "^3.0.5",
"@push.rocks/smartfile": "^13.0.0",
"@push.rocks/smartfile": "^13.1.2",
"@push.rocks/smartpath": "^6.0.0",
"@push.rocks/smartpromise": "^4.2.3",
"@push.rocks/smartrequest": "^4.2.2",
"@push.rocks/smartrequest": "^5.0.1",
"@push.rocks/smartrx": "^3.0.10",
"@push.rocks/smartstream": "^3.2.5",
"@push.rocks/smartunique": "^3.0.9",
"@push.rocks/smarturl": "^3.1.0",
"@types/tar-stream": "^3.1.4",
"fflate": "^0.8.2",
"file-type": "^21.0.0",
"tar-stream": "^3.1.7"
"file-type": "^21.2.0",
"modern-tar": "^0.7.3"
},
"devDependencies": {
"@git.zone/tsbuild": "^3.1.0",
"@git.zone/tsrun": "^2.0.0",
"@git.zone/tstest": "^3.1.3"
"@git.zone/tsbuild": "^4.0.2",
"@git.zone/tsrun": "^2.0.1",
"@git.zone/tstest": "^3.1.4"
},
"private": false,
"files": [
"ts/**/*",
"ts_shared/**/*",
"ts_web/**/*",
"dist/**/*",
"dist_*/**/*",
"dist_ts/**/*",
"dist_ts_shared/**/*",
"dist_ts_web/**/*",
"assets/**/*",
"cli.js",

pnpm-lock.yaml (generated, 5089 lines changed)

File diff suppressed because it is too large.


@@ -1,4 +0,0 @@
onlyBuiltDependencies:
- esbuild
- mongodb-memory-server
- puppeteer


@@ -3,6 +3,6 @@
*/
export const commitinfo = {
name: '@push.rocks/smartarchive',
version: '5.0.1',
version: '5.1.0',
description: 'A library for working with archive files, providing utilities for compressing and decompressing data.'
}


@@ -1,5 +1,5 @@
import type { SmartArchive } from './classes.smartarchive.js';
import type { TSupportedMime } from './interfaces.js';
import type { TSupportedMime } from '../ts_shared/interfaces.js';
import * as plugins from './plugins.js';
/**
@@ -8,7 +8,7 @@ import * as plugins from './plugins.js';
export type TDecompressionStream =
| plugins.stream.Transform
| plugins.stream.Duplex
| plugins.tarStream.Extract;
| plugins.smartstream.SmartDuplex<any, any>;
/**
* Result of archive analysis
@@ -53,14 +53,42 @@ export class ArchiveAnalyzer {
*/
private async getDecompressionStream(mimeTypeArg: TSupportedMime): Promise<TDecompressionStream> {
switch (mimeTypeArg) {
case 'application/gzip':
return this.smartArchiveRef.gzipTools.getDecompressionStream();
case 'application/gzip': {
// Use fflate streaming Gunzip - instance must be created once and reused
let gunzip: plugins.fflate.Gunzip;
return new plugins.stream.Transform({
construct(callback) {
gunzip = new plugins.fflate.Gunzip((data, final) => {
this.push(Buffer.from(data));
});
callback();
},
transform(chunk, encoding, callback) {
try {
gunzip.push(chunk, false);
callback();
} catch (err) {
callback(err as Error);
}
},
flush(callback) {
try {
// Signal end of stream with empty final chunk
gunzip.push(new Uint8Array(0), true);
callback();
} catch (err) {
callback(err as Error);
}
}
});
}
case 'application/zip':
return this.smartArchiveRef.zipTools.getDecompressionStream();
case 'application/x-bzip2':
return this.smartArchiveRef.bzip2Tools.getDecompressionStream();
case 'application/x-tar':
return this.smartArchiveRef.tarTools.getDecompressionStream();
// TAR doesn't need decompression, just pass through
return plugins.smartstream.createPassThrough();
default:
// Handle unsupported formats or no decompression needed
return plugins.smartstream.createPassThrough();
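The GZIP branch above wraps fflate's streaming Gunzip in a Node.js Transform. A standalone sketch of the same pattern, using only node:stream and fflate (no smartarchive internals):

```ts
import { Transform } from 'node:stream';
import { Gunzip } from 'fflate';

function createGunzipTransform(): Transform {
  // One Gunzip instance per stream: it keeps decompressor state
  // across chunks, so it must not be recreated per chunk.
  let gunzip: Gunzip;
  return new Transform({
    construct(callback) {
      gunzip = new Gunzip((data) => this.push(Buffer.from(data)));
      callback();
    },
    transform(chunk, _encoding, callback) {
      try {
        gunzip.push(chunk, false);
        callback();
      } catch (err) {
        callback(err as Error);
      }
    },
    flush(callback) {
      try {
        // An empty final chunk signals end of input to fflate.
        gunzip.push(new Uint8Array(0), true);
        callback();
      } catch (err) {
        callback(err as Error);
      }
    },
  });
}
```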


@@ -1,16 +0,0 @@
import type { SmartArchive } from './classes.smartarchive.js';
import * as plugins from './plugins.js';
import { unbzip2Stream } from './bzip2/index.js';
export class Bzip2Tools {
smartArchiveRef: SmartArchive;
constructor(smartArchiveRefArg: SmartArchive) {
this.smartArchiveRef = smartArchiveRefArg;
}
getDecompressionStream() {
return unbzip2Stream();
}
}


@@ -1,138 +0,0 @@
import * as plugins from './plugins.js';
import type { TCompressionLevel } from './interfaces.js';
/**
* Transform stream for GZIP compression using fflate
*/
export class GzipCompressionTransform extends plugins.stream.Transform {
private gzip: plugins.fflate.Gzip;
constructor(level: TCompressionLevel = 6) {
super();
// Create a streaming Gzip compressor
this.gzip = new plugins.fflate.Gzip({ level }, (chunk, final) => {
this.push(Buffer.from(chunk));
if (final) {
this.push(null);
}
});
}
_transform(
chunk: Buffer,
encoding: BufferEncoding,
callback: plugins.stream.TransformCallback
): void {
try {
this.gzip.push(chunk, false);
callback();
} catch (err) {
callback(err as Error);
}
}
_flush(callback: plugins.stream.TransformCallback): void {
try {
this.gzip.push(new Uint8Array(0), true);
callback();
} catch (err) {
callback(err as Error);
}
}
}
/**
* Transform stream for GZIP decompression using fflate
*/
export class GzipDecompressionTransform extends plugins.stream.Transform {
private gunzip: plugins.fflate.Gunzip;
constructor() {
super();
// Create a streaming Gunzip decompressor
this.gunzip = new plugins.fflate.Gunzip((chunk, final) => {
this.push(Buffer.from(chunk));
if (final) {
this.push(null);
}
});
}
_transform(
chunk: Buffer,
encoding: BufferEncoding,
callback: plugins.stream.TransformCallback
): void {
try {
this.gunzip.push(chunk, false);
callback();
} catch (err) {
callback(err as Error);
}
}
_flush(callback: plugins.stream.TransformCallback): void {
try {
this.gunzip.push(new Uint8Array(0), true);
callback();
} catch (err) {
callback(err as Error);
}
}
}
/**
* GZIP compression and decompression utilities
*/
export class GzipTools {
/**
* Get a streaming compression transform
*/
public getCompressionStream(level?: TCompressionLevel): plugins.stream.Transform {
return new GzipCompressionTransform(level);
}
/**
* Get a streaming decompression transform
*/
public getDecompressionStream(): plugins.stream.Transform {
return new GzipDecompressionTransform();
}
/**
* Compress data synchronously
*/
public compressSync(data: Buffer, level?: TCompressionLevel): Buffer {
const options = level !== undefined ? { level } : undefined;
return Buffer.from(plugins.fflate.gzipSync(data, options));
}
/**
* Decompress data synchronously
*/
public decompressSync(data: Buffer): Buffer {
return Buffer.from(plugins.fflate.gunzipSync(data));
}
/**
* Compress data asynchronously
* Note: Uses sync version for Deno compatibility (fflate async uses Web Workers
* which have issues in Deno)
*/
public async compress(data: Buffer, level?: TCompressionLevel): Promise<Buffer> {
// Use sync version wrapped in Promise for cross-runtime compatibility
return this.compressSync(data, level);
}
/**
* Decompress data asynchronously
* Note: Uses sync version for Deno compatibility (fflate async uses Web Workers
* which have issues in Deno)
*/
public async decompress(data: Buffer): Promise<Buffer> {
// Use sync version wrapped in Promise for cross-runtime compatibility
return this.decompressSync(data);
}
}


@@ -6,12 +6,15 @@ import type {
TArchiveFormat,
TCompressionLevel,
TEntryFilter,
} from './interfaces.js';
} from '../ts_shared/interfaces.js';
import { Bzip2Tools } from './classes.bzip2tools.js';
import { GzipTools } from './classes.gziptools.js';
// Import browser-compatible tools from ts_shared
import { Bzip2Tools } from '../ts_shared/classes.bzip2tools.js';
import { GzipTools } from '../ts_shared/classes.gziptools.js';
import { ZipTools } from '../ts_shared/classes.ziptools.js';
// Import Node.js-extended TarTools
import { TarTools } from './classes.tartools.js';
import { ZipTools } from './classes.ziptools.js';
import { ArchiveAnalyzer, type IAnalyzedResult } from './classes.archiveanalyzer.js';
/**
@@ -62,7 +65,7 @@ export class SmartArchive {
public tarTools = new TarTools();
public zipTools = new ZipTools();
public gzipTools = new GzipTools();
public bzip2Tools = new Bzip2Tools(this);
public bzip2Tools = new Bzip2Tools();
public archiveAnalyzer = new ArchiveAnalyzer(this);
// ============================================
@@ -173,7 +176,7 @@ export class SmartArchive {
public entry(archivePath: string, content: string | Buffer): this {
this.ensureNotInExtractMode('entry');
if (!this._mode) this._mode = 'create';
this.pendingEntries.push({ archivePath, content });
this.pendingEntries.push({ archivePath, content: content instanceof Buffer ? new Uint8Array(content) : content });
return this;
}
@@ -184,7 +187,10 @@ export class SmartArchive {
this.ensureNotInExtractMode('entries');
if (!this._mode) this._mode = 'create';
for (const e of entriesArg) {
this.pendingEntries.push({ archivePath: e.archivePath, content: e.content });
this.pendingEntries.push({
archivePath: e.archivePath,
content: e.content instanceof Buffer ? new Uint8Array(e.content) : e.content
});
}
return this;
}
@@ -374,30 +380,36 @@ export class SmartArchive {
plugins.smartstream.createTransformFunction<IAnalyzedResult, void>(
async (analyzedResultChunk) => {
if (analyzedResultChunk.fileType?.mime === 'application/x-tar') {
const tarStream = analyzedResultChunk.decompressionStream as plugins.tarStream.Extract;
// Use modern-tar for TAR extraction
const chunks: Buffer[] = [];
tarStream.on('entry', async (header, stream, next) => {
if (header.type === 'directory') {
stream.resume();
stream.on('end', () => next());
return;
analyzedResultChunk.resultStream.on('data', (chunk: Buffer) => {
chunks.push(chunk);
});
analyzedResultChunk.resultStream.on('end', async () => {
try {
const tarBuffer = Buffer.concat(chunks);
const entries = await this.tarTools.extractTar(new Uint8Array(tarBuffer));
for (const entry of entries) {
if (entry.isDirectory) continue;
const streamFile = plugins.smartfile.StreamFile.fromBuffer(
Buffer.from(entry.content)
);
streamFile.relativeFilePath = entry.path;
streamFileIntake.push(streamFile);
}
safeSignalEnd();
} catch (err) {
streamFileIntake.emit('error', err);
}
const passThrough = new plugins.stream.PassThrough();
const streamfile = plugins.smartfile.StreamFile.fromStream(passThrough, header.name);
streamFileIntake.push(streamfile);
stream.pipe(passThrough);
stream.on('end', () => {
passThrough.end();
next();
});
});
tarStream.on('finish', () => {
safeSignalEnd();
analyzedResultChunk.resultStream.on('error', (err: Error) => {
streamFileIntake.emit('error', err);
});
analyzedResultChunk.resultStream.pipe(analyzedResultChunk.decompressionStream);
} else if (analyzedResultChunk.fileType?.mime === 'application/zip') {
analyzedResultChunk.resultStream
.pipe(analyzedResultChunk.decompressionStream)
@@ -544,25 +556,29 @@ export class SmartArchive {
if (this.creationFormat === 'tar' || this.creationFormat === 'tar.gz' || this.creationFormat === 'tgz') {
if (this.creationFormat === 'tar') {
this.archiveBuffer = await this.tarTools.packFiles(entries);
const result = await this.tarTools.packFiles(entries);
this.archiveBuffer = Buffer.from(result);
} else {
this.archiveBuffer = await this.tarTools.packFilesToTarGz(entries, this._compressionLevel);
const result = await this.tarTools.packFilesToTarGz(entries, this._compressionLevel);
this.archiveBuffer = Buffer.from(result);
}
} else if (this.creationFormat === 'zip') {
this.archiveBuffer = await this.zipTools.createZip(entries, this._compressionLevel);
const result = await this.zipTools.createZip(entries, this._compressionLevel);
this.archiveBuffer = Buffer.from(result);
} else if (this.creationFormat === 'gz') {
if (entries.length !== 1) {
throw new Error('GZIP format only supports a single file');
}
let content: Buffer;
let content: Uint8Array;
if (typeof entries[0].content === 'string') {
content = Buffer.from(entries[0].content);
} else if (Buffer.isBuffer(entries[0].content)) {
content = new TextEncoder().encode(entries[0].content);
} else if (entries[0].content instanceof Uint8Array) {
content = entries[0].content;
} else {
throw new Error('GZIP format requires string or Buffer content');
throw new Error('GZIP format requires string or Uint8Array content');
}
this.archiveBuffer = await this.gzipTools.compress(content, this._compressionLevel);
const result = await this.gzipTools.compress(content, this._compressionLevel);
this.archiveBuffer = Buffer.from(result);
} else {
throw new Error(`Unsupported format: ${this.creationFormat}`);
}
@@ -808,7 +824,7 @@ export class SmartArchive {
const content = await plugins.fsPromises.readFile(absolutePath);
this.pendingEntries.push({
archivePath,
content,
content: new Uint8Array(content),
});
}
}


@@ -1,208 +1,51 @@
import * as plugins from './plugins.js';
import type { IArchiveEntry, TCompressionLevel } from './interfaces.js';
import { GzipTools } from './classes.gziptools.js';
import type { IArchiveEntry, TCompressionLevel } from '../ts_shared/interfaces.js';
import { TarTools as SharedTarTools } from '../ts_shared/classes.tartools.js';
import { GzipTools } from '../ts_shared/classes.gziptools.js';
/**
* TAR archive creation and extraction utilities
* Extended TAR archive utilities with Node.js filesystem support
*/
export class TarTools {
export class TarTools extends SharedTarTools {
/**
* Add a file to a TAR pack stream
* Pack a directory into a TAR buffer (Node.js only)
*/
public async addFileToPack(
pack: plugins.tarStream.Pack,
optionsArg: {
fileName?: string;
content?:
| string
| Buffer
| plugins.stream.Readable
| plugins.smartfile.SmartFile
| plugins.smartfile.StreamFile;
byteLength?: number;
filePath?: string;
}
): Promise<void> {
return new Promise<void>(async (resolve, reject) => {
let fileName: string | null = null;
if (optionsArg.fileName) {
fileName = optionsArg.fileName;
} else if (optionsArg.content instanceof plugins.smartfile.SmartFile) {
fileName = optionsArg.content.relative;
} else if (optionsArg.content instanceof plugins.smartfile.StreamFile) {
fileName = optionsArg.content.relativeFilePath;
} else if (optionsArg.filePath) {
fileName = optionsArg.filePath;
}
if (!fileName) {
reject(new Error('No filename specified for TAR entry'));
return;
}
// Determine content byte length
let contentByteLength: number | undefined;
if (optionsArg.byteLength) {
contentByteLength = optionsArg.byteLength;
} else if (typeof optionsArg.content === 'string') {
contentByteLength = Buffer.byteLength(optionsArg.content, 'utf8');
} else if (Buffer.isBuffer(optionsArg.content)) {
contentByteLength = optionsArg.content.length;
} else if (optionsArg.content instanceof plugins.smartfile.SmartFile) {
contentByteLength = await optionsArg.content.getSize();
} else if (optionsArg.content instanceof plugins.smartfile.StreamFile) {
contentByteLength = await optionsArg.content.getSize();
} else if (optionsArg.filePath) {
const fileStat = await plugins.fsPromises.stat(optionsArg.filePath);
contentByteLength = fileStat.size;
}
// Convert all content types to Readable stream
let content: plugins.stream.Readable;
if (Buffer.isBuffer(optionsArg.content)) {
content = plugins.stream.Readable.from(optionsArg.content);
} else if (typeof optionsArg.content === 'string') {
content = plugins.stream.Readable.from(Buffer.from(optionsArg.content));
} else if (optionsArg.content instanceof plugins.smartfile.SmartFile) {
content = plugins.stream.Readable.from(optionsArg.content.contents);
} else if (optionsArg.content instanceof plugins.smartfile.StreamFile) {
content = await optionsArg.content.createReadStream();
} else if (optionsArg.content instanceof plugins.stream.Readable) {
content = optionsArg.content;
} else if (optionsArg.filePath) {
content = plugins.fs.createReadStream(optionsArg.filePath);
} else {
reject(new Error('No content or filePath specified for TAR entry'));
return;
}
const entry = pack.entry(
{
name: fileName,
...(contentByteLength !== undefined ? { size: contentByteLength } : {}),
},
(err: Error | null) => {
if (err) {
reject(err);
} else {
resolve();
}
}
);
content.pipe(entry);
// Note: resolve() is called in the callback above when pipe completes
});
}
/**
* Pack a directory into a TAR stream
*/
public async packDirectory(directoryPath: string): Promise<plugins.tarStream.Pack> {
public async packDirectory(directoryPath: string): Promise<Uint8Array> {
const fileTree = await plugins.listFileTree(directoryPath, '**/*');
const pack = await this.getPackStream();
const entries: IArchiveEntry[] = [];
for (const filePath of fileTree) {
const absolutePath = plugins.path.join(directoryPath, filePath);
const fileStat = await plugins.fsPromises.stat(absolutePath);
await this.addFileToPack(pack, {
byteLength: fileStat.size,
filePath: absolutePath,
fileName: filePath,
content: plugins.fs.createReadStream(absolutePath),
const content = await plugins.fsPromises.readFile(absolutePath);
entries.push({
archivePath: filePath,
content: new Uint8Array(content),
});
}
return pack;
return this.packFiles(entries);
}
/**
* Get a new TAR pack stream
*/
public async getPackStream(): Promise<plugins.tarStream.Pack> {
return plugins.tarStream.pack();
}
/**
* Get a TAR extraction stream
*/
public getDecompressionStream(): plugins.tarStream.Extract {
return plugins.tarStream.extract();
}
/**
* Pack files into a TAR buffer
*/
public async packFiles(files: IArchiveEntry[]): Promise<Buffer> {
const pack = await this.getPackStream();
for (const file of files) {
await this.addFileToPack(pack, {
fileName: file.archivePath,
content: file.content as string | Buffer | plugins.stream.Readable | plugins.smartfile.SmartFile | plugins.smartfile.StreamFile,
byteLength: file.size,
});
}
pack.finalize();
const chunks: Buffer[] = [];
return new Promise((resolve, reject) => {
pack.on('data', (chunk: Buffer) => chunks.push(chunk));
pack.on('end', () => resolve(Buffer.concat(chunks)));
pack.on('error', reject);
});
}
/**
* Pack a directory into a TAR.GZ buffer
* Pack a directory into a TAR.GZ buffer (Node.js only)
*/
public async packDirectoryToTarGz(
directoryPath: string,
compressionLevel?: TCompressionLevel
): Promise<Buffer> {
const pack = await this.packDirectory(directoryPath);
pack.finalize();
): Promise<Uint8Array> {
const tarBuffer = await this.packDirectory(directoryPath);
const gzipTools = new GzipTools();
const gzipStream = gzipTools.getCompressionStream(compressionLevel);
const chunks: Buffer[] = [];
return new Promise((resolve, reject) => {
pack
.pipe(gzipStream)
.on('data', (chunk: Buffer) => chunks.push(chunk))
.on('end', () => resolve(Buffer.concat(chunks)))
.on('error', reject);
});
return gzipTools.compress(tarBuffer, compressionLevel);
}
/**
* Pack a directory into a TAR.GZ stream
* Pack a directory into a TAR.GZ stream (Node.js only)
*/
public async packDirectoryToTarGzStream(
directoryPath: string,
compressionLevel?: TCompressionLevel
): Promise<plugins.stream.Readable> {
const pack = await this.packDirectory(directoryPath);
pack.finalize();
const gzipTools = new GzipTools();
const gzipStream = gzipTools.getCompressionStream(compressionLevel);
return pack.pipe(gzipStream);
}
/**
* Pack files into a TAR.GZ buffer
*/
public async packFilesToTarGz(
files: IArchiveEntry[],
compressionLevel?: TCompressionLevel
): Promise<Buffer> {
const tarBuffer = await this.packFiles(files);
const gzipTools = new GzipTools();
return gzipTools.compress(tarBuffer, compressionLevel);
const buffer = await this.packDirectoryToTarGz(directoryPath, compressionLevel);
return plugins.stream.Readable.from(buffer);
}
}
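A usage sketch for the Node.js-extended TarTools (the ./assets directory and output path are hypothetical):

```ts
import { promises as fs } from 'node:fs';
import { TarTools } from '@push.rocks/smartarchive';

async function main() {
  const tarTools = new TarTools();

  // Pack a directory to tar.gz bytes, then persist them.
  const targz: Uint8Array = await tarTools.packDirectoryToTarGz('./assets', 9);
  await fs.writeFile('./assets.tar.gz', targz);
}

main();
```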


@@ -1,196 +0,0 @@
import * as plugins from './plugins.js';
import type { IArchiveEntry, TCompressionLevel } from './interfaces.js';
/**
* Transform stream for ZIP decompression using fflate
* Emits StreamFile objects for each file in the archive
*/
export class ZipDecompressionTransform extends plugins.smartstream.SmartDuplex<Buffer, plugins.smartfile.StreamFile> {
private streamtools!: plugins.smartstream.IStreamTools;
private unzipper = new plugins.fflate.Unzip(async (fileArg) => {
let resultBuffer: Buffer;
fileArg.ondata = async (_flateError, dat, final) => {
resultBuffer
? (resultBuffer = Buffer.concat([resultBuffer, Buffer.from(dat)]))
: (resultBuffer = Buffer.from(dat));
if (final) {
const streamFile = plugins.smartfile.StreamFile.fromBuffer(resultBuffer);
streamFile.relativeFilePath = fileArg.name;
this.streamtools.push(streamFile);
}
};
fileArg.start();
});
constructor() {
super({
objectMode: true,
writeFunction: async (chunkArg, streamtoolsArg) => {
this.streamtools ? null : (this.streamtools = streamtoolsArg);
this.unzipper.push(
Buffer.isBuffer(chunkArg) ? chunkArg : Buffer.from(chunkArg as unknown as ArrayBuffer),
false
);
return null;
},
finalFunction: async () => {
this.unzipper.push(Buffer.from(''), true);
await plugins.smartdelay.delayFor(0);
await this.streamtools.push(null);
return null;
},
});
this.unzipper.register(plugins.fflate.UnzipInflate);
}
}
/**
* Streaming ZIP compression using fflate
* Allows adding multiple entries before finalizing
*/
export class ZipCompressionStream extends plugins.stream.Duplex {
private files: Map<string, { data: Uint8Array; options?: plugins.fflate.ZipOptions }> = new Map();
private finalized = false;
constructor() {
super();
}
/**
* Add a file entry to the ZIP archive
*/
public async addEntry(
fileName: string,
content: Buffer | plugins.stream.Readable,
options?: { compressionLevel?: TCompressionLevel }
): Promise<void> {
if (this.finalized) {
throw new Error('Cannot add entries to a finalized ZIP archive');
}
let data: Buffer;
if (Buffer.isBuffer(content)) {
data = content;
} else {
// Collect stream to buffer
const chunks: Buffer[] = [];
for await (const chunk of content) {
chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
}
data = Buffer.concat(chunks);
}
this.files.set(fileName, {
data: new Uint8Array(data),
options: options?.compressionLevel !== undefined ? { level: options.compressionLevel } : undefined,
});
}
/**
* Finalize the ZIP archive and emit the compressed data
*/
public async finalize(): Promise<void> {
if (this.finalized) {
return;
}
this.finalized = true;
const filesObj: plugins.fflate.Zippable = {};
for (const [name, { data, options }] of this.files) {
filesObj[name] = options ? [data, options] : data;
}
// Use sync version for Deno compatibility (fflate async uses Web Workers)
try {
const result = plugins.fflate.zipSync(filesObj);
this.push(Buffer.from(result));
this.push(null);
} catch (err) {
throw err;
}
}
_read(): void {
// No-op: data is pushed when finalize() is called
}
_write(
_chunk: Buffer,
_encoding: BufferEncoding,
callback: (error?: Error | null) => void
): void {
// Not used for ZIP creation - use addEntry() instead
callback(new Error('Use addEntry() to add files to the ZIP archive'));
}
}
/**
* ZIP compression and decompression utilities
*/
export class ZipTools {
/**
* Get a streaming compression object for creating ZIP archives
*/
public getCompressionStream(): ZipCompressionStream {
return new ZipCompressionStream();
}
/**
* Get a streaming decompression transform for extracting ZIP archives
*/
public getDecompressionStream(): ZipDecompressionTransform {
return new ZipDecompressionTransform();
}
/**
* Create a ZIP archive from an array of entries
*/
public async createZip(entries: IArchiveEntry[], compressionLevel?: TCompressionLevel): Promise<Buffer> {
const filesObj: plugins.fflate.Zippable = {};
for (const entry of entries) {
let data: Uint8Array;
if (typeof entry.content === 'string') {
data = new TextEncoder().encode(entry.content);
} else if (Buffer.isBuffer(entry.content)) {
data = new Uint8Array(entry.content);
} else if (entry.content instanceof plugins.smartfile.SmartFile) {
data = new Uint8Array(entry.content.contents);
} else if (entry.content instanceof plugins.smartfile.StreamFile) {
const buffer = await entry.content.getContentAsBuffer();
data = new Uint8Array(buffer);
} else {
// Readable stream
const chunks: Buffer[] = [];
for await (const chunk of entry.content as plugins.stream.Readable) {
chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
}
data = new Uint8Array(Buffer.concat(chunks));
}
if (compressionLevel !== undefined) {
filesObj[entry.archivePath] = [data, { level: compressionLevel }];
} else {
filesObj[entry.archivePath] = data;
}
}
// Use sync version for Deno compatibility (fflate async uses Web Workers)
const result = plugins.fflate.zipSync(filesObj);
return Buffer.from(result);
}
/**
* Extract a ZIP buffer to an array of entries
*/
public async extractZip(data: Buffer): Promise<Array<{ path: string; content: Buffer }>> {
// Use sync version for Deno compatibility (fflate async uses Web Workers)
const result = plugins.fflate.unzipSync(data);
const entries: Array<{ path: string; content: Buffer }> = [];
for (const [path, content] of Object.entries(result)) {
entries.push({ path, content: Buffer.from(content) });
}
return entries;
}
}


@@ -1,15 +1,11 @@
// Core types and errors
export * from './interfaces.js';
export * from './errors.js';
// Re-export everything from ts_shared (browser-compatible)
export * from '../ts_shared/index.js';
// Main archive class
// Node.js-specific: Main archive class with filesystem support
export * from './classes.smartarchive.js';
// Format-specific tools
export * from './classes.tartools.js';
export * from './classes.ziptools.js';
export * from './classes.gziptools.js';
export * from './classes.bzip2tools.js';
// Archive analysis
// Node.js-specific: Archive analysis with SmartArchive integration
export * from './classes.archiveanalyzer.js';
// Node.js-specific: Extended TarTools with filesystem support (overrides shared TarTools)
export { TarTools } from './classes.tartools.js';


@@ -1,4 +1,4 @@
// node native scope
// Node.js native scope
import * as path from 'node:path';
import * as stream from 'node:stream';
import * as fs from 'node:fs';
@@ -30,32 +30,20 @@ export async function listFileTree(dirPath: string, _pattern: string = '**/*'):
return results;
}
// @pushrocks scope
import * as smartfile from '@push.rocks/smartfile';
import * as smartdelay from '@push.rocks/smartdelay';
// Re-export browser-compatible plugins from ts_shared
export * from '../ts_shared/plugins.js';
// Additional Node.js-specific @pushrocks packages
import * as smartpath from '@push.rocks/smartpath';
import * as smartpromise from '@push.rocks/smartpromise';
import * as smartrequest from '@push.rocks/smartrequest';
import * as smartunique from '@push.rocks/smartunique';
import * as smartstream from '@push.rocks/smartstream';
import * as smartrx from '@push.rocks/smartrx';
import * as smarturl from '@push.rocks/smarturl';
export {
smartfile,
smartdelay,
smartpath,
smartpromise,
smartrequest,
smartunique,
smartstream,
smartrx,
smarturl,
};
// third party scope
import * as fileType from 'file-type';
import * as fflate from 'fflate';
import tarStream from 'tar-stream';
export { fileType, fflate, tarStream };


@@ -6,7 +6,7 @@ const BITMASK = [0, 0x01, 0x03, 0x07, 0x0f, 0x1f, 0x3f, 0x7f, 0xff] as const;
* Creates a bit reader function for BZIP2 decompression.
* Takes a buffer iterator as input and returns a function that reads bits.
*/
export function bitIterator(nextBuffer: () => Buffer): IBitReader {
export function bitIterator(nextBuffer: () => Uint8Array): IBitReader {
let bit = 0;
let byte = 0;
let bytes = nextBuffer();


@@ -71,7 +71,7 @@ export class Bzip2 {
/**
* Create a bit reader from a byte array
*/
array(bytes: Uint8Array | Buffer): (n: number) => number {
array(bytes: Uint8Array): (n: number) => number {
let bit = 0;
let byte = 0;
const BITMASK = [0, 0x01, 0x03, 0x07, 0x0f, 0x1f, 0x3f, 0x7f, 0xff];
@@ -99,7 +99,7 @@ export class Bzip2 {
/**
* Simple decompression from a buffer
*/
simple(srcbuffer: Uint8Array | Buffer, stream: (byte: number) => void): void {
simple(srcbuffer: Uint8Array, stream: (byte: number) => void): void {
const bits = this.array(srcbuffer);
const size = this.header(bits as IBitReader);
let ret: number | null = 0;


@@ -8,16 +8,16 @@ import { bitIterator } from './bititerator.js';
/**
* Creates a streaming BZIP2 decompression transform
*/
export function unbzip2Stream(): plugins.smartstream.SmartDuplex<Buffer, Buffer> {
export function unbzip2Stream(): plugins.smartstream.SmartDuplex<Uint8Array, Uint8Array> {
const bzip2Instance = new Bzip2();
const bufferQueue: Buffer[] = [];
const bufferQueue: Uint8Array[] = [];
let hasBytes = 0;
let blockSize = 0;
let broken = false;
let bitReader: IBitReader | null = null;
let streamCRC: number | null = null;
function decompressBlock(): Buffer | undefined {
function decompressBlock(): Uint8Array | undefined {
if (!blockSize) {
blockSize = bzip2Instance.header(bitReader!);
streamCRC = 0;
@@ -40,12 +40,12 @@ export function unbzip2Stream(): plugins.smartstream.SmartDuplex<Buffer, Buffer>
return undefined;
}
return Buffer.from(chunk);
return new Uint8Array(chunk);
}
let outlength = 0;
const decompressAndPush = async (): Promise<Buffer | undefined> => {
const decompressAndPush = async (): Promise<Uint8Array | undefined> => {
if (broken) return undefined;
try {
@@ -63,7 +63,7 @@ export function unbzip2Stream(): plugins.smartstream.SmartDuplex<Buffer, Buffer>
}
};
return new plugins.smartstream.SmartDuplex<Buffer, Buffer>({
return new plugins.smartstream.SmartDuplex<Uint8Array, Uint8Array>({
objectMode: true,
name: 'bzip2',
highWaterMark: 1,


@@ -0,0 +1,14 @@
import * as plugins from './plugins.js';
import { unbzip2Stream } from './bzip2/index.js';
/**
* BZIP2 decompression utilities (browser-compatible)
*/
export class Bzip2Tools {
/**
* Get a streaming decompression transform
*/
getDecompressionStream(): plugins.smartstream.SmartDuplex<Uint8Array, Uint8Array> {
return unbzip2Stream();
}
}
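A consumption sketch for the shared Bzip2Tools, assuming the SmartDuplex it returns behaves as a standard Node.js duplex stream (the fixture path is hypothetical):

```ts
import { createReadStream } from 'node:fs';
import { Bzip2Tools } from '@push.rocks/smartarchive';

const chunks: Uint8Array[] = [];

createReadStream('./fixture.bz2')
  .pipe(new Bzip2Tools().getDecompressionStream())
  .on('data', (chunk: Uint8Array) => chunks.push(chunk))
  .on('end', () => console.log(`decompressed ${chunks.length} chunks`));
```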


@@ -0,0 +1,42 @@
import * as plugins from './plugins.js';
import type { TCompressionLevel } from './interfaces.js';
/**
* GZIP compression and decompression utilities (browser-compatible)
*/
export class GzipTools {
/**
* Compress data synchronously
*/
public compressSync(data: Uint8Array, level?: TCompressionLevel): Uint8Array {
const options = level !== undefined ? { level } : undefined;
return plugins.fflate.gzipSync(data, options);
}
/**
* Decompress data synchronously
*/
public decompressSync(data: Uint8Array): Uint8Array {
return plugins.fflate.gunzipSync(data);
}
/**
* Compress data asynchronously
* Note: Uses sync version for Deno compatibility (fflate async uses Web Workers
* which have issues in Deno)
*/
public async compress(data: Uint8Array, level?: TCompressionLevel): Promise<Uint8Array> {
// Use sync version wrapped in Promise for cross-runtime compatibility
return this.compressSync(data, level);
}
/**
* Decompress data asynchronously
* Note: Uses sync version for Deno compatibility (fflate async uses Web Workers
* which have issues in Deno)
*/
public async decompress(data: Uint8Array): Promise<Uint8Array> {
// Use sync version wrapped in Promise for cross-runtime compatibility
return this.decompressSync(data);
}
}
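A round-trip sketch for the shared GzipTools; it sticks to Uint8Array and TextEncoder/TextDecoder, so the same code should run in Node.js and the browser:

```ts
import { GzipTools } from '@push.rocks/smartarchive/web';

async function main() {
  const gzipTools = new GzipTools();
  const input = new TextEncoder().encode('hello gzip');

  const compressed = await gzipTools.compress(input, 6);
  const restored = await gzipTools.decompress(compressed);

  console.log(new TextDecoder().decode(restored)); // "hello gzip"
}

main();
```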


@@ -0,0 +1,89 @@
import * as plugins from './plugins.js';
import type { IArchiveEntry, ITarEntry, TCompressionLevel } from './interfaces.js';
import { GzipTools } from './classes.gziptools.js';
/**
* TAR archive creation and extraction utilities using modern-tar (browser-compatible)
*/
export class TarTools {
/**
* Pack files into a TAR buffer
*/
public async packFiles(files: IArchiveEntry[]): Promise<Uint8Array> {
const entries: ITarEntry[] = [];
for (const file of files) {
let data: Uint8Array;
if (typeof file.content === 'string') {
data = new TextEncoder().encode(file.content);
} else if (file.content instanceof Uint8Array) {
data = file.content;
} else if (file.content instanceof plugins.smartfile.SmartFile) {
data = new Uint8Array(file.content.contents);
} else if (file.content instanceof plugins.smartfile.StreamFile) {
const buffer = await file.content.getContentAsBuffer();
data = new Uint8Array(buffer);
} else {
throw new Error('Unsupported content type for TAR entry');
}
entries.push({
header: {
name: file.archivePath,
size: data.length,
type: 'file',
mode: file.mode,
mtime: file.mtime,
},
body: data,
});
}
return plugins.modernTar.packTar(entries);
}
/**
* Extract a TAR buffer to an array of entries
*/
public async extractTar(data: Uint8Array): Promise<Array<{ path: string; content: Uint8Array; isDirectory: boolean }>> {
const entries = await plugins.modernTar.unpackTar(data);
const result: Array<{ path: string; content: Uint8Array; isDirectory: boolean }> = [];
for (const entry of entries) {
const isDirectory = entry.header.type === 'directory' || entry.header.name.endsWith('/');
// modern-tar uses 'data' property, not 'body'
const content = entry.data ?? new Uint8Array(0);
result.push({
path: entry.header.name,
content,
isDirectory,
});
}
return result;
}
/**
* Pack files into a TAR.GZ buffer
*/
public async packFilesToTarGz(
files: IArchiveEntry[],
compressionLevel?: TCompressionLevel
): Promise<Uint8Array> {
const tarBuffer = await this.packFiles(files);
const gzipTools = new GzipTools();
return gzipTools.compress(tarBuffer, compressionLevel);
}
/**
* Extract a TAR.GZ buffer to an array of entries
*/
public async extractTarGz(data: Uint8Array): Promise<Array<{ path: string; content: Uint8Array; isDirectory: boolean }>> {
const gzipTools = new GzipTools();
const tarBuffer = await gzipTools.decompress(data);
return this.extractTar(tarBuffer);
}
}
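A pack/extract round-trip sketch for the shared TarTools; entry shapes follow the IArchiveEntry input and the extractTar return type above:

```ts
import { TarTools } from '@push.rocks/smartarchive/web';

async function main() {
  const tarTools = new TarTools();

  const tarBytes = await tarTools.packFiles([
    { archivePath: 'docs/readme.txt', content: 'hello tar' },
  ]);

  for (const entry of await tarTools.extractTar(tarBytes)) {
    if (entry.isDirectory) continue;
    console.log(entry.path, new TextDecoder().decode(entry.content));
  }
}

main();
```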


@@ -0,0 +1,107 @@
import * as plugins from './plugins.js';
import type { IArchiveEntry, TCompressionLevel } from './interfaces.js';
/**
* Transform stream for ZIP decompression using fflate
* Emits StreamFile objects for each file in the archive
*/
export class ZipDecompressionTransform extends plugins.smartstream.SmartDuplex<Uint8Array, plugins.smartfile.StreamFile> {
private streamtools!: plugins.smartstream.IStreamTools;
private unzipper = new plugins.fflate.Unzip(async (fileArg) => {
let resultBuffer: Uint8Array;
fileArg.ondata = async (_flateError, dat, final) => {
if (resultBuffer) {
const combined = new Uint8Array(resultBuffer.length + dat.length);
combined.set(resultBuffer);
combined.set(dat, resultBuffer.length);
resultBuffer = combined;
} else {
resultBuffer = new Uint8Array(dat);
}
if (final) {
const streamFile = plugins.smartfile.StreamFile.fromBuffer(Buffer.from(resultBuffer));
streamFile.relativeFilePath = fileArg.name;
this.streamtools.push(streamFile);
}
};
fileArg.start();
});
constructor() {
super({
objectMode: true,
writeFunction: async (chunkArg, streamtoolsArg) => {
this.streamtools ? null : (this.streamtools = streamtoolsArg);
const chunk = chunkArg instanceof Uint8Array ? chunkArg : new Uint8Array(chunkArg);
this.unzipper.push(chunk, false);
return null;
},
finalFunction: async () => {
this.unzipper.push(new Uint8Array(0), true);
await plugins.smartdelay.delayFor(0);
await this.streamtools.push(null);
return null;
},
});
this.unzipper.register(plugins.fflate.UnzipInflate);
}
}
/**
* ZIP compression and decompression utilities
*/
export class ZipTools {
/**
* Get a streaming decompression transform for extracting ZIP archives
*/
public getDecompressionStream(): ZipDecompressionTransform {
return new ZipDecompressionTransform();
}
/**
* Create a ZIP archive from an array of entries
*/
public async createZip(entries: IArchiveEntry[], compressionLevel?: TCompressionLevel): Promise<Uint8Array> {
const filesObj: plugins.fflate.Zippable = {};
for (const entry of entries) {
let data: Uint8Array;
if (typeof entry.content === 'string') {
data = new TextEncoder().encode(entry.content);
} else if (entry.content instanceof Uint8Array) {
data = entry.content;
} else if (entry.content instanceof plugins.smartfile.SmartFile) {
data = new Uint8Array(entry.content.contents);
} else if (entry.content instanceof plugins.smartfile.StreamFile) {
const buffer = await entry.content.getContentAsBuffer();
data = new Uint8Array(buffer);
} else {
throw new Error('Unsupported content type for ZIP entry');
}
if (compressionLevel !== undefined) {
filesObj[entry.archivePath] = [data, { level: compressionLevel }];
} else {
filesObj[entry.archivePath] = data;
}
}
// Use sync version for Deno compatibility (fflate async uses Web Workers)
const result = plugins.fflate.zipSync(filesObj);
return result;
}
/**
* Extract a ZIP buffer to an array of entries
*/
public async extractZip(data: Uint8Array): Promise<Array<{ path: string; content: Uint8Array }>> {
// Use sync version for Deno compatibility (fflate async uses Web Workers)
const result = plugins.fflate.unzipSync(data);
const entries: Array<{ path: string; content: Uint8Array }> = [];
for (const [path, content] of Object.entries(result)) {
entries.push({ path, content });
}
return entries;
}
}
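A matching round-trip sketch for the shared ZipTools:

```ts
import { ZipTools } from '@push.rocks/smartarchive/web';

async function main() {
  const zipTools = new ZipTools();

  const zipBytes = await zipTools.createZip(
    [{ archivePath: 'hello.txt', content: 'hello zip' }],
    9 // maximum compression; omit to use fflate's default
  );

  for (const entry of await zipTools.extractZip(zipBytes)) {
    console.log(entry.path, entry.content.byteLength);
  }
}

main();
```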

ts_shared/index.ts (new file, 17 lines)

@@ -0,0 +1,17 @@
// ts_shared - Browser-compatible shared code
// Interfaces and types
export * from './interfaces.js';
// Error classes
export * from './errors.js';
// Tool classes
export { ZipTools, ZipDecompressionTransform } from './classes.ziptools.js';
export { GzipTools } from './classes.gziptools.js';
export { TarTools } from './classes.tartools.js';
export { Bzip2Tools } from './classes.bzip2tools.js';
// BZIP2 internals (for advanced usage)
export { unbzip2Stream } from './bzip2/index.js';
export { Bzip2 } from './bzip2/bzip2.js';


@@ -1,4 +1,3 @@
import type * as stream from 'node:stream';
import type { SmartFile, StreamFile } from '@push.rocks/smartfile';
/**
@@ -22,13 +21,13 @@ export type TSupportedMime =
| undefined;
/**
* Entry to add to an archive during creation
* Entry to add to an archive during creation (browser-compatible)
*/
export interface IArchiveEntry {
/** Path within the archive */
archivePath: string;
/** Content: string, Buffer, Readable stream, SmartFile, or StreamFile */
content: string | Buffer | stream.Readable | SmartFile | StreamFile;
/** Content: string, Buffer/Uint8Array, SmartFile, or StreamFile */
content: string | Uint8Array | SmartFile | StreamFile;
/** Optional size hint for streams (improves performance) */
size?: number;
/** Optional file mode/permissions */
@@ -104,11 +103,9 @@ export interface IAddFileOptions {
/** Filename within the archive */
fileName?: string;
/** File content */
content?: string | Buffer | stream.Readable | SmartFile | StreamFile;
content?: string | Uint8Array | SmartFile | StreamFile;
/** Size in bytes (required for streams) */
byteLength?: number;
/** Path to file on disk (alternative to content) */
filePath?: string;
}
/**
@@ -134,3 +131,17 @@ export interface IHuffmanGroup {
* Entry filter predicate for fluent API
*/
export type TEntryFilter = (entry: IArchiveEntryInfo) => boolean;
/**
* TAR entry for modern-tar compatibility
*/
export interface ITarEntry {
header: {
name: string;
size: number;
type?: 'file' | 'directory';
mode?: number;
mtime?: Date;
};
body: string | Uint8Array;
}
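An illustrative ITarEntry literal matching the interface above (all values are placeholders, and the type import assumes ITarEntry is re-exported from the package root via ts_shared):

```ts
import type { ITarEntry } from '@push.rocks/smartarchive';

const entry: ITarEntry = {
  header: {
    name: 'src/index.ts',
    size: 11, // byte length of the body below
    type: 'file',
    mode: 0o644,
    mtime: new Date(),
  },
  body: new TextEncoder().encode('hello world'), // 11 bytes
};
```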

ts_shared/plugins.ts (new file, 22 lines)

@@ -0,0 +1,22 @@
// Browser-compatible plugins for ts_shared
// NO Node.js imports allowed here
// @push.rocks scope (browser-compatible)
import * as smartdelay from '@push.rocks/smartdelay';
import * as smartpromise from '@push.rocks/smartpromise';
import * as smartstream from '@push.rocks/smartstream';
import * as smartfile from '@push.rocks/smartfile';
export {
smartdelay,
smartpromise,
smartstream,
smartfile,
};
// third party scope (browser-compatible)
import * as fileType from 'file-type';
import * as fflate from 'fflate';
import * as modernTar from 'modern-tar';
export { fileType, fflate, modernTar };


@@ -0,0 +1,8 @@
/**
* autocreated commitinfo by @push.rocks/commitinfo
*/
export const commitinfo = {
name: '@push.rocks/smartarchive',
version: '5.1.0',
description: 'A library for working with archive files, providing utilities for compressing and decompressing data.'
}

ts_web/index.ts (new file, 4 lines)

@@ -0,0 +1,4 @@
// ts_web - Browser-compatible entry point
// Re-exports everything from ts_shared
export * from '../ts_shared/index.js';

ts_web/plugins.ts (new file, 3 lines)

@@ -0,0 +1,3 @@
// Browser-compatible plugins for ts_web
// Re-export from ts_shared
export * from '../ts_shared/plugins.js';