Compare commits
4 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 648406d8b4 | |
| | db48fcd455 | |
| | d97e9c1dce | |
| | 6393527c95 | |
22 changelog.md
@@ -1,5 +1,27 @@
# Changelog

## 2026-01-01 - 5.2.0 - feat(tartools)

add streaming TAR support (tar-stream), Node.js streaming APIs for TarTools, and browser / web bundle docs

- Add tar-stream runtime dependency and @types/tar-stream devDependency
- Introduce streaming TarTools APIs: getPackStream, addFileToPack, getExtractStream, extractToDirectory, getDirectoryPackStream, getDirectoryPackStreamGz
- Switch SmartArchive TAR extraction to use tar-stream extract for true streaming ingestion of entries
- Export tarStream in plugins and export ITarPackFileOptions from the Node.js entrypoint
- Update packDirectory/packDirectoryToTarGz to handle files safely and use fflate.gzipSync for buffer-based gzipping
- README updates: document /web browser bundle, browser usage examples, Uint8Array guidance, updated feature table and streaming examples

## 2026-01-01 - 5.1.0 - feat(archive)

introduce ts_shared browser-compatible layer, refactor Node-specific tools to wrap shared implementations, and modernize archive handling

- Split code into ts_shared (browser-compatible) and a ts_web entrypoint; Node-specific wrappers remain under ts/
- Switched TAR implementation from tar-stream to modern-tar and replaced stream-based TAR handling with Uint8Array-based pack/unpack
- Normalized shared APIs to use Uint8Array (instead of Buffer) for browser compatibility — callers may need to adapt Buffer/Uint8Array usage (breaking)
- Moved BZIP2, GZIP, ZIP, TAR logic into ts_shared and updated plugins to re-export shared plugins for web builds
- Adjusted classes.smartarchive to consume shared tools and convert between Buffer and Uint8Array where needed
- Added package.json exports for "." and "./web", bumped several dependency/devDependency versions, and added modern-tar and related changes
- Updated npmextra.json with new scoped configuration and release registries
- Removed pnpm-workspace.yaml entries and deleted several legacy Node-only files in favor of shared implementations

## 2025-11-25 - 5.0.1 - fix(ziptools,gziptools)

Use fflate synchronous APIs for ZIP and GZIP operations for Deno compatibility; add TEntryFilter type and small docs/tests cleanup
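The Uint8Array normalization in 5.1.0 is the one breaking change in this range. A minimal migration sketch, assuming the `packFiles` signature shown in the README diff further down; the Buffer/Uint8Array conversions are plain Node.js APIs, not part of this library:

```typescript
import { TarTools } from '@push.rocks/smartarchive';

const tarTools = new TarTools();

// Shared APIs now return Uint8Array instead of Buffer (breaking in 5.1.0).
const tarBytes: Uint8Array = await tarTools.packFiles([
  { archivePath: 'hello.txt', content: 'Hello, World!' },
]);

// Node.js callers that still need a Buffer can wrap the bytes without copying:
const tarBuffer = Buffer.from(tarBytes.buffer, tarBytes.byteOffset, tarBytes.byteLength);

// Going the other way (Buffer -> Uint8Array) is a zero-copy view as well:
const backToBytes = new Uint8Array(tarBuffer.buffer, tarBuffer.byteOffset, tarBuffer.byteLength);
```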
8 dist_ts/index.d.ts (vendored)
@@ -1,8 +1,4 @@
export * from './interfaces.js';
export * from './errors.js';
export * from '../ts_shared/index.js';
export * from './classes.smartarchive.js';
export * from './classes.tartools.js';
export * from './classes.ziptools.js';
export * from './classes.gziptools.js';
export * from './classes.bzip2tools.js';
export * from './classes.archiveanalyzer.js';
export { TarTools, type ITarPackFileOptions } from './classes.tartools.js';

@@ -1,13 +1,9 @@
// Core types and errors
export * from './interfaces.js';
export * from './errors.js';
// Main archive class
// Re-export everything from ts_shared (browser-compatible)
export * from '../ts_shared/index.js';
// Node.js-specific: Main archive class with filesystem support
export * from './classes.smartarchive.js';
// Format-specific tools
export * from './classes.tartools.js';
export * from './classes.ziptools.js';
export * from './classes.gziptools.js';
export * from './classes.bzip2tools.js';
// Archive analysis
// Node.js-specific: Archive analysis with SmartArchive integration
export * from './classes.archiveanalyzer.js';
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi90cy9pbmRleC50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSx3QkFBd0I7QUFDeEIsY0FBYyxpQkFBaUIsQ0FBQztBQUNoQyxjQUFjLGFBQWEsQ0FBQztBQUU1QixxQkFBcUI7QUFDckIsY0FBYywyQkFBMkIsQ0FBQztBQUUxQyx3QkFBd0I7QUFDeEIsY0FBYyx1QkFBdUIsQ0FBQztBQUN0QyxjQUFjLHVCQUF1QixDQUFDO0FBQ3RDLGNBQWMsd0JBQXdCLENBQUM7QUFDdkMsY0FBYyx5QkFBeUIsQ0FBQztBQUV4QyxtQkFBbUI7QUFDbkIsY0FBYyw4QkFBOEIsQ0FBQyJ9
// Node.js-specific: Extended TarTools with streaming support (overrides shared TarTools)
export { TarTools } from './classes.tartools.js';
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi90cy9pbmRleC50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSwyREFBMkQ7QUFDM0QsY0FBYyx1QkFBdUIsQ0FBQztBQUV0QywrREFBK0Q7QUFDL0QsY0FBYywyQkFBMkIsQ0FBQztBQUUxQyxtRUFBbUU7QUFDbkUsY0FBYyw4QkFBOEIsQ0FBQztBQUU3Qyx5RkFBeUY7QUFDekYsT0FBTyxFQUFFLFFBQVEsRUFBNEIsTUFBTSx1QkFBdUIsQ0FBQyJ9
34 dist_ts/smartarchive.classes.smartarchive.d.ts (vendored)
@@ -1,34 +0,0 @@
/// <reference types="node" resolution-mode="require"/>
import * as plugins from './smartarchive.plugins.js';
export declare class SmartArchive {
    constructor();
    /**
     * extracts an archive from a given url
     */
    extractArchiveFromUrlToFs(urlArg: string, targetDir: string): Promise<void>;
    /**
     * extracts an archive from a given filePath on disk
     * @param filePathArg
     * @param targetDirArg
     */
    extractArchiveFromFilePathToFs(filePathArg: string, targetDirArg: string): Promise<void>;
    /**
     * extracts to Observable
     * where the Observable is emitting smartfiles
     */
    extractArchiveFromBufferToObservable(bufferArg: Buffer): Promise<plugins.smartrx.rxjs.ReplaySubject<plugins.smartfile.Smartfile>>;
    extractArchiveWithIntakeAndReplaySubject(): {
        intake: plugins.smartstream.StreamIntake<Buffer>;
        replaySubject: plugins.smartrx.rxjs.ReplaySubject<plugins.smartfile.Smartfile>;
    };
    /**
     * extracts to Observable
     */
    extractArchiveFromUrlToObservable(urlArg: string): Promise<plugins.smartrx.rxjs.ReplaySubject<plugins.smartfile.Smartfile>>;
    extractArchiveFromUrlToStream(): Promise<void>;
    extractArchiveFromFilePathToStream(): Promise<void>;
    extractArchiveFromStreamToStream(): Promise<void>;
    packFromStreamToStream(): Promise<void>;
    packFromDirPathToStream(): Promise<void>;
    packFromDirPathToFs(): Promise<void>;
}
File diff suppressed because one or more lines are too long
2 dist_ts/smartarchive.paths.d.ts (vendored)
@@ -1,2 +0,0 @@
export declare const packageDir: string;
export declare const nogitDir: string;

@@ -1,4 +0,0 @@
import * as plugins from './smartarchive.plugins.js';
export const packageDir = plugins.path.join(plugins.smartpath.get.dirnameFromImportMetaUrl(import.meta.url), '../');
export const nogitDir = plugins.path.join(packageDir, './.nogit');
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoic21hcnRhcmNoaXZlLnBhdGhzLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vdHMvc21hcnRhcmNoaXZlLnBhdGhzLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBLE9BQU8sS0FBSyxPQUFPLE1BQU0sMkJBQTJCLENBQUM7QUFFckQsTUFBTSxDQUFDLE1BQU0sVUFBVSxHQUFHLE9BQU8sQ0FBQyxJQUFJLENBQUMsSUFBSSxDQUN6QyxPQUFPLENBQUMsU0FBUyxDQUFDLEdBQUcsQ0FBQyx3QkFBd0IsQ0FBQyxNQUFNLENBQUMsSUFBSSxDQUFDLEdBQUcsQ0FBQyxFQUMvRCxLQUFLLENBQ04sQ0FBQztBQUNGLE1BQU0sQ0FBQyxNQUFNLFFBQVEsR0FBRyxPQUFPLENBQUMsSUFBSSxDQUFDLElBQUksQ0FBQyxVQUFVLEVBQUUsVUFBVSxDQUFDLENBQUMifQ==
14 dist_ts/smartarchive.plugins.d.ts (vendored)
@@ -1,14 +0,0 @@
import * as path from 'path';
export { path };
import * as smartfile from '@push.rocks/smartfile';
import * as smartpath from '@push.rocks/smartpath';
import * as smartpromise from '@push.rocks/smartpromise';
import * as smartrequest from '@push.rocks/smartrequest';
import * as smartunique from '@push.rocks/smartunique';
import * as smartstream from '@push.rocks/smartstream';
import * as smartrx from '@push.rocks/smartrx';
export { smartfile, smartpath, smartpromise, smartrequest, smartunique, smartstream, smartrx };
import gunzipMaybe from 'gunzip-maybe';
import tar from 'tar';
import tarStream from 'tar-stream';
export { gunzipMaybe, tar, tarStream };

@@ -1,19 +0,0 @@
// node native scope
import * as path from 'path';
export { path };
// @pushrocks scope
import * as smartfile from '@push.rocks/smartfile';
import * as smartpath from '@push.rocks/smartpath';
import * as smartpromise from '@push.rocks/smartpromise';
import * as smartrequest from '@push.rocks/smartrequest';
import * as smartunique from '@push.rocks/smartunique';
import * as smartstream from '@push.rocks/smartstream';
import * as smartrx from '@push.rocks/smartrx';
export { smartfile, smartpath, smartpromise, smartrequest, smartunique, smartstream, smartrx };
// third party scope
import gunzipMaybe from 'gunzip-maybe';
// @ts-ignore
import tar from 'tar';
import tarStream from 'tar-stream';
export { gunzipMaybe, tar, tarStream };
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoic21hcnRhcmNoaXZlLnBsdWdpbnMuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi90cy9zbWFydGFyY2hpdmUucGx1Z2lucy50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxvQkFBb0I7QUFDcEIsT0FBTyxLQUFLLElBQUksTUFBTSxNQUFNLENBQUM7QUFFN0IsT0FBTyxFQUFFLElBQUksRUFBRSxDQUFDO0FBRWhCLG1CQUFtQjtBQUNuQixPQUFPLEtBQUssU0FBUyxNQUFNLHVCQUF1QixDQUFDO0FBQ25ELE9BQU8sS0FBSyxTQUFTLE1BQU0sdUJBQXVCLENBQUM7QUFDbkQsT0FBTyxLQUFLLFlBQVksTUFBTSwwQkFBMEIsQ0FBQztBQUN6RCxPQUFPLEtBQUssWUFBWSxNQUFNLDBCQUEwQixDQUFDO0FBQ3pELE9BQU8sS0FBSyxXQUFXLE1BQU0seUJBQXlCLENBQUM7QUFDdkQsT0FBTyxLQUFLLFdBQVcsTUFBTSx5QkFBeUIsQ0FBQztBQUN2RCxPQUFPLEtBQUssT0FBTyxNQUFNLHFCQUFxQixDQUFDO0FBRS9DLE9BQU8sRUFBRSxTQUFTLEVBQUUsU0FBUyxFQUFFLFlBQVksRUFBRSxZQUFZLEVBQUUsV0FBVyxFQUFFLFdBQVcsRUFBRSxPQUFPLEVBQUUsQ0FBQztBQUUvRixvQkFBb0I7QUFDcEIsT0FBTyxXQUFXLE1BQU0sY0FBYyxDQUFDO0FBRXZDLGFBQWE7QUFDYixPQUFPLEdBQUcsTUFBTSxLQUFLLENBQUM7QUFDdEIsT0FBTyxTQUFTLE1BQU0sWUFBWSxDQUFDO0FBRW5DLE9BQU8sRUFBRSxXQUFXLEVBQUUsR0FBRyxFQUFFLFNBQVMsRUFBRSxDQUFDIn0=
@@ -1,9 +1,5 @@
{
  "npmci": {
    "npmGlobalTools": [],
    "npmAccessLevel": "public"
  },
  "gitzone": {
  "@git.zone/cli": {
    "projectType": "npm",
    "module": {
      "githost": "code.foss.global",
@@ -25,9 +21,19 @@
        "data analysis",
        "file stream"
      ]
    },
    "release": {
      "registries": [
        "https://verdaccio.lossless.digital",
        "https://registry.npmjs.org"
      ],
      "accessLevel": "public"
    }
  },
  "tsdoc": {
  "@git.zone/tsdoc": {
    "legal": "\n## License and Legal Information\n\nThis repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. \n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or if you require further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n"
  },
  "@ship.zone/szci": {
    "npmGlobalTools": []
  }
  }
}
}
23 package.json
@@ -1,10 +1,14 @@
{
  "name": "@push.rocks/smartarchive",
  "version": "5.0.1",
  "version": "5.2.0",
  "description": "A library for working with archive files, providing utilities for compressing and decompressing data.",
  "main": "dist_ts/index.js",
  "typings": "dist_ts/index.d.ts",
  "type": "module",
  "exports": {
    ".": "./dist_ts/index.js",
    "./web": "./dist_ts_web/index.js"
  },
  "scripts": {
    "test": "(tstest test/ --verbose)",
    "build": "tsbuild --web --allowimplicitany",
@@ -22,31 +26,34 @@
  "homepage": "https://code.foss.global/push.rocks/smartarchive#readme",
  "dependencies": {
    "@push.rocks/smartdelay": "^3.0.5",
    "@push.rocks/smartfile": "^13.0.0",
    "@push.rocks/smartfile": "^13.1.2",
    "@push.rocks/smartpath": "^6.0.0",
    "@push.rocks/smartpromise": "^4.2.3",
    "@push.rocks/smartrequest": "^4.2.2",
    "@push.rocks/smartrequest": "^5.0.1",
    "@push.rocks/smartrx": "^3.0.10",
    "@push.rocks/smartstream": "^3.2.5",
    "@push.rocks/smartunique": "^3.0.9",
    "@push.rocks/smarturl": "^3.1.0",
    "@types/tar-stream": "^3.1.4",
    "fflate": "^0.8.2",
    "file-type": "^21.0.0",
    "file-type": "^21.2.0",
    "modern-tar": "^0.7.3",
    "tar-stream": "^3.1.7"
  },
  "devDependencies": {
    "@git.zone/tsbuild": "^3.1.0",
    "@git.zone/tsrun": "^2.0.0",
    "@git.zone/tstest": "^3.1.3"
    "@git.zone/tsbuild": "^4.0.2",
    "@git.zone/tsrun": "^2.0.1",
    "@git.zone/tstest": "^3.1.4",
    "@types/tar-stream": "^3.1.3"
  },
  "private": false,
  "files": [
    "ts/**/*",
    "ts_shared/**/*",
    "ts_web/**/*",
    "dist/**/*",
    "dist_*/**/*",
    "dist_ts/**/*",
    "dist_ts_shared/**/*",
    "dist_ts_web/**/*",
    "assets/**/*",
    "cli.js",
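The new exports map wires up two entrypoints. A short sketch of what consumers import from each; the class names are the ones re-exported in the entrypoint diffs earlier in this compare:

```typescript
// Node.js entrypoint: the full toolkit, including SmartArchive and the
// streaming-capable TarTools that overrides the shared class.
import { SmartArchive, TarTools } from '@push.rocks/smartarchive';

// Browser entrypoint: tool classes only. SmartArchive requires filesystem
// access and is deliberately absent from the /web bundle.
import { TarTools as WebTarTools, ZipTools } from '@push.rocks/smartarchive/web';
```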
5087 pnpm-lock.yaml (generated)
File diff suppressed because it is too large
pnpm-workspace.yaml

@@ -1,4 +0,0 @@
onlyBuiltDependencies:
  - esbuild
  - mongodb-memory-server
  - puppeteer
202 readme.md
@@ -1,6 +1,6 @@
# @push.rocks/smartarchive 📦

A powerful, streaming-first archive manipulation library with a fluent builder API. Works seamlessly in Node.js and Deno.
A powerful, streaming-first archive manipulation library with a fluent builder API. Works seamlessly in **Node.js**, **Deno**, and **browsers**.

## Issue Reporting and Security

@@ -12,11 +12,12 @@ For reporting bugs, issues, or security vulnerabilities, please visit [community
- 🌊 **Streaming-first architecture** – Process large archives without memory constraints
- ✨ **Fluent builder API** – Chain methods for readable, expressive code
- 🎯 **Smart detection** – Automatically identifies archive types via magic bytes
- ⚡ **High performance** – Built on `tar-stream` and `fflate` for speed
- ⚡ **High performance** – Built on `modern-tar` and `fflate` for speed
- 🔧 **Flexible I/O** – Work with files, URLs, streams, and buffers seamlessly
- 🛠️ **Modern TypeScript** – Full type safety and excellent IDE support
- 🔄 **Dual-mode operation** – Extract existing archives OR create new ones
- 🦕 **Cross-runtime** – Works in both Node.js and Deno environments
- 🦕 **Cross-runtime** – Works in Node.js, Deno, and browsers
- 🌐 **Browser-ready** – Dedicated browser bundle with zero Node.js dependencies

## Installation 📥

@@ -71,6 +72,59 @@ await SmartArchive.create()
  .extract('./node_modules/lodash');
```

## Browser Usage 🌐

smartarchive provides a dedicated browser-compatible bundle with no Node.js dependencies:

```typescript
// Import from the /web subpath for browser environments
import { TarTools, ZipTools, GzipTools, Bzip2Tools } from '@push.rocks/smartarchive/web';

// Create a TAR archive in the browser
const tarTools = new TarTools();
const tarBuffer = await tarTools.packFiles([
  { archivePath: 'hello.txt', content: 'Hello from the browser!' },
  { archivePath: 'data.json', content: JSON.stringify({ browser: true }) }
]);

// Create a TAR.GZ archive
const tgzBuffer = await tarTools.packFilesToTarGz([
  { archivePath: 'file.txt', content: 'Compressed!' }
], 6);

// Extract a TAR archive
const entries = await tarTools.extractTar(tarBuffer);
for (const entry of entries) {
  console.log(`${entry.path}: ${entry.content.length} bytes`);
}

// Work with ZIP files
const zipTools = new ZipTools();
const zipBuffer = await zipTools.createZip([
  { archivePath: 'doc.txt', content: 'Document content' }
], 6);

const zipEntries = await zipTools.extractZip(zipBuffer);

// GZIP compression
const gzipTools = new GzipTools();
const compressed = gzipTools.compressSync(new TextEncoder().encode('Hello World'), 6);
const decompressed = gzipTools.decompressSync(compressed);
```

### Browser Bundle Exports

The `/web` subpath exports these browser-compatible tools:

| Export | Description |
|--------|-------------|
| `TarTools` | Create and extract TAR and TAR.GZ archives |
| `ZipTools` | Create and extract ZIP archives |
| `GzipTools` | GZIP compression and decompression |
| `Bzip2Tools` | BZIP2 decompression (extraction only) |

> 💡 **Note:** The browser bundle does **not** include `SmartArchive` (which requires filesystem access). Use the individual tool classes for browser applications.

## Core Concepts 💡

### Fluent Builder Pattern
@@ -294,21 +348,14 @@ await SmartArchive.create()
// Use GzipTools directly for compression/decompression
const gzipTools = new GzipTools();

// Compress a buffer
const compressed = await gzipTools.compress(Buffer.from('Hello World'), 9);
const decompressed = await gzipTools.decompress(compressed);
// Compress a buffer (sync and async available)
const input = new TextEncoder().encode('Hello World');
const compressed = gzipTools.compressSync(input, 9);
const decompressed = gzipTools.decompressSync(compressed);

// Synchronous operations
const compressedSync = gzipTools.compressSync(inputBuffer, 6);
const decompressedSync = gzipTools.decompressSync(compressedSync);

// Streaming
const compressStream = gzipTools.getCompressionStream(6);
const decompressStream = gzipTools.getDecompressionStream();

createReadStream('./input.txt')
  .pipe(compressStream)
  .pipe(createWriteStream('./output.gz'));
// Async versions (internally use sync for cross-runtime compatibility)
const compressedAsync = await gzipTools.compress(input, 6);
const decompressedAsync = await gzipTools.decompress(compressedAsync);
```

### Working with TAR archives directly
@@ -318,27 +365,90 @@ import { TarTools } from '@push.rocks/smartarchive';

const tarTools = new TarTools();

// Create a TAR archive manually
const pack = await tarTools.getPackStream();
// Create a TAR archive from entries (buffer-based, good for small files)
const tarBuffer = await tarTools.packFiles([
  { archivePath: 'hello.txt', content: 'Hello, World!' },
  { archivePath: 'data.json', content: JSON.stringify({ foo: 'bar' }) }
]);

// Create a TAR.GZ archive
const tgzBuffer = await tarTools.packFilesToTarGz([
  { archivePath: 'file.txt', content: 'Compressed content' }
], 6);

// Extract a TAR archive
const entries = await tarTools.extractTar(tarBuffer);
for (const entry of entries) {
  console.log(`${entry.path}: ${entry.isDirectory ? 'dir' : 'file'}`);
}

// Extract a TAR.GZ archive
const tgzEntries = await tarTools.extractTarGz(tgzBuffer);

// Node.js only: Pack a directory (buffer-based)
const dirBuffer = await tarTools.packDirectory('./src');
const dirTgzBuffer = await tarTools.packDirectoryToTarGz('./src', 9);
```

### Streaming TAR for Large Files (Node.js only) 🚀

For large files that don't fit in memory, use the streaming APIs:

```typescript
import { TarTools } from '@push.rocks/smartarchive';
import * as fs from 'fs';

const tarTools = new TarTools();

// ===== STREAMING PACK =====
// Create a TAR pack stream - files are processed one at a time
const pack = tarTools.getPackStream();

// Add files with streaming content (requires size for streams)
await tarTools.addFileToPack(pack, {
  fileName: 'hello.txt',
  content: 'Hello, World!'
  fileName: 'small.txt',
  content: 'Hello World' // Strings and buffers auto-detect size
});

await tarTools.addFileToPack(pack, {
  fileName: 'data.json',
  content: Buffer.from(JSON.stringify({ foo: 'bar' }))
  fileName: 'large-video.mp4',
  content: fs.createReadStream('./video.mp4'),
  size: fs.statSync('./video.mp4').size // Size required for streams
});

pack.finalize();
pack.pipe(createWriteStream('./output.tar'));
pack.pipe(fs.createWriteStream('output.tar'));

// Pack a directory to TAR.GZ buffer
const tgzBuffer = await tarTools.packDirectoryToTarGz('./src', 6);
// ===== STREAMING DIRECTORY PACK =====
// Pack entire directory with true streaming (no buffering)
const tarStream = await tarTools.getDirectoryPackStream('./large-folder');
tarStream.pipe(fs.createWriteStream('backup.tar'));

// Pack a directory to TAR.GZ stream
const tgzStream = await tarTools.packDirectoryToTarGzStream('./src');
// With GZIP compression
const tgzStream = await tarTools.getDirectoryPackStreamGz('./large-folder', 6);
tgzStream.pipe(fs.createWriteStream('backup.tar.gz'));

// ===== STREAMING EXTRACT =====
// Extract large archives without loading into memory
const extract = tarTools.getExtractStream();

extract.on('entry', (header, stream, next) => {
  console.log(`Extracting: ${header.name} (${header.size} bytes)`);

  const writeStream = fs.createWriteStream(`./out/${header.name}`);
  stream.pipe(writeStream);
  writeStream.on('finish', next);
});

extract.on('finish', () => console.log('Extraction complete'));

fs.createReadStream('large-archive.tar').pipe(extract);

// Or use the convenient directory extraction
await tarTools.extractToDirectory(
  fs.createReadStream('archive.tar'),
  './output-folder'
);
```

### Working with ZIP archives directly
@@ -351,7 +461,7 @@ const zipTools = new ZipTools();

// Create a ZIP archive from entries
const zipBuffer = await zipTools.createZip([
  { archivePath: 'readme.txt', content: 'Hello!' },
  { archivePath: 'data.bin', content: Buffer.from([0x00, 0x01, 0x02]) }
  { archivePath: 'data.bin', content: new Uint8Array([0x00, 0x01, 0x02]) }
], 6);

// Extract a ZIP buffer
@@ -448,13 +558,13 @@ fileStream.on('data', async (file) => {

## Supported Formats 📋

| Format | Extension(s) | Extract | Create |
|--------|--------------|---------|--------|
| TAR | `.tar` | ✅ | ✅ |
| TAR.GZ / TGZ | `.tar.gz`, `.tgz` | ✅ | ✅ |
| ZIP | `.zip` | ✅ | ✅ |
| GZIP | `.gz` | ✅ | ✅ |
| BZIP2 | `.bz2` | ✅ | ❌ |
| Format | Extension(s) | Extract | Create | Browser |
|--------|--------------|---------|--------|---------|
| TAR | `.tar` | ✅ | ✅ | ✅ |
| TAR.GZ / TGZ | `.tar.gz`, `.tgz` | ✅ | ✅ | ✅ |
| ZIP | `.zip` | ✅ | ✅ | ✅ |
| GZIP | `.gz` | ✅ | ✅ | ✅ |
| BZIP2 | `.bz2` | ✅ | ❌ | ✅ |

## Type Definitions

@@ -468,7 +578,7 @@ type TCompressionLevel = 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9;
// Entry for creating archives
interface IArchiveEntry {
  archivePath: string;
  content: string | Buffer | Readable | SmartFile | StreamFile;
  content: string | Buffer | Uint8Array | SmartFile | StreamFile;
  size?: number;
  mode?: number;
  mtime?: Date;
@@ -496,9 +606,9 @@ interface IArchiveInfo {
## Performance Tips 🏎️

1. **Use streaming for large files** – `.toStreamFiles()` processes entries one at a time without loading the entire archive
2. **Provide byte lengths when known** – When using TarTools directly, provide `byteLength` for better performance
3. **Choose appropriate compression** – Use 1-3 for speed, 6 (default) for balance, 9 for maximum compression
4. **Filter early** – Use `.include()`/`.exclude()` to skip unwanted entries before processing
2. **Choose appropriate compression** – Use 1-3 for speed, 6 (default) for balance, 9 for maximum compression
3. **Filter early** – Use `.include()`/`.exclude()` to skip unwanted entries before processing
4. **Use Uint8Array in browsers** – The browser bundle works with `Uint8Array` for optimal performance

## Error Handling 🛡️

@@ -524,23 +634,21 @@ try {

## License and Legal Information

This repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository.
This repository contains open-source code licensed under the MIT License. A copy of the license can be found in the [LICENSE](./LICENSE) file.

**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.

### Trademarks

This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.
This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH or third parties, and are not included within the scope of the MIT license granted herein.

### Issue Reporting and Security

For reporting bugs, issues, or security vulnerabilities, please visit [community.foss.global/](https://community.foss.global/). This is the central community hub for all issue reporting. Developers who sign and comply with our contribution agreement and go through identification can also get a [code.foss.global/](https://code.foss.global/) account to submit Pull Requests directly.
Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines or the guidelines of the respective third-party owners, and any usage must be approved in writing. Third-party trademarks used herein are the property of their respective owners and used only in a descriptive manner, e.g. for an implementation of an API or similar.

### Company Information

Task Venture Capital GmbH
Registered at District court Bremen HRB 35230 HB, Germany
Registered at District Court Bremen HRB 35230 HB, Germany

For any legal inquiries or if you require further information, please contact us via email at hello@task.vc.
For any legal inquiries or further information, please contact us via email at hello@task.vc.

By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.
@@ -3,6 +3,6 @@
 */
export const commitinfo = {
  name: '@push.rocks/smartarchive',
  version: '5.0.1',
  version: '5.2.0',
  description: 'A library for working with archive files, providing utilities for compressing and decompressing data.'
}
ts/classes.archiveanalyzer.ts

@@ -1,5 +1,5 @@
import type { SmartArchive } from './classes.smartarchive.js';
import type { TSupportedMime } from './interfaces.js';
import type { TSupportedMime } from '../ts_shared/interfaces.js';
import * as plugins from './plugins.js';

/**
@@ -8,7 +8,7 @@ import * as plugins from './plugins.js';
export type TDecompressionStream =
  | plugins.stream.Transform
  | plugins.stream.Duplex
  | plugins.tarStream.Extract;
  | plugins.smartstream.SmartDuplex<any, any>;

/**
 * Result of archive analysis
@@ -53,14 +53,42 @@ export class ArchiveAnalyzer {
   */
  private async getDecompressionStream(mimeTypeArg: TSupportedMime): Promise<TDecompressionStream> {
    switch (mimeTypeArg) {
      case 'application/gzip':
        return this.smartArchiveRef.gzipTools.getDecompressionStream();
      case 'application/gzip': {
        // Use fflate streaming Gunzip - instance must be created once and reused
        let gunzip: plugins.fflate.Gunzip;
        return new plugins.stream.Transform({
          construct(callback) {
            gunzip = new plugins.fflate.Gunzip((data, final) => {
              this.push(Buffer.from(data));
            });
            callback();
          },
          transform(chunk, encoding, callback) {
            try {
              gunzip.push(chunk, false);
              callback();
            } catch (err) {
              callback(err as Error);
            }
          },
          flush(callback) {
            try {
              // Signal end of stream with empty final chunk
              gunzip.push(new Uint8Array(0), true);
              callback();
            } catch (err) {
              callback(err as Error);
            }
          }
        });
      }
      case 'application/zip':
        return this.smartArchiveRef.zipTools.getDecompressionStream();
      case 'application/x-bzip2':
        return this.smartArchiveRef.bzip2Tools.getDecompressionStream();
      case 'application/x-tar':
        return this.smartArchiveRef.tarTools.getDecompressionStream();
        // TAR doesn't need decompression, just pass through
        return plugins.smartstream.createPassThrough();
      default:
        // Handle unsupported formats or no decompression needed
        return plugins.smartstream.createPassThrough();
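The gzip branch above wires fflate's streaming Gunzip into a Node.js Transform: the Gunzip instance is created once in construct() and reused for every chunk. A standalone sketch of the same pattern, assuming only the fflate and node:stream packages (the plugins.* indirection is dropped):

```typescript
import { Gunzip, gzipSync } from 'fflate';
import { Transform } from 'node:stream';

// Wrap fflate's streaming Gunzip in a Node.js Transform.
// The Gunzip instance is created once in construct() and reused per chunk.
function createGunzipTransform(): Transform {
  let gunzip: Gunzip;
  return new Transform({
    construct(callback) {
      gunzip = new Gunzip((data) => {
        this.push(Buffer.from(data));
      });
      callback();
    },
    transform(chunk, _encoding, callback) {
      try {
        gunzip.push(chunk, false);
        callback();
      } catch (err) {
        callback(err as Error);
      }
    },
    flush(callback) {
      try {
        gunzip.push(new Uint8Array(0), true); // signal the final chunk
        callback();
      } catch (err) {
        callback(err as Error);
      }
    },
  });
}

// Usage: pipe gzipped bytes through and collect the plain text.
const stream = createGunzipTransform();
stream.on('data', (chunk) => console.log(chunk.toString()));
stream.end(gzipSync(new TextEncoder().encode('hello gunzip')));
```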
ts/classes.bzip2tools.ts

@@ -1,16 +0,0 @@
import type { SmartArchive } from './classes.smartarchive.js';
import * as plugins from './plugins.js';

import { unbzip2Stream } from './bzip2/index.js';

export class Bzip2Tools {
  smartArchiveRef: SmartArchive;

  constructor(smartArchiveRefArg: SmartArchive) {
    this.smartArchiveRef = smartArchiveRefArg;
  }

  getDecompressionStream() {
    return unbzip2Stream();
  }
}
ts/classes.gziptools.ts

@@ -1,138 +0,0 @@
import * as plugins from './plugins.js';
import type { TCompressionLevel } from './interfaces.js';

/**
 * Transform stream for GZIP compression using fflate
 */
export class GzipCompressionTransform extends plugins.stream.Transform {
  private gzip: plugins.fflate.Gzip;

  constructor(level: TCompressionLevel = 6) {
    super();

    // Create a streaming Gzip compressor
    this.gzip = new plugins.fflate.Gzip({ level }, (chunk, final) => {
      this.push(Buffer.from(chunk));
      if (final) {
        this.push(null);
      }
    });
  }

  _transform(
    chunk: Buffer,
    encoding: BufferEncoding,
    callback: plugins.stream.TransformCallback
  ): void {
    try {
      this.gzip.push(chunk, false);
      callback();
    } catch (err) {
      callback(err as Error);
    }
  }

  _flush(callback: plugins.stream.TransformCallback): void {
    try {
      this.gzip.push(new Uint8Array(0), true);
      callback();
    } catch (err) {
      callback(err as Error);
    }
  }
}

/**
 * Transform stream for GZIP decompression using fflate
 */
export class GzipDecompressionTransform extends plugins.stream.Transform {
  private gunzip: plugins.fflate.Gunzip;

  constructor() {
    super();

    // Create a streaming Gunzip decompressor
    this.gunzip = new plugins.fflate.Gunzip((chunk, final) => {
      this.push(Buffer.from(chunk));
      if (final) {
        this.push(null);
      }
    });
  }

  _transform(
    chunk: Buffer,
    encoding: BufferEncoding,
    callback: plugins.stream.TransformCallback
  ): void {
    try {
      this.gunzip.push(chunk, false);
      callback();
    } catch (err) {
      callback(err as Error);
    }
  }

  _flush(callback: plugins.stream.TransformCallback): void {
    try {
      this.gunzip.push(new Uint8Array(0), true);
      callback();
    } catch (err) {
      callback(err as Error);
    }
  }
}

/**
 * GZIP compression and decompression utilities
 */
export class GzipTools {
  /**
   * Get a streaming compression transform
   */
  public getCompressionStream(level?: TCompressionLevel): plugins.stream.Transform {
    return new GzipCompressionTransform(level);
  }

  /**
   * Get a streaming decompression transform
   */
  public getDecompressionStream(): plugins.stream.Transform {
    return new GzipDecompressionTransform();
  }

  /**
   * Compress data synchronously
   */
  public compressSync(data: Buffer, level?: TCompressionLevel): Buffer {
    const options = level !== undefined ? { level } : undefined;
    return Buffer.from(plugins.fflate.gzipSync(data, options));
  }

  /**
   * Decompress data synchronously
   */
  public decompressSync(data: Buffer): Buffer {
    return Buffer.from(plugins.fflate.gunzipSync(data));
  }

  /**
   * Compress data asynchronously
   * Note: Uses sync version for Deno compatibility (fflate async uses Web Workers
   * which have issues in Deno)
   */
  public async compress(data: Buffer, level?: TCompressionLevel): Promise<Buffer> {
    // Use sync version wrapped in Promise for cross-runtime compatibility
    return this.compressSync(data, level);
  }

  /**
   * Decompress data asynchronously
   * Note: Uses sync version for Deno compatibility (fflate async uses Web Workers
   * which have issues in Deno)
   */
  public async decompress(data: Buffer): Promise<Buffer> {
    // Use sync version wrapped in Promise for cross-runtime compatibility
    return this.decompressSync(data);
  }
}
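The deleted class documents why its async methods are thin wrappers. The same pattern, reduced to a standalone sketch using fflate directly (the function names here are illustrative, not the library's API):

```typescript
import { gzipSync, gunzipSync } from 'fflate';

// fflate's async API spawns Web Workers, which are problematic under Deno,
// so the async variants simply wrap the sync calls in a resolved Promise.
async function compress(data: Uint8Array): Promise<Uint8Array> {
  return gzipSync(data, { level: 6 });
}

async function decompress(data: Uint8Array): Promise<Uint8Array> {
  return gunzipSync(data);
}
```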
ts/classes.smartarchive.ts

@@ -6,12 +6,15 @@ import type {
  TArchiveFormat,
  TCompressionLevel,
  TEntryFilter,
} from './interfaces.js';
} from '../ts_shared/interfaces.js';

import { Bzip2Tools } from './classes.bzip2tools.js';
import { GzipTools } from './classes.gziptools.js';
// Import browser-compatible tools from ts_shared
import { Bzip2Tools } from '../ts_shared/classes.bzip2tools.js';
import { GzipTools } from '../ts_shared/classes.gziptools.js';
import { ZipTools } from '../ts_shared/classes.ziptools.js';

// Import Node.js-extended TarTools
import { TarTools } from './classes.tartools.js';
import { ZipTools } from './classes.ziptools.js';
import { ArchiveAnalyzer, type IAnalyzedResult } from './classes.archiveanalyzer.js';

/**
@@ -62,7 +65,7 @@ export class SmartArchive {
  public tarTools = new TarTools();
  public zipTools = new ZipTools();
  public gzipTools = new GzipTools();
  public bzip2Tools = new Bzip2Tools(this);
  public bzip2Tools = new Bzip2Tools();
  public archiveAnalyzer = new ArchiveAnalyzer(this);

  // ============================================
@@ -173,7 +176,7 @@ export class SmartArchive {
  public entry(archivePath: string, content: string | Buffer): this {
    this.ensureNotInExtractMode('entry');
    if (!this._mode) this._mode = 'create';
    this.pendingEntries.push({ archivePath, content });
    this.pendingEntries.push({ archivePath, content: content instanceof Buffer ? new Uint8Array(content) : content });
    return this;
  }

@@ -184,7 +187,10 @@
    this.ensureNotInExtractMode('entries');
    if (!this._mode) this._mode = 'create';
    for (const e of entriesArg) {
      this.pendingEntries.push({ archivePath: e.archivePath, content: e.content });
      this.pendingEntries.push({
        archivePath: e.archivePath,
        content: e.content instanceof Buffer ? new Uint8Array(e.content) : e.content
      });
    }
    return this;
  }
@@ -374,30 +380,41 @@ export class SmartArchive {
  plugins.smartstream.createTransformFunction<IAnalyzedResult, void>(
    async (analyzedResultChunk) => {
      if (analyzedResultChunk.fileType?.mime === 'application/x-tar') {
        const tarStream = analyzedResultChunk.decompressionStream as plugins.tarStream.Extract;
        // Use tar-stream for streaming TAR extraction
        // Buffer each entry to ensure tar-stream can proceed to next entry
        const extract = this.tarTools.getExtractStream();

        tarStream.on('entry', async (header, stream, next) => {
        extract.on('entry', (header, stream, next) => {
          if (header.type === 'directory') {
            stream.resume();
            stream.on('end', () => next());
            stream.resume(); // Drain the stream
            next();
            return;
          }

          const passThrough = new plugins.stream.PassThrough();
          const streamfile = plugins.smartfile.StreamFile.fromStream(passThrough, header.name);
          streamFileIntake.push(streamfile);
          stream.pipe(passThrough);
          // Buffer the entry content to avoid blocking tar-stream
          const chunks: Buffer[] = [];
          stream.on('data', (chunk: Buffer) => chunks.push(chunk));
          stream.on('end', () => {
            passThrough.end();
            const content = Buffer.concat(chunks);
            const streamFile = plugins.smartfile.StreamFile.fromBuffer(content);
            streamFile.relativeFilePath = header.name;
            streamFileIntake.push(streamFile);
            next();
          });
          stream.on('error', (err: Error) => {
            streamFileIntake.emit('error', err);
          });
        });

        tarStream.on('finish', () => {
        extract.on('finish', () => {
          safeSignalEnd();
        });

        analyzedResultChunk.resultStream.pipe(analyzedResultChunk.decompressionStream);
        extract.on('error', (err: Error) => {
          streamFileIntake.emit('error', err);
        });

        analyzedResultChunk.resultStream.pipe(extract);
      } else if (analyzedResultChunk.fileType?.mime === 'application/zip') {
        analyzedResultChunk.resultStream
          .pipe(analyzedResultChunk.decompressionStream)
@@ -544,25 +561,29 @@

    if (this.creationFormat === 'tar' || this.creationFormat === 'tar.gz' || this.creationFormat === 'tgz') {
      if (this.creationFormat === 'tar') {
        this.archiveBuffer = await this.tarTools.packFiles(entries);
        const result = await this.tarTools.packFiles(entries);
        this.archiveBuffer = Buffer.from(result);
      } else {
        this.archiveBuffer = await this.tarTools.packFilesToTarGz(entries, this._compressionLevel);
        const result = await this.tarTools.packFilesToTarGz(entries, this._compressionLevel);
        this.archiveBuffer = Buffer.from(result);
      }
    } else if (this.creationFormat === 'zip') {
      this.archiveBuffer = await this.zipTools.createZip(entries, this._compressionLevel);
      const result = await this.zipTools.createZip(entries, this._compressionLevel);
      this.archiveBuffer = Buffer.from(result);
    } else if (this.creationFormat === 'gz') {
      if (entries.length !== 1) {
        throw new Error('GZIP format only supports a single file');
      }
      let content: Buffer;
      let content: Uint8Array;
      if (typeof entries[0].content === 'string') {
        content = Buffer.from(entries[0].content);
      } else if (Buffer.isBuffer(entries[0].content)) {
        content = new TextEncoder().encode(entries[0].content);
      } else if (entries[0].content instanceof Uint8Array) {
        content = entries[0].content;
      } else {
        throw new Error('GZIP format requires string or Buffer content');
        throw new Error('GZIP format requires string or Uint8Array content');
      }
      this.archiveBuffer = await this.gzipTools.compress(content, this._compressionLevel);
      const result = await this.gzipTools.compress(content, this._compressionLevel);
      this.archiveBuffer = Buffer.from(result);
    } else {
      throw new Error(`Unsupported format: ${this.creationFormat}`);
    }
@@ -808,7 +829,7 @@
      const content = await plugins.fsPromises.readFile(absolutePath);
      this.pendingEntries.push({
        archivePath,
        content,
        content: new Uint8Array(content),
      });
    }
  }
@@ -1,208 +1,272 @@
|
||||
import * as plugins from './plugins.js';
|
||||
import type { IArchiveEntry, TCompressionLevel } from './interfaces.js';
|
||||
import { GzipTools } from './classes.gziptools.js';
|
||||
import type { IArchiveEntry, TCompressionLevel } from '../ts_shared/interfaces.js';
|
||||
import { TarTools as SharedTarTools } from '../ts_shared/classes.tartools.js';
|
||||
|
||||
/**
|
||||
* TAR archive creation and extraction utilities
|
||||
* Options for adding a file to a TAR pack stream
|
||||
*/
|
||||
export class TarTools {
|
||||
export interface ITarPackFileOptions {
|
||||
fileName: string;
|
||||
content: string | Buffer | Uint8Array | plugins.stream.Readable;
|
||||
size?: number;
|
||||
mode?: number;
|
||||
mtime?: Date;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extended TAR archive utilities with Node.js streaming support
|
||||
*
|
||||
* For small archives: Use inherited buffer-based methods (packFiles, extractTar, etc.)
|
||||
* For large archives: Use streaming methods (getPackStream, getExtractStream, etc.)
|
||||
*/
|
||||
export class TarTools extends SharedTarTools {
|
||||
// ============================================
|
||||
// STREAMING PACK METHODS (for large files)
|
||||
// ============================================
|
||||
|
||||
/**
|
||||
* Add a file to a TAR pack stream
|
||||
* Get a streaming TAR pack instance
|
||||
* Use this for packing large files without buffering everything in memory
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const pack = tarTools.getPackStream();
|
||||
*
|
||||
* await tarTools.addFileToPack(pack, { fileName: 'large.bin', content: readStream, size: fileSize });
|
||||
* await tarTools.addFileToPack(pack, { fileName: 'small.txt', content: 'Hello World' });
|
||||
*
|
||||
* pack.finalize();
|
||||
* pack.pipe(fs.createWriteStream('output.tar'));
|
||||
* ```
|
||||
*/
|
||||
public async addFileToPack(
|
||||
pack: plugins.tarStream.Pack,
|
||||
optionsArg: {
|
||||
fileName?: string;
|
||||
content?:
|
||||
| string
|
||||
| Buffer
|
||||
| plugins.stream.Readable
|
||||
| plugins.smartfile.SmartFile
|
||||
| plugins.smartfile.StreamFile;
|
||||
byteLength?: number;
|
||||
filePath?: string;
|
||||
}
|
||||
): Promise<void> {
|
||||
return new Promise<void>(async (resolve, reject) => {
|
||||
let fileName: string | null = null;
|
||||
|
||||
if (optionsArg.fileName) {
|
||||
fileName = optionsArg.fileName;
|
||||
} else if (optionsArg.content instanceof plugins.smartfile.SmartFile) {
|
||||
fileName = optionsArg.content.relative;
|
||||
} else if (optionsArg.content instanceof plugins.smartfile.StreamFile) {
|
||||
fileName = optionsArg.content.relativeFilePath;
|
||||
} else if (optionsArg.filePath) {
|
||||
fileName = optionsArg.filePath;
|
||||
}
|
||||
|
||||
if (!fileName) {
|
||||
reject(new Error('No filename specified for TAR entry'));
|
||||
return;
|
||||
}
|
||||
|
||||
// Determine content byte length
|
||||
let contentByteLength: number | undefined;
|
||||
if (optionsArg.byteLength) {
|
||||
contentByteLength = optionsArg.byteLength;
|
||||
} else if (typeof optionsArg.content === 'string') {
|
||||
contentByteLength = Buffer.byteLength(optionsArg.content, 'utf8');
|
||||
} else if (Buffer.isBuffer(optionsArg.content)) {
|
||||
contentByteLength = optionsArg.content.length;
|
||||
} else if (optionsArg.content instanceof plugins.smartfile.SmartFile) {
|
||||
contentByteLength = await optionsArg.content.getSize();
|
||||
} else if (optionsArg.content instanceof plugins.smartfile.StreamFile) {
|
||||
contentByteLength = await optionsArg.content.getSize();
|
||||
} else if (optionsArg.filePath) {
|
||||
const fileStat = await plugins.fsPromises.stat(optionsArg.filePath);
|
||||
contentByteLength = fileStat.size;
|
||||
}
|
||||
|
||||
// Convert all content types to Readable stream
|
||||
let content: plugins.stream.Readable;
|
||||
if (Buffer.isBuffer(optionsArg.content)) {
|
||||
content = plugins.stream.Readable.from(optionsArg.content);
|
||||
} else if (typeof optionsArg.content === 'string') {
|
||||
content = plugins.stream.Readable.from(Buffer.from(optionsArg.content));
|
||||
} else if (optionsArg.content instanceof plugins.smartfile.SmartFile) {
|
||||
content = plugins.stream.Readable.from(optionsArg.content.contents);
|
||||
} else if (optionsArg.content instanceof plugins.smartfile.StreamFile) {
|
||||
content = await optionsArg.content.createReadStream();
|
||||
} else if (optionsArg.content instanceof plugins.stream.Readable) {
|
||||
content = optionsArg.content;
|
||||
} else if (optionsArg.filePath) {
|
||||
content = plugins.fs.createReadStream(optionsArg.filePath);
|
||||
} else {
|
||||
reject(new Error('No content or filePath specified for TAR entry'));
|
||||
return;
|
||||
}
|
||||
|
||||
const entry = pack.entry(
|
||||
{
|
||||
name: fileName,
|
||||
...(contentByteLength !== undefined ? { size: contentByteLength } : {}),
|
||||
},
|
||||
(err: Error | null) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
resolve();
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
content.pipe(entry);
|
||||
// Note: resolve() is called in the callback above when pipe completes
|
||||
});
|
||||
public getPackStream(): plugins.tarStream.Pack {
|
||||
return plugins.tarStream.pack();
|
||||
}
|
||||
|
||||
/**
|
||||
* Pack a directory into a TAR stream
|
||||
* Add a file to a TAR pack stream
|
||||
* Supports strings, buffers, and readable streams
|
||||
*
|
||||
* @param pack - The pack stream from getPackStream()
|
||||
* @param options - File options including name, content, and optional metadata
|
||||
*/
|
||||
public async packDirectory(directoryPath: string): Promise<plugins.tarStream.Pack> {
|
||||
const fileTree = await plugins.listFileTree(directoryPath, '**/*');
|
||||
const pack = await this.getPackStream();
|
||||
public async addFileToPack(
|
||||
pack: plugins.tarStream.Pack,
|
||||
options: ITarPackFileOptions
|
||||
): Promise<void> {
|
||||
const { fileName, content, mode = 0o644, mtime = new Date() } = options;
|
||||
|
||||
for (const filePath of fileTree) {
|
||||
const absolutePath = plugins.path.join(directoryPath, filePath);
|
||||
const fileStat = await plugins.fsPromises.stat(absolutePath);
|
||||
await this.addFileToPack(pack, {
|
||||
byteLength: fileStat.size,
|
||||
filePath: absolutePath,
|
||||
fileName: filePath,
|
||||
content: plugins.fs.createReadStream(absolutePath),
|
||||
if (typeof content === 'string') {
|
||||
// String content - convert to buffer
|
||||
const buffer = Buffer.from(content, 'utf8');
|
||||
const entry = pack.entry({
|
||||
name: fileName,
|
||||
size: buffer.length,
|
||||
mode,
|
||||
mtime,
|
||||
});
|
||||
entry.write(buffer);
|
||||
entry.end();
|
||||
} else if (Buffer.isBuffer(content) || content instanceof Uint8Array) {
|
||||
// Buffer content
|
||||
const buffer = Buffer.isBuffer(content) ? content : Buffer.from(content);
|
||||
const entry = pack.entry({
|
||||
name: fileName,
|
||||
size: buffer.length,
|
||||
mode,
|
||||
mtime,
|
||||
});
|
||||
entry.write(buffer);
|
||||
entry.end();
|
||||
} else if (content && typeof (content as any).pipe === 'function') {
|
||||
// Readable stream - requires size to be provided
|
||||
const size = options.size;
|
||||
if (size === undefined) {
|
||||
throw new Error('Size must be provided when adding a stream to TAR pack');
|
||||
}
|
||||
|
||||
return new Promise<void>((resolve, reject) => {
|
||||
const entry = pack.entry({
|
||||
name: fileName,
|
||||
size,
|
||||
mode,
|
||||
mtime,
|
||||
}, (err) => {
|
||||
if (err) reject(err);
|
||||
else resolve();
|
||||
});
|
||||
|
||||
(content as plugins.stream.Readable).pipe(entry);
|
||||
});
|
||||
} else {
|
||||
throw new Error('Unsupported content type for TAR entry');
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================
|
||||
// STREAMING EXTRACT METHODS (for large files)
|
||||
// ============================================
|
||||
|
||||
/**
|
||||
* Get a streaming TAR extract instance
|
||||
* Use this for extracting large archives without buffering everything in memory
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const extract = tarTools.getExtractStream();
|
||||
*
|
||||
* extract.on('entry', (header, stream, next) => {
|
||||
* console.log(`Extracting: ${header.name}`);
|
||||
* stream.pipe(fs.createWriteStream(`./out/${header.name}`));
|
||||
* stream.on('end', next);
|
||||
* });
|
||||
*
|
||||
* fs.createReadStream('archive.tar').pipe(extract);
|
||||
* ```
|
||||
*/
|
||||
public getExtractStream(): plugins.tarStream.Extract {
|
||||
return plugins.tarStream.extract();
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract a TAR stream to a directory with true streaming (no buffering)
|
||||
*
|
||||
* @param sourceStream - The TAR archive stream
|
||||
* @param targetDir - Directory to extract files to
|
||||
*/
|
||||
public async extractToDirectory(
|
||||
sourceStream: plugins.stream.Readable,
|
||||
targetDir: string
|
||||
): Promise<void> {
|
||||
await plugins.fsPromises.mkdir(targetDir, { recursive: true });
|
||||
|
||||
return new Promise<void>((resolve, reject) => {
|
||||
const extract = this.getExtractStream();
|
||||
|
||||
extract.on('entry', async (header, stream, next) => {
|
||||
const filePath = plugins.path.join(targetDir, header.name);
|
||||
|
||||
if (header.type === 'directory') {
|
||||
await plugins.fsPromises.mkdir(filePath, { recursive: true });
|
||||
stream.resume(); // Drain the stream
|
||||
next();
|
||||
} else if (header.type === 'file') {
|
||||
await plugins.fsPromises.mkdir(plugins.path.dirname(filePath), { recursive: true });
|
||||
const writeStream = plugins.fs.createWriteStream(filePath);
|
||||
stream.pipe(writeStream);
|
||||
writeStream.on('finish', next);
|
||||
writeStream.on('error', reject);
|
||||
} else {
|
||||
stream.resume(); // Skip other types
|
||||
next();
|
||||
}
|
||||
});
|
||||
|
||||
extract.on('finish', resolve);
|
||||
extract.on('error', reject);
|
||||
|
||||
sourceStream.pipe(extract);
|
||||
});
|
||||
}
|
||||
|
||||
// ============================================
|
||||
// STREAMING DIRECTORY PACK (for large directories)
|
||||
// ============================================
|
||||
|
||||
/**
|
||||
* Pack a directory into a TAR stream with true streaming (no buffering)
|
||||
* Files are read and written one at a time, never loading everything into memory
|
||||
*/
|
||||
public async getDirectoryPackStream(directoryPath: string): Promise<plugins.tarStream.Pack> {
|
||||
const pack = this.getPackStream();
|
||||
const fileTree = await plugins.listFileTree(directoryPath, '**/*');
|
||||
|
||||
// Process files sequentially to avoid memory issues
|
||||
(async () => {
|
||||
for (const filePath of fileTree) {
|
||||
const absolutePath = plugins.path.join(directoryPath, filePath);
|
||||
const stat = await plugins.fsPromises.stat(absolutePath);
|
||||
|
||||
if (stat.isFile()) {
|
||||
const readStream = plugins.fs.createReadStream(absolutePath);
|
||||
await this.addFileToPack(pack, {
|
||||
fileName: filePath,
|
||||
content: readStream,
|
||||
size: stat.size,
|
||||
mode: stat.mode,
|
||||
mtime: stat.mtime,
|
||||
});
|
||||
}
|
||||
}
|
||||
pack.finalize();
|
||||
})().catch((err) => pack.destroy(err));
|
||||
|
||||
return pack;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a new TAR pack stream
|
||||
* Pack a directory into a TAR.GZ stream with true streaming
|
||||
* Uses Node.js zlib for streaming compression
|
||||
*/
|
||||
public async getPackStream(): Promise<plugins.tarStream.Pack> {
|
||||
return plugins.tarStream.pack();
|
||||
public async getDirectoryPackStreamGz(
|
||||
directoryPath: string,
|
||||
compressionLevel?: TCompressionLevel
|
||||
): Promise<plugins.stream.Readable> {
|
||||
const tarStream = await this.getDirectoryPackStream(directoryPath);
|
||||
const { createGzip } = await import('node:zlib');
|
||||
const gzip = createGzip({ level: compressionLevel ?? 6 });
|
||||
return tarStream.pipe(gzip);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a TAR extraction stream
|
||||
*/
|
||||
public getDecompressionStream(): plugins.tarStream.Extract {
|
||||
return plugins.tarStream.extract();
|
||||
}
|
||||
// ============================================
|
||||
// BUFFER-BASED METHODS (inherited + filesystem)
|
||||
// ============================================
|
||||
|
||||
  /**
   * Pack a directory into a TAR buffer (loads all files into memory).
   * For large directories, use getDirectoryPackStream() instead.
   */
  public async packDirectory(directoryPath: string): Promise<Uint8Array> {
    const fileTree = await plugins.listFileTree(directoryPath, '**/*');
    const entries: IArchiveEntry[] = [];

    for (const filePath of fileTree) {
      const absolutePath = plugins.path.join(directoryPath, filePath);
      const stat = await plugins.fsPromises.stat(absolutePath);

      if (stat.isFile()) {
        const content = await plugins.fsPromises.readFile(absolutePath);
        entries.push({
          archivePath: filePath,
          content: new Uint8Array(content),
        });
      }
    }

    return this.packFiles(entries);
  }

  /**
   * Pack a directory into a TAR.GZ buffer (loads all files into memory).
   * For large directories, use getDirectoryPackStreamGz() instead.
   */
  public async packDirectoryToTarGz(
    directoryPath: string,
    compressionLevel?: TCompressionLevel
  ): Promise<Uint8Array> {
    const tarBuffer = await this.packDirectory(directoryPath);
    const { gzipSync } = await import('fflate');
    return gzipSync(new Uint8Array(tarBuffer), { level: compressionLevel ?? 6 });
  }
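// Usage sketch: the buffer-based variants return Uint8Array, which Node.js
// file APIs accept directly (illustrative paths, Node.js assumed).
import { writeFile } from 'node:fs/promises';
import { TarTools } from '@push.rocks/smartarchive';

const tarTools = new TarTools();
const tarGzBytes = await tarTools.packDirectoryToTarGz('./my-folder', 6);
await writeFile('./my-folder.tar.gz', tarGzBytes);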
  /**
   * Pack a directory into a TAR.GZ stream.
   * This is now a true streaming implementation.
   */
  public async packDirectoryToTarGzStream(
    directoryPath: string,
    compressionLevel?: TCompressionLevel
  ): Promise<plugins.stream.Readable> {
    return this.getDirectoryPackStreamGz(directoryPath, compressionLevel);
  }
}
@@ -1,196 +0,0 @@
|
||||
import * as plugins from './plugins.js';
import type { IArchiveEntry, TCompressionLevel } from './interfaces.js';

/**
 * Transform stream for ZIP decompression using fflate
 * Emits StreamFile objects for each file in the archive
 */
export class ZipDecompressionTransform extends plugins.smartstream.SmartDuplex<Buffer, plugins.smartfile.StreamFile> {
  private streamtools!: plugins.smartstream.IStreamTools;
  private unzipper = new plugins.fflate.Unzip(async (fileArg) => {
    let resultBuffer: Buffer;
    fileArg.ondata = async (_flateError, dat, final) => {
      resultBuffer
        ? (resultBuffer = Buffer.concat([resultBuffer, Buffer.from(dat)]))
        : (resultBuffer = Buffer.from(dat));
      if (final) {
        const streamFile = plugins.smartfile.StreamFile.fromBuffer(resultBuffer);
        streamFile.relativeFilePath = fileArg.name;
        this.streamtools.push(streamFile);
      }
    };
    fileArg.start();
  });

  constructor() {
    super({
      objectMode: true,
      writeFunction: async (chunkArg, streamtoolsArg) => {
        this.streamtools ? null : (this.streamtools = streamtoolsArg);
        this.unzipper.push(
          Buffer.isBuffer(chunkArg) ? chunkArg : Buffer.from(chunkArg as unknown as ArrayBuffer),
          false
        );
        return null;
      },
      finalFunction: async () => {
        this.unzipper.push(Buffer.from(''), true);
        await plugins.smartdelay.delayFor(0);
        await this.streamtools.push(null);
        return null;
      },
    });
    this.unzipper.register(plugins.fflate.UnzipInflate);
  }
}

/**
 * Streaming ZIP compression using fflate
 * Allows adding multiple entries before finalizing
 */
export class ZipCompressionStream extends plugins.stream.Duplex {
  private files: Map<string, { data: Uint8Array; options?: plugins.fflate.ZipOptions }> = new Map();
  private finalized = false;

  constructor() {
    super();
  }

  /**
   * Add a file entry to the ZIP archive
   */
  public async addEntry(
    fileName: string,
    content: Buffer | plugins.stream.Readable,
    options?: { compressionLevel?: TCompressionLevel }
  ): Promise<void> {
    if (this.finalized) {
      throw new Error('Cannot add entries to a finalized ZIP archive');
    }

    let data: Buffer;
    if (Buffer.isBuffer(content)) {
      data = content;
    } else {
      // Collect stream to buffer
      const chunks: Buffer[] = [];
      for await (const chunk of content) {
        chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
      }
      data = Buffer.concat(chunks);
    }

    this.files.set(fileName, {
      data: new Uint8Array(data),
      options: options?.compressionLevel !== undefined ? { level: options.compressionLevel } : undefined,
    });
  }

  /**
   * Finalize the ZIP archive and emit the compressed data
   */
  public async finalize(): Promise<void> {
    if (this.finalized) {
      return;
    }
    this.finalized = true;

    const filesObj: plugins.fflate.Zippable = {};
    for (const [name, { data, options }] of this.files) {
      filesObj[name] = options ? [data, options] : data;
    }

    // Use sync version for Deno compatibility (fflate async uses Web Workers)
    try {
      const result = plugins.fflate.zipSync(filesObj);
      this.push(Buffer.from(result));
      this.push(null);
    } catch (err) {
      throw err;
    }
  }

  _read(): void {
    // No-op: data is pushed when finalize() is called
  }

  _write(
    _chunk: Buffer,
    _encoding: BufferEncoding,
    callback: (error?: Error | null) => void
  ): void {
    // Not used for ZIP creation - use addEntry() instead
    callback(new Error('Use addEntry() to add files to the ZIP archive'));
  }
}

/**
 * ZIP compression and decompression utilities
 */
export class ZipTools {
  /**
   * Get a streaming compression object for creating ZIP archives
   */
  public getCompressionStream(): ZipCompressionStream {
    return new ZipCompressionStream();
  }

  /**
   * Get a streaming decompression transform for extracting ZIP archives
   */
  public getDecompressionStream(): ZipDecompressionTransform {
    return new ZipDecompressionTransform();
  }

  /**
   * Create a ZIP archive from an array of entries
   */
  public async createZip(entries: IArchiveEntry[], compressionLevel?: TCompressionLevel): Promise<Buffer> {
    const filesObj: plugins.fflate.Zippable = {};

    for (const entry of entries) {
      let data: Uint8Array;

      if (typeof entry.content === 'string') {
        data = new TextEncoder().encode(entry.content);
      } else if (Buffer.isBuffer(entry.content)) {
        data = new Uint8Array(entry.content);
      } else if (entry.content instanceof plugins.smartfile.SmartFile) {
        data = new Uint8Array(entry.content.contents);
      } else if (entry.content instanceof plugins.smartfile.StreamFile) {
        const buffer = await entry.content.getContentAsBuffer();
        data = new Uint8Array(buffer);
      } else {
        // Readable stream
        const chunks: Buffer[] = [];
        for await (const chunk of entry.content as plugins.stream.Readable) {
          chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
        }
        data = new Uint8Array(Buffer.concat(chunks));
      }

      if (compressionLevel !== undefined) {
        filesObj[entry.archivePath] = [data, { level: compressionLevel }];
      } else {
        filesObj[entry.archivePath] = data;
      }
    }

    // Use sync version for Deno compatibility (fflate async uses Web Workers)
    const result = plugins.fflate.zipSync(filesObj);
    return Buffer.from(result);
  }

  /**
   * Extract a ZIP buffer to an array of entries
   */
  public async extractZip(data: Buffer): Promise<Array<{ path: string; content: Buffer }>> {
    // Use sync version for Deno compatibility (fflate async uses Web Workers)
    const result = plugins.fflate.unzipSync(data);
    const entries: Array<{ path: string; content: Buffer }> = [];
    for (const [path, content] of Object.entries(result)) {
      entries.push({ path, content: Buffer.from(content) });
    }
    return entries;
  }
}
18
ts/index.ts
@@ -1,15 +1,11 @@
// Re-export everything from ts_shared (browser-compatible)
export * from '../ts_shared/index.js';

// Node.js-specific: Main archive class with filesystem support
export * from './classes.smartarchive.js';

// Node.js-specific: Archive analysis with SmartArchive integration
export * from './classes.archiveanalyzer.js';

// Node.js-specific: Extended TarTools with streaming support (overrides shared TarTools)
export { TarTools, type ITarPackFileOptions } from './classes.tartools.js';
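// Usage sketch of the resulting entrypoint split; the import specifiers follow
// the "." and "./web" exports described in the changelog, and SmartArchive is
// assumed to be the class exported by classes.smartarchive.
import { SmartArchive, TarTools } from '@push.rocks/smartarchive'; // Node.js: full toolset
import { ZipTools, GzipTools } from '@push.rocks/smartarchive/web'; // browser: shared tools only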
ts/plugins.ts
@@ -1,4 +1,4 @@
// Node.js native scope
import * as path from 'node:path';
import * as stream from 'node:stream';
import * as fs from 'node:fs';
@@ -30,32 +30,24 @@ export async function listFileTree(dirPath: string, _pattern: string = '**/*'):
  return results;
}

// Re-export browser-compatible plugins from ts_shared
export * from '../ts_shared/plugins.js';

// Additional Node.js-specific @pushrocks packages
import * as smartpath from '@push.rocks/smartpath';
import * as smartpromise from '@push.rocks/smartpromise';
import * as smartrequest from '@push.rocks/smartrequest';
import * as smartunique from '@push.rocks/smartunique';
import * as smartstream from '@push.rocks/smartstream';
import * as smartrx from '@push.rocks/smartrx';
import * as smarturl from '@push.rocks/smarturl';

export {
  smartpath,
  smartpromise,
  smartrequest,
  smartunique,
  smartstream,
  smartrx,
  smarturl,
};

// Node.js-specific: tar-stream for true streaming TAR support
import * as tarStream from 'tar-stream';
export { tarStream };
@@ -6,7 +6,7 @@ const BITMASK = [0, 0x01, 0x03, 0x07, 0x0f, 0x1f, 0x3f, 0x7f, 0xff] as const;
 * Creates a bit reader function for BZIP2 decompression.
 * Takes a buffer iterator as input and returns a function that reads bits.
 */
export function bitIterator(nextBuffer: () => Uint8Array): IBitReader {
  let bit = 0;
  let byte = 0;
  let bytes = nextBuffer();
@@ -71,7 +71,7 @@ export class Bzip2 {
  /**
   * Create a bit reader from a byte array
   */
  array(bytes: Uint8Array): (n: number) => number {
    let bit = 0;
    let byte = 0;
    const BITMASK = [0, 0x01, 0x03, 0x07, 0x0f, 0x1f, 0x3f, 0x7f, 0xff];
@@ -99,7 +99,7 @@ export class Bzip2 {
  /**
   * Simple decompression from a buffer
   */
  simple(srcbuffer: Uint8Array, stream: (byte: number) => void): void {
    const bits = this.array(srcbuffer);
    const size = this.header(bits as IBitReader);
    let ret: number | null = 0;
@@ -8,16 +8,16 @@ import { bitIterator } from './bititerator.js';
/**
 * Creates a streaming BZIP2 decompression transform
 */
export function unbzip2Stream(): plugins.smartstream.SmartDuplex<Uint8Array, Uint8Array> {
  const bzip2Instance = new Bzip2();
  const bufferQueue: Uint8Array[] = [];
  let hasBytes = 0;
  let blockSize = 0;
  let broken = false;
  let bitReader: IBitReader | null = null;
  let streamCRC: number | null = null;

  function decompressBlock(): Uint8Array | undefined {
    if (!blockSize) {
      blockSize = bzip2Instance.header(bitReader!);
      streamCRC = 0;
@@ -40,12 +40,12 @@ export function unbzip2Stream(): plugins.smartstream.SmartDuplex<Uint8Array, Uint8Array>
      return undefined;
    }

    return new Uint8Array(chunk);
  }

  let outlength = 0;

  const decompressAndPush = async (): Promise<Uint8Array | undefined> => {
    if (broken) return undefined;

    try {
@@ -63,7 +63,7 @@ export function unbzip2Stream(): plugins.smartstream.SmartDuplex<Uint8Array, Uint8Array>
    }
  };

  return new plugins.smartstream.SmartDuplex<Uint8Array, Uint8Array>({
    objectMode: true,
    name: 'bzip2',
    highWaterMark: 1,
14
ts_shared/classes.bzip2tools.ts
Normal file
@@ -0,0 +1,14 @@
import * as plugins from './plugins.js';
import { unbzip2Stream } from './bzip2/index.js';

/**
 * BZIP2 decompression utilities (browser-compatible)
 */
export class Bzip2Tools {
  /**
   * Get a streaming decompression transform
   */
  getDecompressionStream(): plugins.smartstream.SmartDuplex<Uint8Array, Uint8Array> {
    return unbzip2Stream();
  }
}
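// Usage sketch: piping a .bz2 file through the decompression transform
// (illustrative path, Node.js assumed; the transform itself is runtime-agnostic).
import { createReadStream } from 'node:fs';
import { Bzip2Tools } from '@push.rocks/smartarchive';

const bzip2Tools = new Bzip2Tools();
createReadStream('./archive.bz2').pipe(bzip2Tools.getDecompressionStream());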
42
ts_shared/classes.gziptools.ts
Normal file
@@ -0,0 +1,42 @@
import * as plugins from './plugins.js';
import type { TCompressionLevel } from './interfaces.js';

/**
 * GZIP compression and decompression utilities (browser-compatible)
 */
export class GzipTools {
  /**
   * Compress data synchronously
   */
  public compressSync(data: Uint8Array, level?: TCompressionLevel): Uint8Array {
    const options = level !== undefined ? { level } : undefined;
    return plugins.fflate.gzipSync(data, options);
  }

  /**
   * Decompress data synchronously
   */
  public decompressSync(data: Uint8Array): Uint8Array {
    return plugins.fflate.gunzipSync(data);
  }

  /**
   * Compress data asynchronously.
   * Note: Uses the sync version for Deno compatibility (fflate's async API uses
   * Web Workers, which have issues in Deno).
   */
  public async compress(data: Uint8Array, level?: TCompressionLevel): Promise<Uint8Array> {
    // Use sync version wrapped in a Promise for cross-runtime compatibility
    return this.compressSync(data, level);
  }

  /**
   * Decompress data asynchronously.
   * Note: Uses the sync version for Deno compatibility (fflate's async API uses
   * Web Workers, which have issues in Deno).
   */
  public async decompress(data: Uint8Array): Promise<Uint8Array> {
    // Use sync version wrapped in a Promise for cross-runtime compatibility
    return this.decompressSync(data);
  }
}
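// Usage sketch: a gzip round-trip on raw bytes; only Uint8Array, TextEncoder,
// and TextDecoder are involved, so this works in Node.js and browsers alike.
import { GzipTools } from '@push.rocks/smartarchive/web';

const gzipTools = new GzipTools();
const input = new TextEncoder().encode('hello smartarchive');
const packed = await gzipTools.compress(input, 9);
const unpacked = await gzipTools.decompress(packed);
console.log(new TextDecoder().decode(unpacked)); // 'hello smartarchive'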
89
ts_shared/classes.tartools.ts
Normal file
@@ -0,0 +1,89 @@
import * as plugins from './plugins.js';
import type { IArchiveEntry, ITarEntry, TCompressionLevel } from './interfaces.js';
import { GzipTools } from './classes.gziptools.js';

/**
 * TAR archive creation and extraction utilities using modern-tar (browser-compatible)
 */
export class TarTools {
  /**
   * Pack files into a TAR buffer
   */
  public async packFiles(files: IArchiveEntry[]): Promise<Uint8Array> {
    const entries: ITarEntry[] = [];

    for (const file of files) {
      let data: Uint8Array;

      if (typeof file.content === 'string') {
        data = new TextEncoder().encode(file.content);
      } else if (file.content instanceof Uint8Array) {
        data = file.content;
      } else if (file.content instanceof plugins.smartfile.SmartFile) {
        data = new Uint8Array(file.content.contents);
      } else if (file.content instanceof plugins.smartfile.StreamFile) {
        const buffer = await file.content.getContentAsBuffer();
        data = new Uint8Array(buffer);
      } else {
        throw new Error('Unsupported content type for TAR entry');
      }

      entries.push({
        header: {
          name: file.archivePath,
          size: data.length,
          type: 'file',
          mode: file.mode,
          mtime: file.mtime,
        },
        body: data,
      });
    }

    return plugins.modernTar.packTar(entries);
  }

  /**
   * Extract a TAR buffer to an array of entries
   */
  public async extractTar(data: Uint8Array): Promise<Array<{ path: string; content: Uint8Array; isDirectory: boolean }>> {
    const entries = await plugins.modernTar.unpackTar(data);
    const result: Array<{ path: string; content: Uint8Array; isDirectory: boolean }> = [];

    for (const entry of entries) {
      const isDirectory = entry.header.type === 'directory' || entry.header.name.endsWith('/');

      // modern-tar uses the 'data' property, not 'body'
      const content = entry.data ?? new Uint8Array(0);

      result.push({
        path: entry.header.name,
        content,
        isDirectory,
      });
    }

    return result;
  }

  /**
   * Pack files into a TAR.GZ buffer
   */
  public async packFilesToTarGz(
    files: IArchiveEntry[],
    compressionLevel?: TCompressionLevel
  ): Promise<Uint8Array> {
    const tarBuffer = await this.packFiles(files);
    const gzipTools = new GzipTools();
    return gzipTools.compress(tarBuffer, compressionLevel);
  }

  /**
   * Extract a TAR.GZ buffer to an array of entries
   */
  public async extractTarGz(data: Uint8Array): Promise<Array<{ path: string; content: Uint8Array; isDirectory: boolean }>> {
    const gzipTools = new GzipTools();
    const tarBuffer = await gzipTools.decompress(data);
    return this.extractTar(tarBuffer);
  }
}
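// Usage sketch: an in-memory TAR round-trip with the shared, browser-compatible
// TarTools; entry names and contents are illustrative.
import { TarTools } from '@push.rocks/smartarchive/web';

const tarTools = new TarTools();
const tarBytes = await tarTools.packFiles([
  { archivePath: 'docs/readme.txt', content: 'hello tar' },
]);
const entries = await tarTools.extractTar(tarBytes);
console.log(entries[0].path); // 'docs/readme.txt'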
107
ts_shared/classes.ziptools.ts
Normal file
@@ -0,0 +1,107 @@
import * as plugins from './plugins.js';
import type { IArchiveEntry, TCompressionLevel } from './interfaces.js';

/**
 * Transform stream for ZIP decompression using fflate
 * Emits StreamFile objects for each file in the archive
 */
export class ZipDecompressionTransform extends plugins.smartstream.SmartDuplex<Uint8Array, plugins.smartfile.StreamFile> {
  private streamtools!: plugins.smartstream.IStreamTools;
  private unzipper = new plugins.fflate.Unzip(async (fileArg) => {
    let resultBuffer: Uint8Array;
    fileArg.ondata = async (_flateError, dat, final) => {
      if (resultBuffer) {
        const combined = new Uint8Array(resultBuffer.length + dat.length);
        combined.set(resultBuffer);
        combined.set(dat, resultBuffer.length);
        resultBuffer = combined;
      } else {
        resultBuffer = new Uint8Array(dat);
      }
      if (final) {
        const streamFile = plugins.smartfile.StreamFile.fromBuffer(Buffer.from(resultBuffer));
        streamFile.relativeFilePath = fileArg.name;
        this.streamtools.push(streamFile);
      }
    };
    fileArg.start();
  });

  constructor() {
    super({
      objectMode: true,
      writeFunction: async (chunkArg, streamtoolsArg) => {
        this.streamtools ? null : (this.streamtools = streamtoolsArg);
        const chunk = chunkArg instanceof Uint8Array ? chunkArg : new Uint8Array(chunkArg);
        this.unzipper.push(chunk, false);
        return null;
      },
      finalFunction: async () => {
        this.unzipper.push(new Uint8Array(0), true);
        await plugins.smartdelay.delayFor(0);
        await this.streamtools.push(null);
        return null;
      },
    });
    this.unzipper.register(plugins.fflate.UnzipInflate);
  }
}

/**
 * ZIP compression and decompression utilities
 */
export class ZipTools {
  /**
   * Get a streaming decompression transform for extracting ZIP archives
   */
  public getDecompressionStream(): ZipDecompressionTransform {
    return new ZipDecompressionTransform();
  }

  /**
   * Create a ZIP archive from an array of entries
   */
  public async createZip(entries: IArchiveEntry[], compressionLevel?: TCompressionLevel): Promise<Uint8Array> {
    const filesObj: plugins.fflate.Zippable = {};

    for (const entry of entries) {
      let data: Uint8Array;

      if (typeof entry.content === 'string') {
        data = new TextEncoder().encode(entry.content);
      } else if (entry.content instanceof Uint8Array) {
        data = entry.content;
      } else if (entry.content instanceof plugins.smartfile.SmartFile) {
        data = new Uint8Array(entry.content.contents);
      } else if (entry.content instanceof plugins.smartfile.StreamFile) {
        const buffer = await entry.content.getContentAsBuffer();
        data = new Uint8Array(buffer);
      } else {
        throw new Error('Unsupported content type for ZIP entry');
      }

      if (compressionLevel !== undefined) {
        filesObj[entry.archivePath] = [data, { level: compressionLevel }];
      } else {
        filesObj[entry.archivePath] = data;
      }
    }

    // Use sync version for Deno compatibility (fflate async uses Web Workers)
    const result = plugins.fflate.zipSync(filesObj);
    return result;
  }

  /**
   * Extract a ZIP buffer to an array of entries
   */
  public async extractZip(data: Uint8Array): Promise<Array<{ path: string; content: Uint8Array }>> {
    // Use sync version for Deno compatibility (fflate async uses Web Workers)
    const result = plugins.fflate.unzipSync(data);
    const entries: Array<{ path: string; content: Uint8Array }> = [];
    for (const [path, content] of Object.entries(result)) {
      entries.push({ path, content });
    }
    return entries;
  }
}
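// Usage sketch: a ZIP round-trip with the shared ZipTools (illustrative entry,
// compression level 9).
import { ZipTools } from '@push.rocks/smartarchive/web';

const zipTools = new ZipTools();
const zipBytes = await zipTools.createZip(
  [{ archivePath: 'hello.txt', content: 'hello zip' }],
  9
);
const files = await zipTools.extractZip(zipBytes);
console.log(files[0].path); // 'hello.txt'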
17
ts_shared/index.ts
Normal file
@@ -0,0 +1,17 @@
// ts_shared - Browser-compatible shared code

// Interfaces and types
export * from './interfaces.js';

// Error classes
export * from './errors.js';

// Tool classes
export { ZipTools, ZipDecompressionTransform } from './classes.ziptools.js';
export { GzipTools } from './classes.gziptools.js';
export { TarTools } from './classes.tartools.js';
export { Bzip2Tools } from './classes.bzip2tools.js';

// BZIP2 internals (for advanced usage)
export { unbzip2Stream } from './bzip2/index.js';
export { Bzip2 } from './bzip2/bzip2.js';
@@ -1,4 +1,3 @@
import type { SmartFile, StreamFile } from '@push.rocks/smartfile';

/**
@@ -22,13 +21,13 @@ export type TSupportedMime =
  | undefined;

/**
 * Entry to add to an archive during creation (browser-compatible)
 */
export interface IArchiveEntry {
  /** Path within the archive */
  archivePath: string;
  /** Content: string, Buffer/Uint8Array, SmartFile, or StreamFile */
  content: string | Uint8Array | SmartFile | StreamFile;
  /** Optional size hint for streams (improves performance) */
  size?: number;
  /** Optional file mode/permissions */
@@ -104,11 +103,9 @@ export interface IAddFileOptions {
  /** Filename within the archive */
  fileName?: string;
  /** File content */
  content?: string | Uint8Array | SmartFile | StreamFile;
  /** Size in bytes (required for streams) */
  byteLength?: number;
}

/**
@@ -134,3 +131,17 @@ export interface IHuffmanGroup {
 * Entry filter predicate for fluent API
 */
export type TEntryFilter = (entry: IArchiveEntryInfo) => boolean;

/**
 * TAR entry for modern-tar compatibility
 */
export interface ITarEntry {
  header: {
    name: string;
    size: number;
    type?: 'file' | 'directory';
    mode?: number;
    mtime?: Date;
  };
  body: string | Uint8Array;
}
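// Minimal sketch of an ITarEntry as consumed by modern-tar's packTar; the name
// and body are illustrative, and header.size must match the body length.
import type { ITarEntry } from '@push.rocks/smartarchive/web';

const entry: ITarEntry = {
  header: { name: 'hello.txt', size: 5, type: 'file' },
  body: new TextEncoder().encode('hello'),
};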
22
ts_shared/plugins.ts
Normal file
@@ -0,0 +1,22 @@
// Browser-compatible plugins for ts_shared
// NO Node.js imports allowed here

// @push.rocks scope (browser-compatible)
import * as smartdelay from '@push.rocks/smartdelay';
import * as smartpromise from '@push.rocks/smartpromise';
import * as smartstream from '@push.rocks/smartstream';
import * as smartfile from '@push.rocks/smartfile';

export {
  smartdelay,
  smartpromise,
  smartstream,
  smartfile,
};

// third party scope (browser-compatible)
import * as fileType from 'file-type';
import * as fflate from 'fflate';
import * as modernTar from 'modern-tar';

export { fileType, fflate, modernTar };
8
ts_web/00_commitinfo_data.ts
Normal file
@@ -0,0 +1,8 @@
/**
 * autocreated commitinfo by @push.rocks/commitinfo
 */
export const commitinfo = {
  name: '@push.rocks/smartarchive',
  version: '5.2.0',
  description: 'A library for working with archive files, providing utilities for compressing and decompressing data.'
}
4
ts_web/index.ts
Normal file
@@ -0,0 +1,4 @@
// ts_web - Browser-compatible entry point
// Re-exports everything from ts_shared

export * from '../ts_shared/index.js';
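// Usage sketch: consuming the /web bundle from a browser, e.g. extracting a
// fetched .tar.gz entirely in memory (URL and asset are illustrative).
import { TarTools } from '@push.rocks/smartarchive/web';

const response = await fetch('/assets/bundle.tar.gz');
const bytes = new Uint8Array(await response.arrayBuffer());
const entries = await new TarTools().extractTarGz(bytes);
console.log(entries.map((entry) => entry.path));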
3
ts_web/plugins.ts
Normal file
@@ -0,0 +1,3 @@
// Browser-compatible plugins for ts_web
// Re-export from ts_shared
export * from '../ts_shared/plugins.js';