Compare commits
6 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 648406d8b4 | |
| | db48fcd455 | |
| | d97e9c1dce | |
| | 6393527c95 | |
| | 4e3c5a8443 | |
| | 11bbddc763 | |
Binary file not shown.
@@ -1,68 +0,0 @@
# language of the project (csharp, python, rust, java, typescript, go, cpp, or ruby)
# * For C, use cpp
# * For JavaScript, use typescript
# Special requirements:
# * csharp: Requires the presence of a .sln file in the project folder.
language: typescript

# whether to use the project's gitignore file to ignore files
# Added on 2025-04-07
ignore_all_files_in_gitignore: true
# list of additional paths to ignore
# same syntax as gitignore, so you can use * and **
# Was previously called `ignored_dirs`, please update your config if you are using that.
# Added (renamed) on 2025-04-07
ignored_paths: []

# whether the project is in read-only mode
# If set to true, all editing tools will be disabled and attempts to use them will result in an error
# Added on 2025-04-18
read_only: false


# list of tool names to exclude. We recommend not excluding any tools, see the readme for more details.
# Below is the complete list of tools for convenience.
# To make sure you have the latest list of tools, and to view their descriptions,
# execute `uv run scripts/print_tool_overview.py`.
#
# * `activate_project`: Activates a project by name.
# * `check_onboarding_performed`: Checks whether project onboarding was already performed.
# * `create_text_file`: Creates/overwrites a file in the project directory.
# * `delete_lines`: Deletes a range of lines within a file.
# * `delete_memory`: Deletes a memory from Serena's project-specific memory store.
# * `execute_shell_command`: Executes a shell command.
# * `find_referencing_code_snippets`: Finds code snippets in which the symbol at the given location is referenced.
# * `find_referencing_symbols`: Finds symbols that reference the symbol at the given location (optionally filtered by type).
# * `find_symbol`: Performs a global (or local) search for symbols with/containing a given name/substring (optionally filtered by type).
# * `get_current_config`: Prints the current configuration of the agent, including the active and available projects, tools, contexts, and modes.
# * `get_symbols_overview`: Gets an overview of the top-level symbols defined in a given file.
# * `initial_instructions`: Gets the initial instructions for the current project.
#   Should only be used in settings where the system prompt cannot be set,
#   e.g. in clients you have no control over, like Claude Desktop.
# * `insert_after_symbol`: Inserts content after the end of the definition of a given symbol.
# * `insert_at_line`: Inserts content at a given line in a file.
# * `insert_before_symbol`: Inserts content before the beginning of the definition of a given symbol.
# * `list_dir`: Lists files and directories in the given directory (optionally with recursion).
# * `list_memories`: Lists memories in Serena's project-specific memory store.
# * `onboarding`: Performs onboarding (identifying the project structure and essential tasks, e.g. for testing or building).
# * `prepare_for_new_conversation`: Provides instructions for preparing for a new conversation (in order to continue with the necessary context).
# * `read_file`: Reads a file within the project directory.
# * `read_memory`: Reads the memory with the given name from Serena's project-specific memory store.
# * `remove_project`: Removes a project from the Serena configuration.
# * `replace_lines`: Replaces a range of lines within a file with new content.
# * `replace_symbol_body`: Replaces the full definition of a symbol.
# * `restart_language_server`: Restarts the language server, may be necessary when edits not through Serena happen.
# * `search_for_pattern`: Performs a search for a pattern in the project.
# * `summarize_changes`: Provides instructions for summarizing the changes made to the codebase.
# * `switch_modes`: Activates modes by providing a list of their names
# * `think_about_collected_information`: Thinking tool for pondering the completeness of collected information.
# * `think_about_task_adherence`: Thinking tool for determining whether the agent is still on track with the current task.
# * `think_about_whether_you_are_done`: Thinking tool for determining whether the task is truly completed.
# * `write_memory`: Writes a named memory (for future reference) to Serena's project-specific memory store.
excluded_tools: []

# initial prompt for the project. It will always be given to the LLM upon activating the project
# (contrary to the memories, which are loaded on demand).
initial_prompt: ""

project_name: "smartarchive"
32 changelog.md
@@ -1,5 +1,37 @@
# Changelog

## 2026-01-01 - 5.2.0 - feat(tartools)
add streaming TAR support (tar-stream), Node.js streaming APIs for TarTools, and browser / web bundle docs

- Add tar-stream runtime dependency and @types/tar-stream devDependency
- Introduce streaming TarTools APIs: getPackStream, addFileToPack, getExtractStream, extractToDirectory, getDirectoryPackStream, getDirectoryPackStreamGz
- Switch SmartArchive TAR extraction to use tar-stream extract for true streaming ingestion of entries
- Export tarStream in plugins and export ITarPackFileOptions from the Node.js entrypoint
- Update packDirectory/packDirectoryToTarGz to handle files safely and use fflate.gzipSync for buffer-based gzipping
- README updates: document /web browser bundle, browser usage examples, Uint8Array guidance, updated feature table and streaming examples

## 2026-01-01 - 5.1.0 - feat(archive)
introduce ts_shared browser-compatible layer, refactor Node-specific tools to wrap/shared implementations, and modernize archive handling

- Split code into ts_shared (browser-compatible) and ts_web entrypoint; node-specific wrappers remain under ts/
- Switched TAR implementation from tar-stream to modern-tar and replaced stream-based TAR handling with Uint8Array-based pack/unpack
- Normalized shared APIs to use Uint8Array (instead of Buffer) for browser compatibility — callers may need to adapt Buffer/Uint8Array usage (breaking; see the migration sketch below)
- Moved BZIP2, GZIP, ZIP, TAR logic into ts_shared and updated plugins to re-export shared plugins for web builds
- Adjusted classes.smartarchive to consume shared tools and convert between Buffer and Uint8Array where needed
- Added package.json exports for "." and "./web", bumped several dependency/devDependency versions, and added modern-tar and related changes
- Updated npmextra.json with new scoped configuration and release registries
- Removed pnpm-workspace.yaml entries and deleted several legacy Node-only files in favor of shared implementations
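For callers affected by the Uint8Array normalization in 5.1.0, migration is mostly a conversion at the boundaries; a minimal, hypothetical sketch (not tied to any specific tool method):

```typescript
import { Buffer } from 'node:buffer';

// Buffer is a Uint8Array subclass, so existing Buffers can still be passed in:
const payload: Uint8Array = Buffer.from('hello');

// Results now come back as Uint8Array; convert only where a real Buffer is
// required (legacy code, APIs that insist on Buffer):
const result = new Uint8Array([1, 2, 3]); // stand-in for a shared-API return value
const asBuffer = Buffer.from(result.buffer, result.byteOffset, result.byteLength);
```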

## 2025-11-25 - 5.0.1 - fix(ziptools,gziptools)
Use fflate synchronous APIs for ZIP and GZIP operations for Deno compatibility; add TEntryFilter type and small docs/tests cleanup

- Replace fflate async APIs (zip, unzip, gzip, gunzip with callbacks) with synchronous counterparts (zipSync, unzipSync, gzipSync, gunzipSync) to avoid Web Worker issues in Deno
- ZipCompressionStream.finalize now uses fflate.zipSync and emits compressed Buffer synchronously
- GzipTools.compress / decompress now delegate to compressSync / decompressSync for cross-runtime compatibility
- ZipTools.createZip and ZipTools.extractZip now use zipSync/unzipSync and return Buffers
- Add TEntryFilter type to ts/interfaces.ts for fluent API entry filtering
- Minor readme.hints.md updates and small whitespace tidy in tests

## 2025-11-25 - 5.0.0 - BREAKING CHANGE(SmartArchive)
Refactor public API: rename factory/extraction methods, introduce typed interfaces and improved compression tools
8 dist_ts/index.d.ts (vendored)
@@ -1,8 +1,4 @@
export * from './interfaces.js';
export * from './errors.js';
export * from '../ts_shared/index.js';
export * from './classes.smartarchive.js';
export * from './classes.tartools.js';
export * from './classes.ziptools.js';
export * from './classes.gziptools.js';
export * from './classes.bzip2tools.js';
export * from './classes.archiveanalyzer.js';
export { TarTools, type ITarPackFileOptions } from './classes.tartools.js';
@@ -1,13 +1,9 @@
// Core types and errors
export * from './interfaces.js';
export * from './errors.js';
// Main archive class
// Re-export everything from ts_shared (browser-compatible)
export * from '../ts_shared/index.js';
// Node.js-specific: Main archive class with filesystem support
export * from './classes.smartarchive.js';
// Format-specific tools
export * from './classes.tartools.js';
export * from './classes.ziptools.js';
export * from './classes.gziptools.js';
export * from './classes.bzip2tools.js';
// Archive analysis
// Node.js-specific: Archive analysis with SmartArchive integration
export * from './classes.archiveanalyzer.js';
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi90cy9pbmRleC50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSx3QkFBd0I7QUFDeEIsY0FBYyxpQkFBaUIsQ0FBQztBQUNoQyxjQUFjLGFBQWEsQ0FBQztBQUU1QixxQkFBcUI7QUFDckIsY0FBYywyQkFBMkIsQ0FBQztBQUUxQyx3QkFBd0I7QUFDeEIsY0FBYyx1QkFBdUIsQ0FBQztBQUN0QyxjQUFjLHVCQUF1QixDQUFDO0FBQ3RDLGNBQWMsd0JBQXdCLENBQUM7QUFDdkMsY0FBYyx5QkFBeUIsQ0FBQztBQUV4QyxtQkFBbUI7QUFDbkIsY0FBYyw4QkFBOEIsQ0FBQyJ9
// Node.js-specific: Extended TarTools with streaming support (overrides shared TarTools)
export { TarTools } from './classes.tartools.js';
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi90cy9pbmRleC50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSwyREFBMkQ7QUFDM0QsY0FBYyx1QkFBdUIsQ0FBQztBQUV0QywrREFBK0Q7QUFDL0QsY0FBYywyQkFBMkIsQ0FBQztBQUUxQyxtRUFBbUU7QUFDbkUsY0FBYyw4QkFBOEIsQ0FBQztBQUU3Qyx5RkFBeUY7QUFDekYsT0FBTyxFQUFFLFFBQVEsRUFBNEIsTUFBTSx1QkFBdUIsQ0FBQyJ9
34 dist_ts/smartarchive.classes.smartarchive.d.ts (vendored)
@@ -1,34 +0,0 @@
/// <reference types="node" resolution-mode="require"/>
import * as plugins from './smartarchive.plugins.js';
export declare class SmartArchive {
    constructor();
    /**
     * extracts an archive from a given url
     */
    extractArchiveFromUrlToFs(urlArg: string, targetDir: string): Promise<void>;
    /**
     * extracts an archive from a given filePath on disk
     * @param filePathArg
     * @param targetDirArg
     */
    extractArchiveFromFilePathToFs(filePathArg: string, targetDirArg: string): Promise<void>;
    /**
     * extracts to Observable
     * where the Observable is emitting smartfiles
     */
    extractArchiveFromBufferToObservable(bufferArg: Buffer): Promise<plugins.smartrx.rxjs.ReplaySubject<plugins.smartfile.Smartfile>>;
    extractArchiveWithIntakeAndReplaySubject(): {
        intake: plugins.smartstream.StreamIntake<Buffer>;
        replaySubject: plugins.smartrx.rxjs.ReplaySubject<plugins.smartfile.Smartfile>;
    };
    /**
     * extracts to Observable
     */
    extractArchiveFromUrlToObservable(urlArg: string): Promise<plugins.smartrx.rxjs.ReplaySubject<plugins.smartfile.Smartfile>>;
    extractArchiveFromUrlToStream(): Promise<void>;
    extractArchiveFromFilePathToStream(): Promise<void>;
    extractArchiveFromStreamToStream(): Promise<void>;
    packFromStreamToStream(): Promise<void>;
    packFromDirPathToStream(): Promise<void>;
    packFromDirPathToFs(): Promise<void>;
}
File diff suppressed because one or more lines are too long
2 dist_ts/smartarchive.paths.d.ts (vendored)
@@ -1,2 +0,0 @@
export declare const packageDir: string;
export declare const nogitDir: string;
@@ -1,4 +0,0 @@
import * as plugins from './smartarchive.plugins.js';
export const packageDir = plugins.path.join(plugins.smartpath.get.dirnameFromImportMetaUrl(import.meta.url), '../');
export const nogitDir = plugins.path.join(packageDir, './.nogit');
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoic21hcnRhcmNoaXZlLnBhdGhzLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vdHMvc21hcnRhcmNoaXZlLnBhdGhzLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBLE9BQU8sS0FBSyxPQUFPLE1BQU0sMkJBQTJCLENBQUM7QUFFckQsTUFBTSxDQUFDLE1BQU0sVUFBVSxHQUFHLE9BQU8sQ0FBQyxJQUFJLENBQUMsSUFBSSxDQUN6QyxPQUFPLENBQUMsU0FBUyxDQUFDLEdBQUcsQ0FBQyx3QkFBd0IsQ0FBQyxNQUFNLENBQUMsSUFBSSxDQUFDLEdBQUcsQ0FBQyxFQUMvRCxLQUFLLENBQ04sQ0FBQztBQUNGLE1BQU0sQ0FBQyxNQUFNLFFBQVEsR0FBRyxPQUFPLENBQUMsSUFBSSxDQUFDLElBQUksQ0FBQyxVQUFVLEVBQUUsVUFBVSxDQUFDLENBQUMifQ==
14 dist_ts/smartarchive.plugins.d.ts (vendored)
@@ -1,14 +0,0 @@
import * as path from 'path';
export { path };
import * as smartfile from '@push.rocks/smartfile';
import * as smartpath from '@push.rocks/smartpath';
import * as smartpromise from '@push.rocks/smartpromise';
import * as smartrequest from '@push.rocks/smartrequest';
import * as smartunique from '@push.rocks/smartunique';
import * as smartstream from '@push.rocks/smartstream';
import * as smartrx from '@push.rocks/smartrx';
export { smartfile, smartpath, smartpromise, smartrequest, smartunique, smartstream, smartrx };
import gunzipMaybe from 'gunzip-maybe';
import tar from 'tar';
import tarStream from 'tar-stream';
export { gunzipMaybe, tar, tarStream };
@@ -1,19 +0,0 @@
// node native scope
import * as path from 'path';
export { path };
// @pushrocks scope
import * as smartfile from '@push.rocks/smartfile';
import * as smartpath from '@push.rocks/smartpath';
import * as smartpromise from '@push.rocks/smartpromise';
import * as smartrequest from '@push.rocks/smartrequest';
import * as smartunique from '@push.rocks/smartunique';
import * as smartstream from '@push.rocks/smartstream';
import * as smartrx from '@push.rocks/smartrx';
export { smartfile, smartpath, smartpromise, smartrequest, smartunique, smartstream, smartrx };
// third party scope
import gunzipMaybe from 'gunzip-maybe';
// @ts-ignore
import tar from 'tar';
import tarStream from 'tar-stream';
export { gunzipMaybe, tar, tarStream };
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoic21hcnRhcmNoaXZlLnBsdWdpbnMuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi90cy9zbWFydGFyY2hpdmUucGx1Z2lucy50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxvQkFBb0I7QUFDcEIsT0FBTyxLQUFLLElBQUksTUFBTSxNQUFNLENBQUM7QUFFN0IsT0FBTyxFQUFFLElBQUksRUFBRSxDQUFDO0FBRWhCLG1CQUFtQjtBQUNuQixPQUFPLEtBQUssU0FBUyxNQUFNLHVCQUF1QixDQUFDO0FBQ25ELE9BQU8sS0FBSyxTQUFTLE1BQU0sdUJBQXVCLENBQUM7QUFDbkQsT0FBTyxLQUFLLFlBQVksTUFBTSwwQkFBMEIsQ0FBQztBQUN6RCxPQUFPLEtBQUssWUFBWSxNQUFNLDBCQUEwQixDQUFDO0FBQ3pELE9BQU8sS0FBSyxXQUFXLE1BQU0seUJBQXlCLENBQUM7QUFDdkQsT0FBTyxLQUFLLFdBQVcsTUFBTSx5QkFBeUIsQ0FBQztBQUN2RCxPQUFPLEtBQUssT0FBTyxNQUFNLHFCQUFxQixDQUFDO0FBRS9DLE9BQU8sRUFBRSxTQUFTLEVBQUUsU0FBUyxFQUFFLFlBQVksRUFBRSxZQUFZLEVBQUUsV0FBVyxFQUFFLFdBQVcsRUFBRSxPQUFPLEVBQUUsQ0FBQztBQUUvRixvQkFBb0I7QUFDcEIsT0FBTyxXQUFXLE1BQU0sY0FBYyxDQUFDO0FBRXZDLGFBQWE7QUFDYixPQUFPLEdBQUcsTUFBTSxLQUFLLENBQUM7QUFDdEIsT0FBTyxTQUFTLE1BQU0sWUFBWSxDQUFDO0FBRW5DLE9BQU8sRUFBRSxXQUFXLEVBQUUsR0FBRyxFQUFFLFNBQVMsRUFBRSxDQUFDIn0=
@@ -1,9 +1,5 @@
{
  "npmci": {
    "npmGlobalTools": [],
    "npmAccessLevel": "public"
  },
  "gitzone": {
    "@git.zone/cli": {
      "projectType": "npm",
      "module": {
        "githost": "code.foss.global",
@@ -25,9 +21,19 @@
        "data analysis",
        "file stream"
      ]
    },
    "release": {
      "registries": [
        "https://verdaccio.lossless.digital",
        "https://registry.npmjs.org"
      ],
      "accessLevel": "public"
    }
  },
  "tsdoc": {
    "@git.zone/tsdoc": {
"legal": "\n## License and Legal Information\n\nThis repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. \n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or if you require further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n"
    },
    "@ship.zone/szci": {
      "npmGlobalTools": []
    }
  }
}
23 package.json
@@ -1,10 +1,14 @@
{
  "name": "@push.rocks/smartarchive",
  "version": "5.0.0",
  "version": "5.2.0",
  "description": "A library for working with archive files, providing utilities for compressing and decompressing data.",
  "main": "dist_ts/index.js",
  "typings": "dist_ts/index.d.ts",
  "type": "module",
  "exports": {
    ".": "./dist_ts/index.js",
    "./web": "./dist_ts_web/index.js"
  },
  "scripts": {
    "test": "(tstest test/ --verbose)",
    "build": "tsbuild --web --allowimplicitany",
@@ -22,31 +26,34 @@
  "homepage": "https://code.foss.global/push.rocks/smartarchive#readme",
  "dependencies": {
    "@push.rocks/smartdelay": "^3.0.5",
    "@push.rocks/smartfile": "^13.0.0",
    "@push.rocks/smartfile": "^13.1.2",
    "@push.rocks/smartpath": "^6.0.0",
    "@push.rocks/smartpromise": "^4.2.3",
    "@push.rocks/smartrequest": "^4.2.2",
    "@push.rocks/smartrequest": "^5.0.1",
    "@push.rocks/smartrx": "^3.0.10",
    "@push.rocks/smartstream": "^3.2.5",
    "@push.rocks/smartunique": "^3.0.9",
    "@push.rocks/smarturl": "^3.1.0",
    "@types/tar-stream": "^3.1.4",
    "fflate": "^0.8.2",
    "file-type": "^21.0.0",
    "file-type": "^21.2.0",
    "modern-tar": "^0.7.3",
    "tar-stream": "^3.1.7"
  },
  "devDependencies": {
    "@git.zone/tsbuild": "^3.1.0",
    "@git.zone/tsrun": "^2.0.0",
    "@git.zone/tstest": "^3.1.3"
    "@git.zone/tsbuild": "^4.0.2",
    "@git.zone/tsrun": "^2.0.1",
    "@git.zone/tstest": "^3.1.4",
    "@types/tar-stream": "^3.1.3"
  },
  "private": false,
  "files": [
    "ts/**/*",
    "ts_shared/**/*",
    "ts_web/**/*",
    "dist/**/*",
    "dist_*/**/*",
    "dist_ts/**/*",
    "dist_ts_shared/**/*",
    "dist_ts_web/**/*",
    "assets/**/*",
    "cli.js",
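What the new `exports` map enables for consumers, as a sketch (the `/web` subpath and its exports are documented in the readme):

```typescript
// Node.js entrypoint ("."): full API including SmartArchive
import { SmartArchive, TarTools } from '@push.rocks/smartarchive';

// Browser entrypoint ("./web"): browser-safe tool classes only
import { ZipTools, GzipTools } from '@push.rocks/smartarchive/web';
```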
5087 pnpm-lock.yaml (generated)
File diff suppressed because it is too large
@@ -1,4 +0,0 @@
onlyBuiltDependencies:
  - esbuild
  - mongodb-memory-server
  - puppeteer
@@ -1,38 +1,84 @@
# Smartarchive Development Hints

## Dependency Upgrades (2025-01-25)
## Architecture Overview

### Completed Upgrades
- **@git.zone/tsbuild**: ^2.6.6 → ^3.1.0
- **@git.zone/tsrun**: ^1.3.3 → ^2.0.0
- **@git.zone/tstest**: ^2.3.4 → ^3.1.3
- **@push.rocks/smartfile**: ^11.2.7 → ^13.0.0
`@push.rocks/smartarchive` uses a **fluent builder pattern** for all archive operations. The main entry point is `SmartArchive.create()` which returns a builder instance.

### Migration Notes
### Two Operating Modes

#### Smartfile v13 Migration
Smartfile v13 removed filesystem operations (`fs`, `memory`, `fsStream` namespaces). These were replaced with Node.js native `fs` and `fs/promises`:
1. **Extraction Mode** - Triggered by `.url()`, `.file()`, `.stream()`, or `.buffer()`
2. **Creation Mode** - Triggered by `.format()` or `.entry()`

**Replacements made:**
Modes are mutually exclusive - you cannot mix extraction and creation methods in the same chain.
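A minimal sketch of the two modes, using the fluent calls documented in the readme (paths are placeholders):

```typescript
import { SmartArchive } from '@push.rocks/smartarchive';

// Extraction mode: a source method (.url/.file/.stream/.buffer) plus a terminal
await SmartArchive.create()
  .file('./input.tar.gz')
  .extract('./out');

// Creation mode: .format()/.entry() plus a terminal such as .toFile()
await SmartArchive.create()
  .format('tar.gz')
  .entry('hello.txt', 'Hello World')
  .toFile('./out.tar.gz');
```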

## Key Classes

- **SmartArchive** - Main class with fluent API for all operations
- **TarTools** - TAR-specific operations (pack/extract)
- **ZipTools** - ZIP-specific operations using fflate
- **GzipTools** - GZIP compression/decompression using fflate
- **Bzip2Tools** - BZIP2 decompression (extract only, no creation)
- **ArchiveAnalyzer** - Format detection via magic bytes (see the sketch below)
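Format detection is surfaced through the fluent `.analyze()` terminal (ArchiveAnalyzer does the magic-byte work internally); a minimal sketch with a placeholder path, based on the readme's analyze example:

```typescript
import { SmartArchive } from '@push.rocks/smartarchive';

const info = await SmartArchive.create()
  .file('./unknown-archive.bin') // placeholder path
  .analyze();

console.log(info.format);       // e.g. 'tar.gz', or null if unrecognized
console.log(info.isCompressed); // true for gzip/bzip2-wrapped archives
```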

## Dependencies

- **fflate** - Pure JS compression for ZIP/GZIP (works in browser)
- **tar-stream** - TAR archive handling
- **file-type** - MIME type detection via magic bytes
- **@push.rocks/smartfile** - SmartFile and StreamFile classes

## API Changes (v5.0.0)

The v5.0.0 release introduced a complete API refactor:

### Old API (deprecated)
```typescript
// Old static factory methods - NO LONGER EXIST
await SmartArchive.fromUrl(url);
await SmartArchive.fromFile(path);
await SmartArchive.fromDirectory(path, options);
```

### New Fluent API
```typescript
// Current fluent builder pattern
await SmartArchive.create()
  .url(url)
  .extract(targetDir);

await SmartArchive.create()
  .format('tar.gz')
  .directory(path)
  .toFile(outputPath);
```

## Migration Notes (from v4.x)

### Smartfile v13 Changes
Smartfile v13 removed filesystem operations. Replacements (see the sketch after this list):
- `smartfile.fs.ensureDir(path)` → `fsPromises.mkdir(path, { recursive: true })`
- `smartfile.fs.stat(path)` → `fsPromises.stat(path)`
- `smartfile.fs.toReadStream(path)` → `fs.createReadStream(path)`
- `smartfile.fs.toStringSync(path)` → `fsPromises.readFile(path, 'utf8')`
- `smartfile.fs.listFileTree(dir, pattern)` → custom `listFileTree()` helper
- `smartfile.fsStream.createReadStream(path)` → `fs.createReadStream(path)`
- `smartfile.fsStream.createWriteStream(path)` → `fs.createWriteStream(path)`
- `smartfile.memory.toFs(content, path)` → `fsPromises.writeFile(path, content)`
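A compact sketch of the replacement pattern with Node's built-in modules (hypothetical paths):

```typescript
import * as fs from 'node:fs';
import * as fsPromises from 'node:fs/promises';

// was: await smartfile.fs.ensureDir('./out');
await fsPromises.mkdir('./out', { recursive: true });

// was: await smartfile.memory.toFs('content', './out/file.txt');
await fsPromises.writeFile('./out/file.txt', 'content');

// was: smartfile.fsStream.createReadStream('./archive.tar.gz');
const readStream = fs.createReadStream('./archive.tar.gz');
```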

**Still using from smartfile v13:**
### Still using from smartfile
- `SmartFile` class (in-memory file representation)
- `StreamFile` class (streaming file handling)

### Removed Dependencies
- `through@2.3.8` - was unused in the codebase
## Testing

## Architecture Notes
```bash
pnpm test                                 # Run all tests
tstest test/test.node+deno.ts --verbose   # Run specific test
```

- Uses `fflate` for ZIP/GZIP compression (pure JS, works in browser)
- Uses `tar-stream` for TAR archive handling
- Uses `file-type` for MIME type detection
- Custom BZIP2 implementation in `ts/bzip2/` directory
Tests use a Verdaccio registry URL (`verdaccio.lossless.digital`) for test archives.

## Key Files

- `ts/classes.smartarchive.ts` - Main SmartArchive class with fluent API
- `ts/classes.tartools.ts` - TAR operations
- `ts/classes.ziptools.ts` - ZIP operations
- `ts/classes.gziptools.ts` - GZIP operations
- `ts/classes.bzip2tools.ts` - BZIP2 decompression
- `ts/classes.archiveanalyzer.ts` - Format detection
- `ts/interfaces.ts` - Type definitions
836 readme.md
@@ -1,8 +1,6 @@
# @push.rocks/smartarchive 📦

Powerful archive manipulation for modern Node.js applications.

`@push.rocks/smartarchive` is a versatile library for handling archive files with a focus on developer experience. Work with **zip**, **tar**, **gzip**, and **bzip2** formats through a unified, streaming-optimized API.
A powerful, streaming-first archive manipulation library with a fluent builder API. Works seamlessly in **Node.js**, **Deno**, and **browsers**.

## Issue Reporting and Security

@@ -10,13 +8,16 @@ For reporting bugs, issues, or security vulnerabilities, please visit [community

## Features 🚀

- 📁 **Multi-format support** – Handle `.zip`, `.tar`, `.tar.gz`, `.tgz`, and `.bz2` archives
- 📁 **Multi-format support** – Handle `.zip`, `.tar`, `.tar.gz`, `.tgz`, `.gz`, and `.bz2` archives
- 🌊 **Streaming-first architecture** – Process large archives without memory constraints
- 🔄 **Unified API** – Consistent interface across different archive formats
- ✨ **Fluent builder API** – Chain methods for readable, expressive code
- 🎯 **Smart detection** – Automatically identifies archive types via magic bytes
- ⚡ **High performance** – Built on `tar-stream` and `fflate` for speed
- 🔧 **Flexible I/O** – Work with files, URLs, and streams seamlessly
- ⚡ **High performance** – Built on `modern-tar` and `fflate` for speed
- 🔧 **Flexible I/O** – Work with files, URLs, streams, and buffers seamlessly
- 🛠️ **Modern TypeScript** – Full type safety and excellent IDE support
- 🔄 **Dual-mode operation** – Extract existing archives OR create new ones
- 🦕 **Cross-runtime** – Works in Node.js, Deno, and browsers
- 🌐 **Browser-ready** – Dedicated browser bundle with zero Node.js dependencies

## Installation 📥
@@ -39,354 +40,457 @@ yarn add @push.rocks/smartarchive
|
||||
import { SmartArchive } from '@push.rocks/smartarchive';
|
||||
|
||||
// Extract a .tar.gz archive from a URL directly to the filesystem
|
||||
const archive = await SmartArchive.fromArchiveUrl(
|
||||
'https://registry.npmjs.org/some-package/-/some-package-1.0.0.tgz'
|
||||
);
|
||||
await archive.exportToFs('./extracted');
|
||||
await SmartArchive.create()
|
||||
.url('https://registry.npmjs.org/some-package/-/some-package-1.0.0.tgz')
|
||||
.extract('./extracted');
|
||||
```
|
||||
|
||||
### Process archive as a stream
|
||||
### Create an archive from entries
|
||||
|
||||
```typescript
|
||||
import { SmartArchive } from '@push.rocks/smartarchive';
|
||||
|
||||
// Stream-based processing for memory efficiency
|
||||
const archive = await SmartArchive.fromArchiveFile('./large-archive.zip');
|
||||
const streamOfFiles = await archive.exportToStreamOfStreamFiles();
|
||||
// Create a tar.gz archive with files
|
||||
await SmartArchive.create()
|
||||
.format('tar.gz')
|
||||
.compression(6)
|
||||
.entry('config.json', JSON.stringify({ name: 'myapp' }))
|
||||
.entry('readme.txt', 'Hello World!')
|
||||
.toFile('./backup.tar.gz');
|
||||
```
|
||||
|
||||
// Process each file in the archive
|
||||
streamOfFiles.on('data', async (streamFile) => {
|
||||
console.log(`Processing ${streamFile.relativeFilePath}`);
|
||||
const readStream = await streamFile.createReadStream();
|
||||
// Handle individual file stream
|
||||
### Extract with filtering and path manipulation
|
||||
|
||||
```typescript
|
||||
import { SmartArchive } from '@push.rocks/smartarchive';
|
||||
|
||||
// Extract only JSON files, stripping the first path component
|
||||
await SmartArchive.create()
|
||||
.url('https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz')
|
||||
.stripComponents(1) // Remove 'package/' prefix
|
||||
.include(/\.json$/) // Only extract JSON files
|
||||
.extract('./node_modules/lodash');
|
||||
```
|
||||
|
||||
## Browser Usage 🌐
|
||||
|
||||
`@push.rocks/smartarchive` provides a dedicated browser-compatible bundle with no Node.js dependencies:
|
||||
|
||||
```typescript
|
||||
// Import from the /web subpath for browser environments
|
||||
import { TarTools, ZipTools, GzipTools, Bzip2Tools } from '@push.rocks/smartarchive/web';
|
||||
|
||||
// Create a TAR archive in the browser
|
||||
const tarTools = new TarTools();
|
||||
const tarBuffer = await tarTools.packFiles([
|
||||
{ archivePath: 'hello.txt', content: 'Hello from the browser!' },
|
||||
{ archivePath: 'data.json', content: JSON.stringify({ browser: true }) }
|
||||
]);
|
||||
|
||||
// Create a TAR.GZ archive
|
||||
const tgzBuffer = await tarTools.packFilesToTarGz([
|
||||
{ archivePath: 'file.txt', content: 'Compressed!' }
|
||||
], 6);
|
||||
|
||||
// Extract a TAR archive
|
||||
const entries = await tarTools.extractTar(tarBuffer);
|
||||
for (const entry of entries) {
|
||||
console.log(`${entry.path}: ${entry.content.length} bytes`);
|
||||
}
|
||||
|
||||
// Work with ZIP files
|
||||
const zipTools = new ZipTools();
|
||||
const zipBuffer = await zipTools.createZip([
|
||||
{ archivePath: 'doc.txt', content: 'Document content' }
|
||||
], 6);
|
||||
|
||||
const zipEntries = await zipTools.extractZip(zipBuffer);
|
||||
|
||||
// GZIP compression
|
||||
const gzipTools = new GzipTools();
|
||||
const compressed = gzipTools.compressSync(new TextEncoder().encode('Hello World'), 6);
|
||||
const decompressed = gzipTools.decompressSync(compressed);
|
||||
```
|
||||
|
||||
### Browser Bundle Exports
|
||||
|
||||
The `/web` subpath exports these browser-compatible tools:
|
||||
|
||||
| Export | Description |
|
||||
|--------|-------------|
|
||||
| `TarTools` | Create and extract TAR and TAR.GZ archives |
|
||||
| `ZipTools` | Create and extract ZIP archives |
|
||||
| `GzipTools` | GZIP compression and decompression |
|
||||
| `Bzip2Tools` | BZIP2 decompression (extraction only) |
|
||||
|
||||
> 💡 **Note:** The browser bundle does **not** include `SmartArchive` (which requires filesystem access). Use the individual tool classes for browser applications.
|
||||
|
||||
## Core Concepts 💡
|
||||
|
||||
### Fluent Builder Pattern
|
||||
|
||||
`SmartArchive` uses a fluent builder pattern where you chain methods to configure the operation:
|
||||
|
||||
```typescript
|
||||
SmartArchive.create() // Start a new builder
|
||||
.source(...) // Configure source (extraction mode)
|
||||
.options(...) // Set options
|
||||
.terminal() // Execute the operation
|
||||
```
|
||||
|
||||
### Two Operating Modes
|
||||
|
||||
**Extraction Mode** - Load an existing archive and extract/analyze it:
|
||||
```typescript
|
||||
SmartArchive.create()
|
||||
.url('...') // or .file(), .stream(), .buffer()
|
||||
.extract('./out') // or .toSmartFiles(), .list(), etc.
|
||||
```
|
||||
|
||||
**Creation Mode** - Build a new archive from entries:
|
||||
```typescript
|
||||
SmartArchive.create()
|
||||
.format('tar.gz') // Set output format
|
||||
.entry(...) // Add files
|
||||
.toFile('./out.tar.gz') // or .toBuffer(), .toStream()
|
||||
```
|
||||
|
||||
> ⚠️ **Note:** You cannot mix extraction and creation methods in the same chain.
|
||||
|
||||
## API Reference 📚
|
||||
|
||||
### Source Methods (Extraction Mode)
|
||||
|
||||
| Method | Description |
|
||||
|--------|-------------|
|
||||
| `.url(url)` | Load archive from a URL |
|
||||
| `.file(path)` | Load archive from local filesystem |
|
||||
| `.stream(readable)` | Load archive from any Node.js readable stream |
|
||||
| `.buffer(buffer)` | Load archive from an in-memory Buffer |
|
||||
|
||||
### Creation Methods (Creation Mode)
|
||||
|
||||
| Method | Description |
|
||||
|--------|-------------|
|
||||
| `.format(fmt)` | Set output format: `'tar'`, `'tar.gz'`, `'tgz'`, `'zip'`, `'gz'` |
|
||||
| `.compression(level)` | Set compression level (0-9, default: 6) |
|
||||
| `.entry(path, content)` | Add a file entry (string or Buffer content) |
|
||||
| `.entries(array)` | Add multiple entries at once |
|
||||
| `.directory(path, archiveBase?)` | Add entire directory contents |
|
||||
| `.addSmartFile(file, path?)` | Add a SmartFile instance |
|
||||
| `.addStreamFile(file, path?)` | Add a StreamFile instance |
|
||||
|
||||
### Filter Methods (Both Modes)
|
||||
|
||||
| Method | Description |
|
||||
|--------|-------------|
|
||||
| `.filter(predicate)` | Filter entries with custom function |
|
||||
| `.include(pattern)` | Only include entries matching regex/string pattern |
|
||||
| `.exclude(pattern)` | Exclude entries matching regex/string pattern |
|
||||
|
||||
### Extraction Options
|
||||
|
||||
| Method | Description |
|
||||
|--------|-------------|
|
||||
| `.stripComponents(n)` | Strip N leading path components |
|
||||
| `.overwrite(bool)` | Overwrite existing files (default: false) |
|
||||
| `.fileName(name)` | Set output filename for single-file archives (gz, bz2) |
|
||||
|
||||
### Terminal Methods (Extraction)
|
||||
|
||||
| Method | Returns | Description |
|
||||
|--------|---------|-------------|
|
||||
| `.extract(targetDir)` | `Promise<void>` | Extract to filesystem directory |
|
||||
| `.toStreamFiles()` | `Promise<StreamIntake<StreamFile>>` | Get stream of StreamFile objects |
|
||||
| `.toSmartFiles()` | `Promise<SmartFile[]>` | Get in-memory SmartFile array |
|
||||
| `.extractFile(path)` | `Promise<SmartFile \| null>` | Extract single file by path |
|
||||
| `.list()` | `Promise<IArchiveEntryInfo[]>` | List all entries |
|
||||
| `.analyze()` | `Promise<IArchiveInfo>` | Get archive metadata |
|
||||
| `.hasFile(path)` | `Promise<boolean>` | Check if file exists |
|
||||
|
||||
### Terminal Methods (Creation)
|
||||
|
||||
| Method | Returns | Description |
|
||||
|--------|---------|-------------|
|
||||
| `.build()` | `Promise<SmartArchive>` | Build the archive (implicit in other terminals) |
|
||||
| `.toBuffer()` | `Promise<Buffer>` | Get archive as Buffer |
|
||||
| `.toFile(path)` | `Promise<void>` | Write archive to disk |
|
||||
| `.toStream()` | `Promise<Readable>` | Get raw archive stream |
|
||||
|
||||
## Usage Examples 🔨
|
||||
|
||||
### Download and extract npm packages
|
||||
|
||||
```typescript
|
||||
import { SmartArchive } from '@push.rocks/smartarchive';
|
||||
|
||||
const pkg = await SmartArchive.create()
|
||||
.url('https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz');
|
||||
|
||||
// Quick inspection of package.json
|
||||
const pkgJson = await pkg.extractFile('package/package.json');
|
||||
if (pkgJson) {
|
||||
const metadata = JSON.parse(pkgJson.contents.toString());
|
||||
console.log(`Package: ${metadata.name}@${metadata.version}`);
|
||||
}
|
||||
|
||||
// Full extraction with path normalization
|
||||
await SmartArchive.create()
|
||||
.url('https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz')
|
||||
.stripComponents(1)
|
||||
.extract('./node_modules/lodash');
|
||||
```
|
||||
|
||||
### Create ZIP archive
|
||||
|
||||
```typescript
|
||||
import { SmartArchive } from '@push.rocks/smartarchive';
|
||||
|
||||
await SmartArchive.create()
|
||||
.format('zip')
|
||||
.compression(9)
|
||||
.entry('report.txt', 'Monthly sales report...')
|
||||
.entry('data/figures.json', JSON.stringify({ revenue: 10000 }))
|
||||
.entry('images/logo.png', pngBuffer)
|
||||
.toFile('./report-bundle.zip');
|
||||
```
|
||||
|
||||
### Create TAR.GZ from directory
|
||||
|
||||
```typescript
|
||||
import { SmartArchive } from '@push.rocks/smartarchive';
|
||||
|
||||
await SmartArchive.create()
|
||||
.format('tar.gz')
|
||||
.compression(9)
|
||||
.directory('./src', 'source') // Archive ./src as 'source/' in archive
|
||||
.toFile('./project-backup.tar.gz');
|
||||
```
|
||||
|
||||
### Stream-based extraction
|
||||
|
||||
```typescript
|
||||
import { SmartArchive } from '@push.rocks/smartarchive';
|
||||
|
||||
const fileStream = await SmartArchive.create()
|
||||
.file('./large-archive.tar.gz')
|
||||
.toStreamFiles();
|
||||
|
||||
fileStream.on('data', async (streamFile) => {
|
||||
console.log(`Processing: ${streamFile.relativeFilePath}`);
|
||||
|
||||
if (streamFile.relativeFilePath.endsWith('.json')) {
|
||||
const content = await streamFile.getContentAsBuffer();
|
||||
const data = JSON.parse(content.toString());
|
||||
// Process JSON data...
|
||||
}
|
||||
});
|
||||
|
||||
streamOfFiles.on('end', () => {
|
||||
fileStream.on('end', () => {
|
||||
console.log('Extraction complete');
|
||||
});
|
||||
```
|
||||
|
||||
## Core Concepts 💡
|
||||
|
||||
### Archive Sources
|
||||
|
||||
`SmartArchive` accepts archives from three sources:
|
||||
|
||||
| Source | Method | Use Case |
|
||||
|--------|--------|----------|
|
||||
| **URL** | `SmartArchive.fromArchiveUrl(url)` | Download and process archives from the web |
|
||||
| **File** | `SmartArchive.fromArchiveFile(path)` | Load archives from the local filesystem |
|
||||
| **Stream** | `SmartArchive.fromArchiveStream(stream)` | Process archives from any Node.js stream |
|
||||
|
||||
### Export Destinations
|
||||
|
||||
| Destination | Method | Use Case |
|
||||
|-------------|--------|----------|
|
||||
| **Filesystem** | `exportToFs(targetDir, fileName?)` | Extract directly to a directory |
|
||||
| **Stream of files** | `exportToStreamOfStreamFiles()` | Process files individually as `StreamFile` objects |
|
||||
|
||||
## Usage Examples 🔨
|
||||
|
||||
### Working with ZIP files
|
||||
### Filter specific file types
|
||||
|
||||
```typescript
|
||||
import { SmartArchive } from '@push.rocks/smartarchive';
|
||||
|
||||
// Extract a ZIP file
|
||||
const zipArchive = await SmartArchive.fromArchiveFile('./archive.zip');
|
||||
await zipArchive.exportToFs('./output');
|
||||
// Extract only TypeScript files
|
||||
const tsFiles = await SmartArchive.create()
|
||||
.url('https://example.com/project.tar.gz')
|
||||
.include(/\.ts$/)
|
||||
.exclude(/node_modules/)
|
||||
.toSmartFiles();
|
||||
|
||||
// Stream ZIP contents for processing
|
||||
const fileStream = await zipArchive.exportToStreamOfStreamFiles();
|
||||
|
||||
fileStream.on('data', async (streamFile) => {
|
||||
if (streamFile.relativeFilePath.endsWith('.json')) {
|
||||
const readStream = await streamFile.createReadStream();
|
||||
// Process JSON files from the archive
|
||||
}
|
||||
});
|
||||
for (const file of tsFiles) {
|
||||
console.log(`${file.relative}: ${file.contents.length} bytes`);
|
||||
}
|
||||
```
|
||||
|
||||
### Working with TAR archives
|
||||
|
||||
```typescript
|
||||
import { SmartArchive, TarTools } from '@push.rocks/smartarchive';
|
||||
|
||||
// Extract a .tar.gz file
|
||||
const tarGzArchive = await SmartArchive.fromArchiveFile('./archive.tar.gz');
|
||||
await tarGzArchive.exportToFs('./extracted');
|
||||
|
||||
// Create a TAR archive using TarTools directly
|
||||
const tarTools = new TarTools();
|
||||
const pack = await tarTools.getPackStream();
|
||||
|
||||
// Add files to the pack
|
||||
await tarTools.addFileToPack(pack, {
|
||||
fileName: 'hello.txt',
|
||||
content: 'Hello, World!'
|
||||
});
|
||||
|
||||
await tarTools.addFileToPack(pack, {
|
||||
fileName: 'data.json',
|
||||
content: Buffer.from(JSON.stringify({ foo: 'bar' }))
|
||||
});
|
||||
|
||||
// Finalize and pipe to destination
|
||||
pack.finalize();
|
||||
pack.pipe(createWriteStream('./output.tar'));
|
||||
```
|
||||
|
||||
### Pack a directory into TAR
|
||||
|
||||
```typescript
|
||||
import { TarTools } from '@push.rocks/smartarchive';
|
||||
import { createWriteStream } from 'fs';
|
||||
|
||||
const tarTools = new TarTools();
|
||||
|
||||
// Pack an entire directory
|
||||
const pack = await tarTools.packDirectory('./src');
|
||||
pack.finalize();
|
||||
pack.pipe(createWriteStream('./source.tar'));
|
||||
```
|
||||
|
||||
### Extracting from URLs
|
||||
### Analyze archive without extraction
|
||||
|
||||
```typescript
|
||||
import { SmartArchive } from '@push.rocks/smartarchive';
|
||||
|
||||
// Download and extract npm packages
|
||||
const npmPackage = await SmartArchive.fromArchiveUrl(
|
||||
'https://registry.npmjs.org/@push.rocks/smartfile/-/smartfile-11.2.7.tgz'
|
||||
);
|
||||
await npmPackage.exportToFs('./node_modules/@push.rocks/smartfile');
|
||||
const archive = SmartArchive.create()
|
||||
.file('./unknown-archive.tar.gz');
|
||||
|
||||
// Or process as stream for memory efficiency
|
||||
const stream = await npmPackage.exportToStreamOfStreamFiles();
|
||||
stream.on('data', async (file) => {
|
||||
console.log(`Extracted: ${file.relativeFilePath}`);
|
||||
});
|
||||
// Get format info
|
||||
const info = await archive.analyze();
|
||||
console.log(`Format: ${info.format}`);
|
||||
console.log(`Compressed: ${info.isCompressed}`);
|
||||
|
||||
// List contents
|
||||
const entries = await archive.list();
|
||||
for (const entry of entries) {
|
||||
console.log(`${entry.path} (${entry.isDirectory ? 'dir' : 'file'})`);
|
||||
}
|
||||
|
||||
// Check for specific file
|
||||
if (await archive.hasFile('package.json')) {
|
||||
const pkgFile = await archive.extractFile('package.json');
|
||||
console.log(pkgFile?.contents.toString());
|
||||
}
|
||||
```
|
||||
|
||||
### Working with GZIP files
|
||||
|
||||
```typescript
|
||||
import { SmartArchive, GzipTools } from '@push.rocks/smartarchive';
|
||||
import { createReadStream, createWriteStream } from 'fs';
|
||||
|
||||
// Decompress a .gz file - provide filename since gzip doesn't store it
|
||||
const gzipArchive = await SmartArchive.fromArchiveFile('./data.json.gz');
|
||||
await gzipArchive.exportToFs('./decompressed', 'data.json');
|
||||
// Decompress a .gz file
|
||||
await SmartArchive.create()
|
||||
.file('./data.json.gz')
|
||||
.fileName('data.json') // Specify output name (gzip doesn't store filename)
|
||||
.extract('./decompressed');
|
||||
|
||||
// Use GzipTools directly for streaming decompression
|
||||
// Use GzipTools directly for compression/decompression
|
||||
const gzipTools = new GzipTools();
|
||||
const decompressStream = gzipTools.getDecompressionStream();
|
||||
|
||||
createReadStream('./compressed.gz')
|
||||
.pipe(decompressStream)
|
||||
.pipe(createWriteStream('./decompressed.txt'));
|
||||
// Compress a buffer (sync and async available)
|
||||
const input = new TextEncoder().encode('Hello World');
|
||||
const compressed = gzipTools.compressSync(input, 9);
|
||||
const decompressed = gzipTools.decompressSync(compressed);
|
||||
|
||||
// Async versions (internally use sync for cross-runtime compatibility)
|
||||
const compressedAsync = await gzipTools.compress(input, 6);
|
||||
const decompressedAsync = await gzipTools.decompress(compressedAsync);
|
||||
```
|
||||
|
||||
### Working with BZIP2 files
|
||||
|
||||
```typescript
|
||||
import { SmartArchive } from '@push.rocks/smartarchive';
|
||||
|
||||
// Handle .bz2 files
|
||||
const bzipArchive = await SmartArchive.fromArchiveUrl(
|
||||
'https://example.com/data.bz2'
|
||||
);
|
||||
await bzipArchive.exportToFs('./extracted', 'data.txt');
|
||||
```
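The same extraction with the current fluent API would look roughly like this; a sketch where `.fileName()` supplies the output name because bzip2, like gzip, does not store one:

```typescript
import { SmartArchive } from '@push.rocks/smartarchive';

await SmartArchive.create()
  .url('https://example.com/data.bz2')
  .fileName('data.txt') // output name for the single decompressed file
  .extract('./extracted');
```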
|
||||
|
||||
### In-memory processing (no filesystem)
|
||||
|
||||
```typescript
|
||||
import { SmartArchive } from '@push.rocks/smartarchive';
|
||||
import { Readable } from 'stream';
|
||||
|
||||
// Process archives entirely in memory
|
||||
const compressedBuffer = await fetchCompressedData();
|
||||
const memoryStream = Readable.from(compressedBuffer);
|
||||
|
||||
const archive = await SmartArchive.fromArchiveStream(memoryStream);
|
||||
const streamFiles = await archive.exportToStreamOfStreamFiles();
|
||||
|
||||
const extractedFiles: Array<{ name: string; content: Buffer }> = [];
|
||||
|
||||
streamFiles.on('data', async (streamFile) => {
|
||||
const chunks: Buffer[] = [];
|
||||
const readStream = await streamFile.createReadStream();
|
||||
|
||||
for await (const chunk of readStream) {
|
||||
chunks.push(chunk);
|
||||
}
|
||||
|
||||
extractedFiles.push({
|
||||
name: streamFile.relativeFilePath,
|
||||
content: Buffer.concat(chunks)
|
||||
});
|
||||
});
|
||||
|
||||
await new Promise((resolve) => streamFiles.on('end', resolve));
|
||||
console.log(`Extracted ${extractedFiles.length} files in memory`);
|
||||
```
|
||||
|
||||
### Nested archive handling (e.g., .tar.gz)
|
||||
|
||||
The library automatically handles nested compression. A `.tar.gz` file is:
|
||||
1. First decompressed from gzip
|
||||
2. Then unpacked from tar
|
||||
|
||||
This happens transparently:
|
||||
|
||||
```typescript
|
||||
import { SmartArchive } from '@push.rocks/smartarchive';
|
||||
|
||||
// Automatically handles gzip → tar extraction chain
|
||||
const tgzArchive = await SmartArchive.fromArchiveFile('./package.tar.gz');
|
||||
await tgzArchive.exportToFs('./extracted');
|
||||
```
|
||||
|
||||
## API Reference 📚
|
||||
|
||||
### SmartArchive Class
|
||||
|
||||
The main entry point for archive operations.
|
||||
|
||||
#### Static Factory Methods
|
||||
|
||||
```typescript
|
||||
// Create from URL - downloads and processes archive
|
||||
SmartArchive.fromArchiveUrl(url: string): Promise<SmartArchive>
|
||||
|
||||
// Create from local file path
|
||||
SmartArchive.fromArchiveFile(path: string): Promise<SmartArchive>
|
||||
|
||||
// Create from any Node.js readable stream
|
||||
SmartArchive.fromArchiveStream(stream: Readable | Duplex | Transform): Promise<SmartArchive>
|
||||
```
|
||||
|
||||
#### Instance Methods
|
||||
|
||||
```typescript
|
||||
// Extract all files to a directory
|
||||
// fileName is optional - used for single-file archives (like .gz) that don't store filename
|
||||
exportToFs(targetDir: string, fileName?: string): Promise<void>
|
||||
|
||||
// Get a stream that emits StreamFile objects for each file in the archive
|
||||
exportToStreamOfStreamFiles(): Promise<StreamIntake<StreamFile>>
|
||||
|
||||
// Get the raw archive stream (useful for piping)
|
||||
getArchiveStream(): Promise<Readable>
|
||||
```
|
||||
|
||||
#### Instance Properties
|
||||
|
||||
```typescript
|
||||
archive.tarTools // TarTools instance for TAR-specific operations
|
||||
archive.zipTools // ZipTools instance for ZIP-specific operations
|
||||
archive.gzipTools // GzipTools instance for GZIP-specific operations
|
||||
archive.bzip2Tools // Bzip2Tools instance for BZIP2-specific operations
|
||||
archive.archiveAnalyzer // ArchiveAnalyzer for inspecting archive type
|
||||
```
|
||||
|
||||
### TarTools Class
|
||||
|
||||
TAR-specific operations for creating and extracting TAR archives.
|
||||
### Working with TAR archives directly
|
||||
|
||||
```typescript
|
||||
import { TarTools } from '@push.rocks/smartarchive';
|
||||
|
||||
const tarTools = new TarTools();
|
||||
|
||||
// Get a tar pack stream for creating archives
|
||||
const pack = await tarTools.getPackStream();
|
||||
// Create a TAR archive from entries (buffer-based, good for small files)
|
||||
const tarBuffer = await tarTools.packFiles([
|
||||
{ archivePath: 'hello.txt', content: 'Hello, World!' },
|
||||
{ archivePath: 'data.json', content: JSON.stringify({ foo: 'bar' }) }
|
||||
]);
|
||||
|
||||
// Add files to a pack stream
|
||||
await tarTools.addFileToPack(pack, {
|
||||
fileName: 'file.txt', // Name in archive
|
||||
content: 'Hello World', // String, Buffer, Readable, SmartFile, or StreamFile
|
||||
byteLength?: number, // Optional: specify size for streams
|
||||
filePath?: string // Optional: path to file on disk
|
||||
});
|
||||
// Create a TAR.GZ archive
|
||||
const tgzBuffer = await tarTools.packFilesToTarGz([
|
||||
{ archivePath: 'file.txt', content: 'Compressed content' }
|
||||
], 6);
|
||||
|
||||
// Pack an entire directory
|
||||
const pack = await tarTools.packDirectory('./src');
|
||||
// Extract a TAR archive
|
||||
const entries = await tarTools.extractTar(tarBuffer);
|
||||
for (const entry of entries) {
|
||||
console.log(`${entry.path}: ${entry.isDirectory ? 'dir' : 'file'}`);
|
||||
}
|
||||
|
||||
// Get extraction stream
|
||||
const extract = tarTools.getDecompressionStream();
|
||||
// Extract a TAR.GZ archive
|
||||
const tgzEntries = await tarTools.extractTarGz(tgzBuffer);
|
||||
|
||||
// Node.js only: Pack a directory (buffer-based)
|
||||
const dirBuffer = await tarTools.packDirectory('./src');
|
||||
const dirTgzBuffer = await tarTools.packDirectoryToTarGz('./src', 9);
|
||||
```
|
||||
|
||||
### ZipTools Class
|
||||
### Streaming TAR for Large Files (Node.js only) 🚀
|
||||
|
||||
ZIP-specific operations.
|
||||
For large files that don't fit in memory, use the streaming APIs:
|
||||
|
||||
```typescript
|
||||
import { TarTools } from '@push.rocks/smartarchive';
|
||||
import * as fs from 'fs';
|
||||
|
||||
const tarTools = new TarTools();
|
||||
|
||||
// ===== STREAMING PACK =====
|
||||
// Create a TAR pack stream - files are processed one at a time
|
||||
const pack = tarTools.getPackStream();
|
||||
|
||||
// Add files with streaming content (requires size for streams)
|
||||
await tarTools.addFileToPack(pack, {
|
||||
fileName: 'small.txt',
|
||||
content: 'Hello World' // Strings and buffers auto-detect size
|
||||
});
|
||||
|
||||
await tarTools.addFileToPack(pack, {
|
||||
fileName: 'large-video.mp4',
|
||||
content: fs.createReadStream('./video.mp4'),
|
||||
size: fs.statSync('./video.mp4').size // Size required for streams
|
||||
});
|
||||
|
||||
pack.finalize();
|
||||
pack.pipe(fs.createWriteStream('output.tar'));
|
||||
|
||||
// ===== STREAMING DIRECTORY PACK =====
|
||||
// Pack entire directory with true streaming (no buffering)
|
||||
const tarStream = await tarTools.getDirectoryPackStream('./large-folder');
|
||||
tarStream.pipe(fs.createWriteStream('backup.tar'));
|
||||
|
||||
// With GZIP compression
|
||||
const tgzStream = await tarTools.getDirectoryPackStreamGz('./large-folder', 6);
|
||||
tgzStream.pipe(fs.createWriteStream('backup.tar.gz'));
|
||||
|
||||
// ===== STREAMING EXTRACT =====
|
||||
// Extract large archives without loading into memory
|
||||
const extract = tarTools.getExtractStream();
|
||||
|
||||
extract.on('entry', (header, stream, next) => {
|
||||
console.log(`Extracting: ${header.name} (${header.size} bytes)`);
|
||||
|
||||
const writeStream = fs.createWriteStream(`./out/${header.name}`);
|
||||
stream.pipe(writeStream);
|
||||
writeStream.on('finish', next);
|
||||
});
|
||||
|
||||
extract.on('finish', () => console.log('Extraction complete'));
|
||||
|
||||
fs.createReadStream('large-archive.tar').pipe(extract);
|
||||
|
||||
// Or use the convenient directory extraction
|
||||
await tarTools.extractToDirectory(
|
||||
fs.createReadStream('archive.tar'),
|
||||
'./output-folder'
|
||||
);
|
||||
```
|
||||
|
||||
### Working with ZIP archives directly
|
||||
|
||||
```typescript
|
||||
import { ZipTools } from '@push.rocks/smartarchive';
|
||||
|
||||
const zipTools = new ZipTools();
|
||||
|
||||
// Get compression stream (for creating ZIP)
|
||||
const compressor = zipTools.getCompressionStream();
|
||||
// Create a ZIP archive from entries
|
||||
const zipBuffer = await zipTools.createZip([
|
||||
{ archivePath: 'readme.txt', content: 'Hello!' },
|
||||
{ archivePath: 'data.bin', content: new Uint8Array([0x00, 0x01, 0x02]) }
|
||||
], 6);
|
||||
|
||||
// Get decompression stream (for extracting ZIP)
|
||||
const decompressor = zipTools.getDecompressionStream();
|
||||
// Extract a ZIP buffer
|
||||
const entries = await zipTools.extractZip(zipBuffer);
|
||||
for (const entry of entries) {
|
||||
console.log(`${entry.path}: ${entry.content.length} bytes`);
|
||||
}
|
||||
```
|
||||
|
||||
### GzipTools Class
|
||||
|
||||
GZIP compression/decompression streams.
|
||||
|
||||
```typescript
|
||||
import { GzipTools } from '@push.rocks/smartarchive';
|
||||
|
||||
const gzipTools = new GzipTools();
|
||||
|
||||
// Get compression stream
|
||||
const compressor = gzipTools.getCompressionStream();
|
||||
|
||||
// Get decompression stream
|
||||
const decompressor = gzipTools.getDecompressionStream();
|
||||
```
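Assuming these are ordinary Node.js transform streams (as the earlier GZIP decompression example suggests), they can be piped directly; a minimal sketch with placeholder paths:

```typescript
import { GzipTools } from '@push.rocks/smartarchive';
import { createReadStream, createWriteStream } from 'fs';

const gzipTools = new GzipTools();

// Compress a file by piping it through the compression stream
createReadStream('./notes.txt')
  .pipe(gzipTools.getCompressionStream())
  .pipe(createWriteStream('./notes.txt.gz'));

// Decompress the other way around
createReadStream('./notes.txt.gz')
  .pipe(gzipTools.getDecompressionStream())
  .pipe(createWriteStream('./notes-roundtrip.txt'));
```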
|
||||
|
||||
## Supported Formats 📋
|
||||
|
||||
| Format | Extension(s) | Extract | Create |
|
||||
|--------|--------------|---------|--------|
|
||||
| TAR | `.tar` | ✅ | ✅ |
|
||||
| TAR.GZ / TGZ | `.tar.gz`, `.tgz` | ✅ | ⚠️ |
|
||||
| ZIP | `.zip` | ✅ | ⚠️ |
|
||||
| GZIP | `.gz` | ✅ | ✅ |
|
||||
| BZIP2 | `.bz2` | ✅ | ❌ |
|
||||
|
||||
✅ Full support | ⚠️ Partial/basic support | ❌ Not supported
|
||||
|
||||
## Performance Tips 🏎️
|
||||
|
||||
1. **Use streaming for large files** – Avoid loading entire archives into memory with `exportToStreamOfStreamFiles()`
|
||||
2. **Provide byte lengths when known** – When adding streams to TAR, provide `byteLength` for better performance
|
||||
3. **Process files as they stream** – Don't collect all files into an array unless necessary
|
||||
4. **Choose the right format** – TAR.GZ for Unix/compression, ZIP for cross-platform compatibility
|
||||
|
||||
## Error Handling 🛡️
|
||||
### In-memory round-trip
|
||||
|
||||
```typescript
|
||||
import { SmartArchive } from '@push.rocks/smartarchive';
|
||||
|
||||
try {
|
||||
const archive = await SmartArchive.fromArchiveUrl('https://example.com/file.zip');
|
||||
await archive.exportToFs('./output');
|
||||
} catch (error) {
|
||||
if (error.code === 'ENOENT') {
|
||||
console.error('Archive file not found');
|
||||
} else if (error.code === 'EACCES') {
|
||||
console.error('Permission denied');
|
||||
} else if (error.message.includes('fetch')) {
|
||||
console.error('Network error downloading archive');
|
||||
} else {
|
||||
console.error('Archive extraction failed:', error.message);
|
||||
}
|
||||
// Create archive in memory
|
||||
const archive = await SmartArchive.create()
|
||||
.format('tar.gz')
|
||||
.entry('config.json', JSON.stringify({ version: '1.0.0' }))
|
||||
.build();
|
||||
|
||||
const buffer = await archive.toBuffer();
|
||||
|
||||
// Extract from buffer
|
||||
const files = await SmartArchive.create()
|
||||
.buffer(buffer)
|
||||
.toSmartFiles();
|
||||
|
||||
for (const file of files) {
|
||||
console.log(`${file.relative}: ${file.contents.toString()}`);
|
||||
}
|
||||
```
|
||||
|
||||
@@ -395,66 +499,156 @@ try {
|
||||
### CI/CD: Download & Extract Build Artifacts
|
||||
|
||||
```typescript
|
||||
const artifacts = await SmartArchive.fromArchiveUrl(
|
||||
`${CI_SERVER}/artifacts/build-${BUILD_ID}.zip`
|
||||
);
|
||||
await artifacts.exportToFs('./dist');
|
||||
const artifacts = await SmartArchive.create()
|
||||
.url(`${CI_SERVER}/artifacts/build-${BUILD_ID}.zip`)
|
||||
.stripComponents(1)
|
||||
.extract('./dist');
|
||||
```
|
||||
|
||||
### Backup System: Restore from Archive
|
||||
### Backup System
|
||||
|
||||
```typescript
|
||||
const backup = await SmartArchive.fromArchiveFile('./backup-2024.tar.gz');
|
||||
await backup.exportToFs('/restore/location');
|
||||
// Create backup
|
||||
await SmartArchive.create()
|
||||
.format('tar.gz')
|
||||
.compression(9)
|
||||
.directory('./data')
|
||||
.toFile(`./backups/backup-${Date.now()}.tar.gz`);
|
||||
|
||||
// Restore backup
|
||||
await SmartArchive.create()
|
||||
.file('./backups/backup-latest.tar.gz')
|
||||
.extract('/restore/location');
|
||||
```
|
||||
|
||||
### NPM Package Inspection
### Bundle files for HTTP download

```typescript
const pkg = await SmartArchive.fromArchiveUrl(
  'https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz'
);
const files = await pkg.exportToStreamOfStreamFiles();
import { SmartArchive } from '@push.rocks/smartarchive';

files.on('data', async (file) => {
  if (file.relativeFilePath.includes('package.json')) {
    const stream = await file.createReadStream();
    // Read and analyze package.json
  }
});

// Express/Fastify handler
app.get('/download-bundle', async (req, res) => {
  const buffer = await SmartArchive.create()
    .format('zip')
    .entry('report.pdf', pdfBuffer)
    .entry('data.xlsx', excelBuffer)
    .entry('images/chart.png', chartBuffer)
    .toBuffer();

  res.setHeader('Content-Type', 'application/zip');
  res.setHeader('Content-Disposition', 'attachment; filename=report-bundle.zip');
  res.send(buffer);
});
```

### Data Pipeline: Process Compressed Datasets

```typescript
const dataset = await SmartArchive.fromArchiveUrl(
  'https://data.source/dataset.tar.gz'
);
const fileStream = await SmartArchive.create()
  .url('https://data.source/dataset.tar.gz')
  .toStreamFiles();

const files = await dataset.exportToStreamOfStreamFiles();
files.on('data', async (file) => {
fileStream.on('data', async (file) => {
  if (file.relativeFilePath.endsWith('.csv')) {
    const stream = await file.createReadStream();
    // Stream CSV processing
    const content = await file.getContentAsBuffer();
    // Stream CSV processing...
  }
});
```
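
The fluent variant of the pipeline can also be written as one self-contained sketch; the dataset URL and the CSV handling below are illustrative only:

```typescript
import { SmartArchive } from '@push.rocks/smartarchive';

// Stream a remote .tar.gz and handle CSV entries one at a time.
// The URL is a placeholder; swap in a real dataset location.
const fileStream = await SmartArchive.create()
  .url('https://data.source/dataset.tar.gz')
  .toStreamFiles();

fileStream.on('data', async (file) => {
  if (file.relativeFilePath.endsWith('.csv')) {
    // Small entries can be buffered...
    const content = await file.getContentAsBuffer();
    console.log(`${file.relativeFilePath}: ${content.toString().split('\n').length} lines`);
    // ...or very large ones streamed via file.createReadStream()
  }
});
fileStream.on('end', () => console.log('dataset processed'));
```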

## Supported Formats 📋

| Format | Extension(s) | Extract | Create | Browser |
|--------|--------------|---------|--------|---------|
| TAR | `.tar` | ✅ | ✅ | ✅ |
| TAR.GZ / TGZ | `.tar.gz`, `.tgz` | ✅ | ✅ | ✅ |
| ZIP | `.zip` | ✅ | ✅ | ✅ |
| GZIP | `.gz` | ✅ | ✅ | ✅ |
| BZIP2 | `.bz2` | ✅ | ❌ | ✅ |

## Type Definitions

```typescript
// Supported archive formats
type TArchiveFormat = 'tar' | 'tar.gz' | 'tgz' | 'zip' | 'gz' | 'bz2';

// Compression level (0 = none, 9 = maximum)
type TCompressionLevel = 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9;

// Entry for creating archives
interface IArchiveEntry {
  archivePath: string;
  content: string | Buffer | Uint8Array | SmartFile | StreamFile;
  size?: number;
  mode?: number;
  mtime?: Date;
}

// Information about an archive entry
interface IArchiveEntryInfo {
  path: string;
  size: number;
  isDirectory: boolean;
  isFile: boolean;
  mtime?: Date;
  mode?: number;
}

// Archive analysis result
interface IArchiveInfo {
  format: TArchiveFormat | null;
  isCompressed: boolean;
  isArchive: boolean;
  entries?: IArchiveEntryInfo[];
}
```

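These are the shapes produced by the builder API; as a rough sketch (the archive path is a placeholder), `.analyze()` yields an `IArchiveInfo` and `.list()` yields `IArchiveEntryInfo` records:

```typescript
import { SmartArchive } from '@push.rocks/smartarchive';

// Inspect an archive without extracting it; './backup.tar.gz' is a placeholder.
const info = await SmartArchive.create()
  .file('./backup.tar.gz')
  .analyze();
console.log(info.format, info.isArchive, info.isCompressed); // e.g. 'gz', true, true

// Enumerate entries and their metadata
const entries = await SmartArchive.create()
  .file('./backup.tar.gz')
  .list();
for (const entry of entries) {
  if (entry.isFile) {
    console.log(`${entry.path} (${entry.size} bytes)`);
  }
}
```
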
## Performance Tips 🏎️

1. **Use streaming for large files** – `.toStreamFiles()` processes entries one at a time without loading the entire archive (see the sketch below)
2. **Choose appropriate compression** – Use 1-3 for speed, 6 (default) for balance, 9 for maximum compression
3. **Filter early** – Use `.include()`/`.exclude()` to skip unwanted entries before processing
4. **Use Uint8Array in browsers** – The browser bundle works with `Uint8Array` for optimal performance

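A short sketch tying tips 1 to 3 together; the URLs, the `.json` filter, and the output paths are placeholder values:

```typescript
import { SmartArchive } from '@push.rocks/smartarchive';

// Tip 1: stream entries one at a time instead of buffering the whole archive.
const fileStream = await SmartArchive.create()
  .url('https://example.com/logs.tar.gz')
  .toStreamFiles();
fileStream.on('data', async (file) => {
  const readStream = await file.createReadStream();
  readStream.on('data', () => {
    // process chunks as they arrive instead of buffering whole entries
  });
});

// Tip 3: filter before processing so unwanted entries are never materialized.
const jsonFiles = await SmartArchive.create()
  .url('https://example.com/logs.tar.gz')
  .include(/\.json$/)
  .toSmartFiles();
console.log(`kept ${jsonFiles.length} JSON entries`);

// Tip 2: pick the compression level explicitly when creating archives.
await SmartArchive.create()
  .format('tar.gz')
  .compression(1) // 1-3 favors speed, 9 favors size
  .directory('./build')
  .toFile('./artifacts/build-fast.tar.gz');
```
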
## Error Handling 🛡️

```typescript
import { SmartArchive } from '@push.rocks/smartarchive';

try {
  await SmartArchive.create()
    .url('https://example.com/file.zip')
    .extract('./output');
} catch (error) {
  if (error.message.includes('No source configured')) {
    console.error('Forgot to specify source');
  } else if (error.message.includes('No format specified')) {
    console.error('Forgot to set format for creation');
  } else if (error.message.includes('extraction mode')) {
    console.error('Cannot mix extraction and creation methods');
  } else {
    console.error('Archive operation failed:', error.message);
  }
}
```

## License and Legal Information

This repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository.
This repository contains open-source code licensed under the MIT License. A copy of the license can be found in the [LICENSE](./LICENSE) file.

**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.

### Trademarks

This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.
This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH or third parties, and are not included within the scope of the MIT license granted herein.

Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines or the guidelines of the respective third-party owners, and any usage must be approved in writing. Third-party trademarks used herein are the property of their respective owners and used only in a descriptive manner, e.g. for an implementation of an API or similar.

### Company Information

Task Venture Capital GmbH
Registered at District court Bremen HRB 35230 HB, Germany
Registered at District Court Bremen HRB 35230 HB, Germany

For any legal inquiries or if you require further information, please contact us via email at hello@task.vc.
For any legal inquiries or further information, please contact us via email at hello@task.vc.

By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.

@@ -22,7 +22,7 @@ tap.test('should create and extract a gzip file', async () => {
|
||||
const testContent = 'This is a test file for gzip compression and decompression.\n'.repeat(100);
|
||||
const testFileName = 'test-file.txt';
|
||||
const gzipFileName = 'test-file.txt.gz';
|
||||
|
||||
|
||||
// Write the original file
|
||||
await plugins.fsPromises.writeFile(
|
||||
plugins.path.join(testPaths.gzipTestDir, testFileName),
|
||||
@@ -36,24 +36,22 @@ tap.test('should create and extract a gzip file', async () => {
|
||||
plugins.path.join(testPaths.gzipTestDir, gzipFileName),
|
||||
Buffer.from(compressed)
|
||||
);
|
||||
|
||||
// Now test extraction using SmartArchive
|
||||
const gzipArchive = await smartarchive.SmartArchive.fromFile(
|
||||
plugins.path.join(testPaths.gzipTestDir, gzipFileName)
|
||||
);
|
||||
|
||||
// Export to a new location
|
||||
// Now test extraction using SmartArchive fluent API
|
||||
const extractPath = plugins.path.join(testPaths.gzipTestDir, 'extracted');
|
||||
await plugins.fsPromises.mkdir(extractPath, { recursive: true });
|
||||
// Provide a filename since gzip doesn't contain filename metadata
|
||||
await gzipArchive.extractToDirectory(extractPath, { fileName: 'test-file.txt' });
|
||||
|
||||
await smartarchive.SmartArchive.create()
|
||||
.file(plugins.path.join(testPaths.gzipTestDir, gzipFileName))
|
||||
.fileName('test-file.txt')
|
||||
.extract(extractPath);
|
||||
|
||||
// Read the extracted file
|
||||
const extractedContent = await plugins.fsPromises.readFile(
|
||||
plugins.path.join(extractPath, 'test-file.txt'),
|
||||
'utf8'
|
||||
);
|
||||
|
||||
|
||||
// Verify the content matches
|
||||
expect(extractedContent).toEqual(testContent);
|
||||
});
|
||||
@@ -62,7 +60,7 @@ tap.test('should handle gzip stream extraction', async () => {
|
||||
// Create test data
|
||||
const testContent = 'Stream test data for gzip\n'.repeat(50);
|
||||
const gzipFileName = 'stream-test.txt.gz';
|
||||
|
||||
|
||||
// Create gzip compressed version
|
||||
const fflate = await import('fflate');
|
||||
const compressed = fflate.gzipSync(Buffer.from(testContent));
|
||||
@@ -75,14 +73,13 @@ tap.test('should handle gzip stream extraction', async () => {
|
||||
const gzipStream = plugins.fs.createReadStream(
|
||||
plugins.path.join(testPaths.gzipTestDir, gzipFileName)
|
||||
);
|
||||
|
||||
// Test extraction using SmartArchive from stream
|
||||
const gzipArchive = await smartarchive.SmartArchive.fromStream(gzipStream);
|
||||
|
||||
// Export to stream and collect the result
|
||||
// Test extraction using SmartArchive from stream with fluent API
|
||||
const streamFiles: any[] = [];
|
||||
const resultStream = await gzipArchive.extractToStream();
|
||||
|
||||
const resultStream = await smartarchive.SmartArchive.create()
|
||||
.stream(gzipStream)
|
||||
.toStreamFiles();
|
||||
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
resultStream.on('data', (streamFile) => {
|
||||
streamFiles.push(streamFile);
|
||||
@@ -90,10 +87,10 @@ tap.test('should handle gzip stream extraction', async () => {
|
||||
resultStream.on('end', resolve);
|
||||
resultStream.on('error', reject);
|
||||
});
|
||||
|
||||
|
||||
// Verify we got the expected file
|
||||
expect(streamFiles.length).toBeGreaterThan(0);
|
||||
|
||||
|
||||
// Read content from the stream file
|
||||
if (streamFiles[0]) {
|
||||
const chunks: Buffer[] = [];
|
||||
@@ -103,7 +100,7 @@ tap.test('should handle gzip stream extraction', async () => {
|
||||
readStream.on('end', resolve);
|
||||
readStream.on('error', reject);
|
||||
});
|
||||
|
||||
|
||||
const extractedContent = Buffer.concat(chunks).toString();
|
||||
expect(extractedContent).toEqual(testContent);
|
||||
}
|
||||
@@ -112,36 +109,32 @@ tap.test('should handle gzip stream extraction', async () => {
|
||||
tap.test('should handle gzip files with original filename in header', async () => {
|
||||
// Test with a real-world gzip file that includes filename in header
|
||||
const testContent = 'File with name in gzip header\n'.repeat(30);
|
||||
const originalFileName = 'original-name.log';
|
||||
const gzipFileName = 'compressed.gz';
|
||||
|
||||
|
||||
// Create a proper gzip with filename header using Node's zlib
|
||||
const zlib = await import('node:zlib');
|
||||
const gzipBuffer = await new Promise<Buffer>((resolve, reject) => {
|
||||
zlib.gzip(Buffer.from(testContent), {
|
||||
zlib.gzip(Buffer.from(testContent), {
|
||||
level: 9,
|
||||
// Note: Node's zlib doesn't support embedding filename directly,
|
||||
// but we can test the extraction anyway
|
||||
}, (err, result) => {
|
||||
if (err) reject(err);
|
||||
else resolve(result);
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
await plugins.fsPromises.writeFile(
|
||||
plugins.path.join(testPaths.gzipTestDir, gzipFileName),
|
||||
gzipBuffer
|
||||
);
|
||||
|
||||
// Test extraction
|
||||
const gzipArchive = await smartarchive.SmartArchive.fromFile(
|
||||
plugins.path.join(testPaths.gzipTestDir, gzipFileName)
|
||||
);
|
||||
|
||||
// Test extraction with fluent API
|
||||
const extractPath = plugins.path.join(testPaths.gzipTestDir, 'header-test');
|
||||
await plugins.fsPromises.mkdir(extractPath, { recursive: true });
|
||||
// Provide a filename since gzip doesn't reliably contain filename metadata
|
||||
await gzipArchive.extractToDirectory(extractPath, { fileName: 'compressed.txt' });
|
||||
|
||||
await smartarchive.SmartArchive.create()
|
||||
.file(plugins.path.join(testPaths.gzipTestDir, gzipFileName))
|
||||
.fileName('compressed.txt')
|
||||
.extract(extractPath);
|
||||
|
||||
// Check if file was extracted (name might be derived from archive name)
|
||||
const files = await plugins.listFileTree(extractPath, '**/*');
|
||||
@@ -160,7 +153,7 @@ tap.test('should handle large gzip files', async () => {
|
||||
// Create a larger test file
|
||||
const largeContent = 'x'.repeat(1024 * 1024); // 1MB of 'x' characters
|
||||
const gzipFileName = 'large-file.txt.gz';
|
||||
|
||||
|
||||
// Compress the large file
|
||||
const fflate = await import('fflate');
|
||||
const compressed = fflate.gzipSync(Buffer.from(largeContent));
|
||||
@@ -169,15 +162,14 @@ tap.test('should handle large gzip files', async () => {
|
||||
Buffer.from(compressed)
|
||||
);
|
||||
|
||||
// Test extraction
|
||||
const gzipArchive = await smartarchive.SmartArchive.fromFile(
|
||||
plugins.path.join(testPaths.gzipTestDir, gzipFileName)
|
||||
);
|
||||
|
||||
// Test extraction with fluent API
|
||||
const extractPath = plugins.path.join(testPaths.gzipTestDir, 'large-extracted');
|
||||
await plugins.fsPromises.mkdir(extractPath, { recursive: true });
|
||||
// Provide a filename since gzip doesn't contain filename metadata
|
||||
await gzipArchive.extractToDirectory(extractPath, { fileName: 'large-file.txt' });
|
||||
|
||||
await smartarchive.SmartArchive.create()
|
||||
.file(plugins.path.join(testPaths.gzipTestDir, gzipFileName))
|
||||
.fileName('large-file.txt')
|
||||
.extract(extractPath);
|
||||
|
||||
// Verify the extracted content
|
||||
const files = await plugins.listFileTree(extractPath, '**/*');
|
||||
@@ -195,14 +187,13 @@ tap.test('should handle real-world multi-chunk gzip from URL', async () => {
|
||||
// Test with a real tgz file that will be processed in multiple chunks
|
||||
const testUrl = 'https://registry.npmjs.org/@push.rocks/smartfile/-/smartfile-11.2.7.tgz';
|
||||
|
||||
// Download and extract the archive
|
||||
const testArchive = await smartarchive.SmartArchive.fromUrl(testUrl);
|
||||
|
||||
// Download and extract the archive with fluent API
|
||||
const extractPath = plugins.path.join(testPaths.gzipTestDir, 'real-world-test');
|
||||
await plugins.fsPromises.mkdir(extractPath, { recursive: true });
|
||||
|
||||
// This will test multi-chunk decompression as the file is larger
|
||||
await testArchive.extractToDirectory(extractPath);
|
||||
await smartarchive.SmartArchive.create()
|
||||
.url(testUrl)
|
||||
.extract(extractPath);
|
||||
|
||||
// Verify extraction worked
|
||||
const files = await plugins.listFileTree(extractPath, '**/*');
|
||||
@@ -265,22 +256,17 @@ tap.test('should handle real-world multi-chunk gzip from URL', async () => {
|
||||
tap.test('should handle gzip extraction fully in memory', async () => {
|
||||
// Create test data in memory
|
||||
const testContent = 'This is test data for in-memory gzip processing\n'.repeat(100);
|
||||
|
||||
|
||||
// Compress using fflate in memory
|
||||
const fflate = await import('fflate');
|
||||
const compressed = fflate.gzipSync(Buffer.from(testContent));
|
||||
|
||||
// Create a stream from the compressed data
|
||||
const { Readable } = await import('node:stream');
|
||||
const compressedStream = Readable.from(Buffer.from(compressed));
|
||||
|
||||
// Process through SmartArchive without touching filesystem
|
||||
const gzipArchive = await smartarchive.SmartArchive.fromStream(compressedStream);
|
||||
|
||||
// Export to stream of stream files (in memory)
|
||||
// Process through SmartArchive without touching filesystem using fluent API
|
||||
const streamFiles: plugins.smartfile.StreamFile[] = [];
|
||||
const resultStream = await gzipArchive.extractToStream();
|
||||
|
||||
const resultStream = await smartarchive.SmartArchive.create()
|
||||
.buffer(Buffer.from(compressed))
|
||||
.toStreamFiles();
|
||||
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
resultStream.on('data', (streamFile: plugins.smartfile.StreamFile) => {
|
||||
streamFiles.push(streamFile);
|
||||
@@ -288,21 +274,21 @@ tap.test('should handle gzip extraction fully in memory', async () => {
|
||||
resultStream.on('end', resolve);
|
||||
resultStream.on('error', reject);
|
||||
});
|
||||
|
||||
|
||||
// Verify we got a file
|
||||
expect(streamFiles.length).toBeGreaterThan(0);
|
||||
|
||||
|
||||
// Read the content from memory without filesystem
|
||||
const firstFile = streamFiles[0];
|
||||
const chunks: Buffer[] = [];
|
||||
const readStream = await firstFile.createReadStream();
|
||||
|
||||
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
readStream.on('data', (chunk: Buffer) => chunks.push(chunk));
|
||||
readStream.on('end', resolve);
|
||||
readStream.on('error', reject);
|
||||
});
|
||||
|
||||
|
||||
const extractedContent = Buffer.concat(chunks).toString();
|
||||
expect(extractedContent).toEqual(testContent);
|
||||
console.log(` ✓ In-memory extraction successful (${extractedContent.length} bytes)`);
|
||||
@@ -314,88 +300,83 @@ tap.test('should handle real tgz file fully in memory', async (tools) => {
|
||||
const response = await plugins.smartrequest.SmartRequest.create()
|
||||
.url('https://registry.npmjs.org/@push.rocks/smartfile/-/smartfile-11.2.7.tgz')
|
||||
.get();
|
||||
|
||||
|
||||
const tgzBuffer = Buffer.from(await response.arrayBuffer());
|
||||
console.log(` Downloaded ${tgzBuffer.length} bytes into memory`);
|
||||
|
||||
// Create stream from buffer
|
||||
const { Readable: Readable2 } = await import('node:stream');
|
||||
const tgzStream = Readable2.from(tgzBuffer);
|
||||
|
||||
// Process through SmartArchive in memory
|
||||
const archive = await smartarchive.SmartArchive.fromStream(tgzStream);
|
||||
|
||||
// Export to stream of stream files (in memory)
|
||||
// Process through SmartArchive in memory with fluent API
|
||||
const streamFiles: plugins.smartfile.StreamFile[] = [];
|
||||
const resultStream = await archive.extractToStream();
|
||||
|
||||
const resultStream = await smartarchive.SmartArchive.create()
|
||||
.buffer(tgzBuffer)
|
||||
.toStreamFiles();
|
||||
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
let timeout: NodeJS.Timeout;
|
||||
|
||||
|
||||
const cleanup = () => {
|
||||
clearTimeout(timeout);
|
||||
};
|
||||
|
||||
|
||||
timeout = setTimeout(() => {
|
||||
cleanup();
|
||||
resolve(); // Resolve after timeout if stream doesn't end
|
||||
}, 5000);
|
||||
|
||||
|
||||
resultStream.on('data', (streamFile: plugins.smartfile.StreamFile) => {
|
||||
streamFiles.push(streamFile);
|
||||
});
|
||||
|
||||
|
||||
resultStream.on('end', () => {
|
||||
cleanup();
|
||||
resolve();
|
||||
});
|
||||
|
||||
|
||||
resultStream.on('error', (err) => {
|
||||
cleanup();
|
||||
reject(err);
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
console.log(` Extracted ${streamFiles.length} files in memory`);
|
||||
// At minimum we should have extracted something
|
||||
expect(streamFiles.length).toBeGreaterThan(0);
|
||||
|
||||
|
||||
// Find and read package.json from memory
|
||||
const packageJsonFile = streamFiles.find(f => f.relativeFilePath?.includes('package.json'));
|
||||
|
||||
|
||||
if (packageJsonFile) {
|
||||
const chunks: Buffer[] = [];
|
||||
const readStream = await packageJsonFile.createReadStream();
|
||||
|
||||
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
readStream.on('data', (chunk: Buffer) => chunks.push(chunk));
|
||||
readStream.on('end', resolve);
|
||||
readStream.on('error', reject);
|
||||
});
|
||||
|
||||
|
||||
const packageJsonContent = Buffer.concat(chunks).toString();
|
||||
const packageJson = JSON.parse(packageJsonContent);
|
||||
expect(packageJson.name).toEqual('@push.rocks/smartfile');
|
||||
expect(packageJson.version).toEqual('11.2.7');
|
||||
console.log(` ✓ Read package.json from memory: ${packageJson.name}@${packageJson.version}`);
|
||||
}
|
||||
|
||||
|
||||
// Read a few more files to verify integrity
|
||||
const filesToCheck = streamFiles.slice(0, 3);
|
||||
for (const file of filesToCheck) {
|
||||
const chunks: Buffer[] = [];
|
||||
const readStream = await file.createReadStream();
|
||||
|
||||
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
readStream.on('data', (chunk: Buffer) => chunks.push(chunk));
|
||||
readStream.on('end', resolve);
|
||||
readStream.on('error', reject);
|
||||
});
|
||||
|
||||
|
||||
const content = Buffer.concat(chunks);
|
||||
expect(content.length).toBeGreaterThan(0);
|
||||
console.log(` ✓ Read ${file.relativeFilePath} from memory (${content.length} bytes)`);
|
||||
}
|
||||
});
|
||||
|
||||
export default tap.start();
|
||||
export default tap.start();
|
||||
|
||||
@@ -32,20 +32,209 @@ tap.preTask('should prepare downloads', async (tools) => {
|
||||
);
|
||||
});
|
||||
|
||||
tap.test('should extract existing files on disk', async () => {
|
||||
const testSmartarchive = await smartarchive.SmartArchive.fromUrl(
|
||||
'https://verdaccio.lossless.digital/@pushrocks%2fwebsetup/-/websetup-2.0.14.tgz',
|
||||
);
|
||||
await testSmartarchive.extractToDirectory(testPaths.nogitDir);
|
||||
tap.test('should extract existing files on disk using fluent API', async () => {
|
||||
await smartarchive.SmartArchive.create()
|
||||
.url('https://verdaccio.lossless.digital/@pushrocks%2fwebsetup/-/websetup-2.0.14.tgz')
|
||||
.extract(testPaths.nogitDir);
|
||||
});
|
||||
|
||||
tap.test('should extract from file using fluent API', async () => {
|
||||
const extractPath = plugins.path.join(testPaths.nogitDir, 'from-file-test');
|
||||
await plugins.fsPromises.mkdir(extractPath, { recursive: true });
|
||||
|
||||
await smartarchive.SmartArchive.create()
|
||||
.file(plugins.path.join(testPaths.nogitDir, 'test.tgz'))
|
||||
.extract(extractPath);
|
||||
|
||||
const files = await plugins.listFileTree(extractPath, '**/*');
|
||||
expect(files.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
tap.test('should extract with stripComponents using fluent API', async () => {
|
||||
const extractPath = plugins.path.join(testPaths.nogitDir, 'strip-test');
|
||||
await plugins.fsPromises.mkdir(extractPath, { recursive: true });
|
||||
|
||||
await smartarchive.SmartArchive.create()
|
||||
.url('https://verdaccio.lossless.digital/@pushrocks%2fwebsetup/-/websetup-2.0.14.tgz')
|
||||
.stripComponents(1)
|
||||
.extract(extractPath);
|
||||
|
||||
const files = await plugins.listFileTree(extractPath, '**/*');
|
||||
expect(files.length).toBeGreaterThan(0);
|
||||
// Files should not have 'package/' prefix
|
||||
const hasPackagePrefix = files.some(f => f.startsWith('package/'));
|
||||
expect(hasPackagePrefix).toBeFalse();
|
||||
});
|
||||
|
||||
tap.test('should extract with filter using fluent API', async () => {
|
||||
const extractPath = plugins.path.join(testPaths.nogitDir, 'filter-test');
|
||||
await plugins.fsPromises.mkdir(extractPath, { recursive: true });
|
||||
|
||||
await smartarchive.SmartArchive.create()
|
||||
.url('https://verdaccio.lossless.digital/@pushrocks%2fwebsetup/-/websetup-2.0.14.tgz')
|
||||
.filter(entry => entry.path.endsWith('.json'))
|
||||
.extract(extractPath);
|
||||
|
||||
const files = await plugins.listFileTree(extractPath, '**/*');
|
||||
// All extracted files should be JSON
|
||||
for (const file of files) {
|
||||
expect(file.endsWith('.json')).toBeTrue();
|
||||
}
|
||||
});
|
||||
|
||||
tap.test('should list archive entries using fluent API', async () => {
|
||||
const entries = await smartarchive.SmartArchive.create()
|
||||
.url('https://verdaccio.lossless.digital/@pushrocks%2fwebsetup/-/websetup-2.0.14.tgz')
|
||||
.list();
|
||||
|
||||
expect(entries.length).toBeGreaterThan(0);
|
||||
const hasPackageJson = entries.some(e => e.path.includes('package.json'));
|
||||
expect(hasPackageJson).toBeTrue();
|
||||
});
|
||||
|
||||
tap.test('should create archive using fluent API', async () => {
|
||||
const archive = await smartarchive.SmartArchive.create()
|
||||
.format('tar.gz')
|
||||
.compression(9)
|
||||
.entry('hello.txt', 'Hello World!')
|
||||
.entry('config.json', JSON.stringify({ name: 'test', version: '1.0.0' }));
|
||||
|
||||
expect(archive).toBeInstanceOf(smartarchive.SmartArchive);
|
||||
|
||||
const buffer = await archive.toBuffer();
|
||||
expect(buffer.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
tap.test('should create and write archive to file using fluent API', async () => {
|
||||
const outputPath = plugins.path.join(testPaths.nogitDir, 'created-archive.tar.gz');
|
||||
|
||||
await smartarchive.SmartArchive.create()
|
||||
.format('tar.gz')
|
||||
.entry('readme.txt', 'This is a test archive')
|
||||
.entry('data/info.json', JSON.stringify({ created: new Date().toISOString() }))
|
||||
.toFile(outputPath);
|
||||
|
||||
// Verify file was created
|
||||
const stats = await plugins.fsPromises.stat(outputPath);
|
||||
expect(stats.size).toBeGreaterThan(0);
|
||||
|
||||
// Verify we can extract it
|
||||
const extractPath = plugins.path.join(testPaths.nogitDir, 'verify-created');
|
||||
await smartarchive.SmartArchive.create()
|
||||
.file(outputPath)
|
||||
.extract(extractPath);
|
||||
|
||||
const files = await plugins.listFileTree(extractPath, '**/*');
|
||||
expect(files).toContain('readme.txt');
|
||||
});
|
||||
|
||||
tap.test('should create ZIP archive using fluent API', async () => {
|
||||
const outputPath = plugins.path.join(testPaths.nogitDir, 'created-archive.zip');
|
||||
|
||||
await smartarchive.SmartArchive.create()
|
||||
.format('zip')
|
||||
.entry('file1.txt', 'Content 1')
|
||||
.entry('file2.txt', 'Content 2')
|
||||
.toFile(outputPath);
|
||||
|
||||
// Verify file was created
|
||||
const stats = await plugins.fsPromises.stat(outputPath);
|
||||
expect(stats.size).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
tap.test('should extract to SmartFiles using fluent API', async () => {
|
||||
const smartFiles = await smartarchive.SmartArchive.create()
|
||||
.url('https://verdaccio.lossless.digital/@pushrocks%2fwebsetup/-/websetup-2.0.14.tgz')
|
||||
.toSmartFiles();
|
||||
|
||||
expect(smartFiles.length).toBeGreaterThan(0);
|
||||
|
||||
const packageJson = smartFiles.find(f => f.relative.includes('package.json'));
|
||||
expect(packageJson).toBeDefined();
|
||||
});
|
||||
|
||||
tap.test('should analyze archive using fluent API', async () => {
|
||||
const info = await smartarchive.SmartArchive.create()
|
||||
.file(plugins.path.join(testPaths.nogitDir, 'test.tgz'))
|
||||
.analyze();
|
||||
|
||||
expect(info.isArchive).toBeTrue();
|
||||
expect(info.isCompressed).toBeTrue();
|
||||
expect(info.format).toEqual('gz');
|
||||
});
|
||||
|
||||
tap.test('should check if file exists in archive using fluent API', async () => {
|
||||
const hasPackageJson = await smartarchive.SmartArchive.create()
|
||||
.url('https://verdaccio.lossless.digital/@pushrocks%2fwebsetup/-/websetup-2.0.14.tgz')
|
||||
.hasFile('package.json');
|
||||
|
||||
expect(hasPackageJson).toBeTrue();
|
||||
});
|
||||
|
||||
tap.test('should extract single file using fluent API', async () => {
|
||||
const packageJson = await smartarchive.SmartArchive.create()
|
||||
.url('https://verdaccio.lossless.digital/@pushrocks%2fwebsetup/-/websetup-2.0.14.tgz')
|
||||
.extractFile('package.json');
|
||||
|
||||
expect(packageJson).toBeDefined();
|
||||
expect(packageJson!.contents.toString()).toContain('websetup');
|
||||
});
|
||||
|
||||
tap.test('should handle include/exclude patterns', async () => {
|
||||
const smartFiles = await smartarchive.SmartArchive.create()
|
||||
.url('https://verdaccio.lossless.digital/@pushrocks%2fwebsetup/-/websetup-2.0.14.tgz')
|
||||
.include(/\.json$/)
|
||||
.toSmartFiles();
|
||||
|
||||
expect(smartFiles.length).toBeGreaterThan(0);
|
||||
for (const file of smartFiles) {
|
||||
expect(file.relative.endsWith('.json')).toBeTrue();
|
||||
}
|
||||
});
|
||||
|
||||
tap.test('should throw error when mixing modes', async () => {
|
||||
let threw = false;
|
||||
try {
|
||||
smartarchive.SmartArchive.create()
|
||||
.url('https://example.com/archive.tgz')
|
||||
.entry('file.txt', 'content'); // This should throw
|
||||
} catch (e) {
|
||||
threw = true;
|
||||
expect((e as Error).message).toContain('extraction mode');
|
||||
}
|
||||
expect(threw).toBeTrue();
|
||||
});
|
||||
|
||||
tap.test('should throw error when no source configured', async () => {
|
||||
let threw = false;
|
||||
try {
|
||||
await smartarchive.SmartArchive.create().extract('./output');
|
||||
} catch (e) {
|
||||
threw = true;
|
||||
expect((e as Error).message).toContain('No source configured');
|
||||
}
|
||||
expect(threw).toBeTrue();
|
||||
});
|
||||
|
||||
tap.test('should throw error when no format configured', async () => {
|
||||
let threw = false;
|
||||
try {
|
||||
await smartarchive.SmartArchive.create()
|
||||
.entry('file.txt', 'content')
|
||||
.toBuffer();
|
||||
} catch (e) {
|
||||
threw = true;
|
||||
expect((e as Error).message).toContain('No format specified');
|
||||
}
|
||||
expect(threw).toBeTrue();
|
||||
});
|
||||
|
||||
tap.skip.test('should extract a b2zip', async () => {
|
||||
const dataUrl =
|
||||
'https://daten.offeneregister.de/de_companies_ocdata.jsonl.bz2';
|
||||
const testArchive = await smartarchive.SmartArchive.fromUrl(dataUrl);
|
||||
await testArchive.extractToDirectory(
|
||||
plugins.path.join(testPaths.nogitDir, 'de_companies_ocdata.jsonl'),
|
||||
);
|
||||
await smartarchive.SmartArchive.create()
|
||||
.url(dataUrl)
|
||||
.extract(plugins.path.join(testPaths.nogitDir, 'de_companies_ocdata.jsonl'));
|
||||
});
|
||||
|
||||
await tap.start();
|
||||
export default tap.start();
|
||||
|
||||
@@ -3,6 +3,6 @@
 */
export const commitinfo = {
  name: '@push.rocks/smartarchive',
  version: '5.0.0',
  version: '5.2.0',
  description: 'A library for working with archive files, providing utilities for compressing and decompressing data.'
}

@@ -1,5 +1,5 @@
|
||||
import type { SmartArchive } from './classes.smartarchive.js';
|
||||
import type { TSupportedMime } from './interfaces.js';
|
||||
import type { TSupportedMime } from '../ts_shared/interfaces.js';
|
||||
import * as plugins from './plugins.js';
|
||||
|
||||
/**
|
||||
@@ -8,7 +8,7 @@ import * as plugins from './plugins.js';
|
||||
export type TDecompressionStream =
|
||||
| plugins.stream.Transform
|
||||
| plugins.stream.Duplex
|
||||
| plugins.tarStream.Extract;
|
||||
| plugins.smartstream.SmartDuplex<any, any>;
|
||||
|
||||
/**
|
||||
* Result of archive analysis
|
||||
@@ -53,14 +53,42 @@ export class ArchiveAnalyzer {
|
||||
*/
|
||||
private async getDecompressionStream(mimeTypeArg: TSupportedMime): Promise<TDecompressionStream> {
|
||||
switch (mimeTypeArg) {
|
||||
case 'application/gzip':
|
||||
return this.smartArchiveRef.gzipTools.getDecompressionStream();
|
||||
case 'application/gzip': {
|
||||
// Use fflate streaming Gunzip - instance must be created once and reused
|
||||
let gunzip: plugins.fflate.Gunzip;
|
||||
return new plugins.stream.Transform({
|
||||
construct(callback) {
|
||||
gunzip = new plugins.fflate.Gunzip((data, final) => {
|
||||
this.push(Buffer.from(data));
|
||||
});
|
||||
callback();
|
||||
},
|
||||
transform(chunk, encoding, callback) {
|
||||
try {
|
||||
gunzip.push(chunk, false);
|
||||
callback();
|
||||
} catch (err) {
|
||||
callback(err as Error);
|
||||
}
|
||||
},
|
||||
flush(callback) {
|
||||
try {
|
||||
// Signal end of stream with empty final chunk
|
||||
gunzip.push(new Uint8Array(0), true);
|
||||
callback();
|
||||
} catch (err) {
|
||||
callback(err as Error);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
case 'application/zip':
|
||||
return this.smartArchiveRef.zipTools.getDecompressionStream();
|
||||
case 'application/x-bzip2':
|
||||
return this.smartArchiveRef.bzip2Tools.getDecompressionStream();
|
||||
case 'application/x-tar':
|
||||
return this.smartArchiveRef.tarTools.getDecompressionStream();
|
||||
// TAR doesn't need decompression, just pass through
|
||||
return plugins.smartstream.createPassThrough();
|
||||
default:
|
||||
// Handle unsupported formats or no decompression needed
|
||||
return plugins.smartstream.createPassThrough();
|
||||
|
||||
@@ -1,16 +0,0 @@
|
||||
import type { SmartArchive } from './classes.smartarchive.js';
|
||||
import * as plugins from './plugins.js';
|
||||
|
||||
import { unbzip2Stream } from './bzip2/index.js';
|
||||
|
||||
export class Bzip2Tools {
|
||||
smartArchiveRef: SmartArchive;
|
||||
|
||||
constructor(smartArchiveRefArg: SmartArchive) {
|
||||
this.smartArchiveRef = smartArchiveRefArg;
|
||||
}
|
||||
|
||||
getDecompressionStream() {
|
||||
return unbzip2Stream();
|
||||
}
|
||||
}
|
||||
@@ -1,143 +0,0 @@
|
||||
import * as plugins from './plugins.js';
|
||||
import type { TCompressionLevel } from './interfaces.js';
|
||||
|
||||
/**
|
||||
* Transform stream for GZIP compression using fflate
|
||||
*/
|
||||
export class GzipCompressionTransform extends plugins.stream.Transform {
|
||||
private gzip: plugins.fflate.Gzip;
|
||||
|
||||
constructor(level: TCompressionLevel = 6) {
|
||||
super();
|
||||
|
||||
// Create a streaming Gzip compressor
|
||||
this.gzip = new plugins.fflate.Gzip({ level }, (chunk, final) => {
|
||||
this.push(Buffer.from(chunk));
|
||||
if (final) {
|
||||
this.push(null);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
_transform(
|
||||
chunk: Buffer,
|
||||
encoding: BufferEncoding,
|
||||
callback: plugins.stream.TransformCallback
|
||||
): void {
|
||||
try {
|
||||
this.gzip.push(chunk, false);
|
||||
callback();
|
||||
} catch (err) {
|
||||
callback(err as Error);
|
||||
}
|
||||
}
|
||||
|
||||
_flush(callback: plugins.stream.TransformCallback): void {
|
||||
try {
|
||||
this.gzip.push(new Uint8Array(0), true);
|
||||
callback();
|
||||
} catch (err) {
|
||||
callback(err as Error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Transform stream for GZIP decompression using fflate
|
||||
*/
|
||||
export class GzipDecompressionTransform extends plugins.stream.Transform {
|
||||
private gunzip: plugins.fflate.Gunzip;
|
||||
|
||||
constructor() {
|
||||
super();
|
||||
|
||||
// Create a streaming Gunzip decompressor
|
||||
this.gunzip = new plugins.fflate.Gunzip((chunk, final) => {
|
||||
this.push(Buffer.from(chunk));
|
||||
if (final) {
|
||||
this.push(null);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
_transform(
|
||||
chunk: Buffer,
|
||||
encoding: BufferEncoding,
|
||||
callback: plugins.stream.TransformCallback
|
||||
): void {
|
||||
try {
|
||||
this.gunzip.push(chunk, false);
|
||||
callback();
|
||||
} catch (err) {
|
||||
callback(err as Error);
|
||||
}
|
||||
}
|
||||
|
||||
_flush(callback: plugins.stream.TransformCallback): void {
|
||||
try {
|
||||
this.gunzip.push(new Uint8Array(0), true);
|
||||
callback();
|
||||
} catch (err) {
|
||||
callback(err as Error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* GZIP compression and decompression utilities
|
||||
*/
|
||||
export class GzipTools {
|
||||
/**
|
||||
* Get a streaming compression transform
|
||||
*/
|
||||
public getCompressionStream(level?: TCompressionLevel): plugins.stream.Transform {
|
||||
return new GzipCompressionTransform(level);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a streaming decompression transform
|
||||
*/
|
||||
public getDecompressionStream(): plugins.stream.Transform {
|
||||
return new GzipDecompressionTransform();
|
||||
}
|
||||
|
||||
/**
|
||||
* Compress data synchronously
|
||||
*/
|
||||
public compressSync(data: Buffer, level?: TCompressionLevel): Buffer {
|
||||
const options = level !== undefined ? { level } : undefined;
|
||||
return Buffer.from(plugins.fflate.gzipSync(data, options));
|
||||
}
|
||||
|
||||
/**
|
||||
* Decompress data synchronously
|
||||
*/
|
||||
public decompressSync(data: Buffer): Buffer {
|
||||
return Buffer.from(plugins.fflate.gunzipSync(data));
|
||||
}
|
||||
|
||||
/**
|
||||
* Compress data asynchronously
|
||||
*/
|
||||
public async compress(data: Buffer, level?: TCompressionLevel): Promise<Buffer> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const options = level !== undefined ? { level } : undefined;
|
||||
plugins.fflate.gzip(data, options as plugins.fflate.AsyncGzipOptions, (err, result) => {
|
||||
if (err) reject(err);
|
||||
else resolve(Buffer.from(result));
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Decompress data asynchronously
|
||||
*/
|
||||
public async decompress(data: Buffer): Promise<Buffer> {
|
||||
return new Promise((resolve, reject) => {
|
||||
plugins.fflate.gunzip(data, (err, result) => {
|
||||
if (err) reject(err);
|
||||
else resolve(Buffer.from(result));
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
@@ -1,208 +1,272 @@
|
||||
import * as plugins from './plugins.js';
|
||||
import type { IArchiveEntry, TCompressionLevel } from './interfaces.js';
|
||||
import { GzipTools } from './classes.gziptools.js';
|
||||
import type { IArchiveEntry, TCompressionLevel } from '../ts_shared/interfaces.js';
|
||||
import { TarTools as SharedTarTools } from '../ts_shared/classes.tartools.js';
|
||||
|
||||
/**
|
||||
* TAR archive creation and extraction utilities
|
||||
* Options for adding a file to a TAR pack stream
|
||||
*/
|
||||
export class TarTools {
|
||||
export interface ITarPackFileOptions {
|
||||
fileName: string;
|
||||
content: string | Buffer | Uint8Array | plugins.stream.Readable;
|
||||
size?: number;
|
||||
mode?: number;
|
||||
mtime?: Date;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extended TAR archive utilities with Node.js streaming support
|
||||
*
|
||||
* For small archives: Use inherited buffer-based methods (packFiles, extractTar, etc.)
|
||||
* For large archives: Use streaming methods (getPackStream, getExtractStream, etc.)
|
||||
*/
|
||||
export class TarTools extends SharedTarTools {
|
||||
// ============================================
|
||||
// STREAMING PACK METHODS (for large files)
|
||||
// ============================================
|
||||
|
||||
/**
|
||||
* Add a file to a TAR pack stream
|
||||
* Get a streaming TAR pack instance
|
||||
* Use this for packing large files without buffering everything in memory
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const pack = tarTools.getPackStream();
|
||||
*
|
||||
* await tarTools.addFileToPack(pack, { fileName: 'large.bin', content: readStream, size: fileSize });
|
||||
* await tarTools.addFileToPack(pack, { fileName: 'small.txt', content: 'Hello World' });
|
||||
*
|
||||
* pack.finalize();
|
||||
* pack.pipe(fs.createWriteStream('output.tar'));
|
||||
* ```
|
||||
*/
|
||||
public async addFileToPack(
|
||||
pack: plugins.tarStream.Pack,
|
||||
optionsArg: {
|
||||
fileName?: string;
|
||||
content?:
|
||||
| string
|
||||
| Buffer
|
||||
| plugins.stream.Readable
|
||||
| plugins.smartfile.SmartFile
|
||||
| plugins.smartfile.StreamFile;
|
||||
byteLength?: number;
|
||||
filePath?: string;
|
||||
}
|
||||
): Promise<void> {
|
||||
return new Promise<void>(async (resolve, reject) => {
|
||||
let fileName: string | null = null;
|
||||
|
||||
if (optionsArg.fileName) {
|
||||
fileName = optionsArg.fileName;
|
||||
} else if (optionsArg.content instanceof plugins.smartfile.SmartFile) {
|
||||
fileName = optionsArg.content.relative;
|
||||
} else if (optionsArg.content instanceof plugins.smartfile.StreamFile) {
|
||||
fileName = optionsArg.content.relativeFilePath;
|
||||
} else if (optionsArg.filePath) {
|
||||
fileName = optionsArg.filePath;
|
||||
}
|
||||
|
||||
if (!fileName) {
|
||||
reject(new Error('No filename specified for TAR entry'));
|
||||
return;
|
||||
}
|
||||
|
||||
// Determine content byte length
|
||||
let contentByteLength: number | undefined;
|
||||
if (optionsArg.byteLength) {
|
||||
contentByteLength = optionsArg.byteLength;
|
||||
} else if (typeof optionsArg.content === 'string') {
|
||||
contentByteLength = Buffer.byteLength(optionsArg.content, 'utf8');
|
||||
} else if (Buffer.isBuffer(optionsArg.content)) {
|
||||
contentByteLength = optionsArg.content.length;
|
||||
} else if (optionsArg.content instanceof plugins.smartfile.SmartFile) {
|
||||
contentByteLength = await optionsArg.content.getSize();
|
||||
} else if (optionsArg.content instanceof plugins.smartfile.StreamFile) {
|
||||
contentByteLength = await optionsArg.content.getSize();
|
||||
} else if (optionsArg.filePath) {
|
||||
const fileStat = await plugins.fsPromises.stat(optionsArg.filePath);
|
||||
contentByteLength = fileStat.size;
|
||||
}
|
||||
|
||||
// Convert all content types to Readable stream
|
||||
let content: plugins.stream.Readable;
|
||||
if (Buffer.isBuffer(optionsArg.content)) {
|
||||
content = plugins.stream.Readable.from(optionsArg.content);
|
||||
} else if (typeof optionsArg.content === 'string') {
|
||||
content = plugins.stream.Readable.from(Buffer.from(optionsArg.content));
|
||||
} else if (optionsArg.content instanceof plugins.smartfile.SmartFile) {
|
||||
content = plugins.stream.Readable.from(optionsArg.content.contents);
|
||||
} else if (optionsArg.content instanceof plugins.smartfile.StreamFile) {
|
||||
content = await optionsArg.content.createReadStream();
|
||||
} else if (optionsArg.content instanceof plugins.stream.Readable) {
|
||||
content = optionsArg.content;
|
||||
} else if (optionsArg.filePath) {
|
||||
content = plugins.fs.createReadStream(optionsArg.filePath);
|
||||
} else {
|
||||
reject(new Error('No content or filePath specified for TAR entry'));
|
||||
return;
|
||||
}
|
||||
|
||||
const entry = pack.entry(
|
||||
{
|
||||
name: fileName,
|
||||
...(contentByteLength !== undefined ? { size: contentByteLength } : {}),
|
||||
},
|
||||
(err: Error | null) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
resolve();
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
content.pipe(entry);
|
||||
// Note: resolve() is called in the callback above when pipe completes
|
||||
});
|
||||
public getPackStream(): plugins.tarStream.Pack {
|
||||
return plugins.tarStream.pack();
|
||||
}
|
||||
|
||||
/**
|
||||
* Pack a directory into a TAR stream
|
||||
* Add a file to a TAR pack stream
|
||||
* Supports strings, buffers, and readable streams
|
||||
*
|
||||
* @param pack - The pack stream from getPackStream()
|
||||
* @param options - File options including name, content, and optional metadata
|
||||
*/
|
||||
public async packDirectory(directoryPath: string): Promise<plugins.tarStream.Pack> {
|
||||
const fileTree = await plugins.listFileTree(directoryPath, '**/*');
|
||||
const pack = await this.getPackStream();
|
||||
public async addFileToPack(
|
||||
pack: plugins.tarStream.Pack,
|
||||
options: ITarPackFileOptions
|
||||
): Promise<void> {
|
||||
const { fileName, content, mode = 0o644, mtime = new Date() } = options;
|
||||
|
||||
for (const filePath of fileTree) {
|
||||
const absolutePath = plugins.path.join(directoryPath, filePath);
|
||||
const fileStat = await plugins.fsPromises.stat(absolutePath);
|
||||
await this.addFileToPack(pack, {
|
||||
byteLength: fileStat.size,
|
||||
filePath: absolutePath,
|
||||
fileName: filePath,
|
||||
content: plugins.fs.createReadStream(absolutePath),
|
||||
if (typeof content === 'string') {
|
||||
// String content - convert to buffer
|
||||
const buffer = Buffer.from(content, 'utf8');
|
||||
const entry = pack.entry({
|
||||
name: fileName,
|
||||
size: buffer.length,
|
||||
mode,
|
||||
mtime,
|
||||
});
|
||||
entry.write(buffer);
|
||||
entry.end();
|
||||
} else if (Buffer.isBuffer(content) || content instanceof Uint8Array) {
|
||||
// Buffer content
|
||||
const buffer = Buffer.isBuffer(content) ? content : Buffer.from(content);
|
||||
const entry = pack.entry({
|
||||
name: fileName,
|
||||
size: buffer.length,
|
||||
mode,
|
||||
mtime,
|
||||
});
|
||||
entry.write(buffer);
|
||||
entry.end();
|
||||
} else if (content && typeof (content as any).pipe === 'function') {
|
||||
// Readable stream - requires size to be provided
|
||||
const size = options.size;
|
||||
if (size === undefined) {
|
||||
throw new Error('Size must be provided when adding a stream to TAR pack');
|
||||
}
|
||||
|
||||
return new Promise<void>((resolve, reject) => {
|
||||
const entry = pack.entry({
|
||||
name: fileName,
|
||||
size,
|
||||
mode,
|
||||
mtime,
|
||||
}, (err) => {
|
||||
if (err) reject(err);
|
||||
else resolve();
|
||||
});
|
||||
|
||||
(content as plugins.stream.Readable).pipe(entry);
|
||||
});
|
||||
} else {
|
||||
throw new Error('Unsupported content type for TAR entry');
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================
|
||||
// STREAMING EXTRACT METHODS (for large files)
|
||||
// ============================================
|
||||
|
||||
/**
|
||||
* Get a streaming TAR extract instance
|
||||
* Use this for extracting large archives without buffering everything in memory
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const extract = tarTools.getExtractStream();
|
||||
*
|
||||
* extract.on('entry', (header, stream, next) => {
|
||||
* console.log(`Extracting: ${header.name}`);
|
||||
* stream.pipe(fs.createWriteStream(`./out/${header.name}`));
|
||||
* stream.on('end', next);
|
||||
* });
|
||||
*
|
||||
* fs.createReadStream('archive.tar').pipe(extract);
|
||||
* ```
|
||||
*/
|
||||
public getExtractStream(): plugins.tarStream.Extract {
|
||||
return plugins.tarStream.extract();
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract a TAR stream to a directory with true streaming (no buffering)
|
||||
*
|
||||
* @param sourceStream - The TAR archive stream
|
||||
* @param targetDir - Directory to extract files to
|
||||
*/
|
||||
public async extractToDirectory(
|
||||
sourceStream: plugins.stream.Readable,
|
||||
targetDir: string
|
||||
): Promise<void> {
|
||||
await plugins.fsPromises.mkdir(targetDir, { recursive: true });
|
||||
|
||||
return new Promise<void>((resolve, reject) => {
|
||||
const extract = this.getExtractStream();
|
||||
|
||||
extract.on('entry', async (header, stream, next) => {
|
||||
const filePath = plugins.path.join(targetDir, header.name);
|
||||
|
||||
if (header.type === 'directory') {
|
||||
await plugins.fsPromises.mkdir(filePath, { recursive: true });
|
||||
stream.resume(); // Drain the stream
|
||||
next();
|
||||
} else if (header.type === 'file') {
|
||||
await plugins.fsPromises.mkdir(plugins.path.dirname(filePath), { recursive: true });
|
||||
const writeStream = plugins.fs.createWriteStream(filePath);
|
||||
stream.pipe(writeStream);
|
||||
writeStream.on('finish', next);
|
||||
writeStream.on('error', reject);
|
||||
} else {
|
||||
stream.resume(); // Skip other types
|
||||
next();
|
||||
}
|
||||
});
|
||||
|
||||
extract.on('finish', resolve);
|
||||
extract.on('error', reject);
|
||||
|
||||
sourceStream.pipe(extract);
|
||||
});
|
||||
}
|
||||
|
||||
// ============================================
|
||||
// STREAMING DIRECTORY PACK (for large directories)
|
||||
// ============================================
|
||||
|
||||
/**
|
||||
* Pack a directory into a TAR stream with true streaming (no buffering)
|
||||
* Files are read and written one at a time, never loading everything into memory
|
||||
*/
|
||||
public async getDirectoryPackStream(directoryPath: string): Promise<plugins.tarStream.Pack> {
|
||||
const pack = this.getPackStream();
|
||||
const fileTree = await plugins.listFileTree(directoryPath, '**/*');
|
||||
|
||||
// Process files sequentially to avoid memory issues
|
||||
(async () => {
|
||||
for (const filePath of fileTree) {
|
||||
const absolutePath = plugins.path.join(directoryPath, filePath);
|
||||
const stat = await plugins.fsPromises.stat(absolutePath);
|
||||
|
||||
if (stat.isFile()) {
|
||||
const readStream = plugins.fs.createReadStream(absolutePath);
|
||||
await this.addFileToPack(pack, {
|
||||
fileName: filePath,
|
||||
content: readStream,
|
||||
size: stat.size,
|
||||
mode: stat.mode,
|
||||
mtime: stat.mtime,
|
||||
});
|
||||
}
|
||||
}
|
||||
pack.finalize();
|
||||
})().catch((err) => pack.destroy(err));
|
||||
|
||||
return pack;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a new TAR pack stream
|
||||
* Pack a directory into a TAR.GZ stream with true streaming
|
||||
* Uses Node.js zlib for streaming compression
|
||||
*/
|
||||
public async getPackStream(): Promise<plugins.tarStream.Pack> {
|
||||
return plugins.tarStream.pack();
|
||||
public async getDirectoryPackStreamGz(
|
||||
directoryPath: string,
|
||||
compressionLevel?: TCompressionLevel
|
||||
): Promise<plugins.stream.Readable> {
|
||||
const tarStream = await this.getDirectoryPackStream(directoryPath);
|
||||
const { createGzip } = await import('node:zlib');
|
||||
const gzip = createGzip({ level: compressionLevel ?? 6 });
|
||||
return tarStream.pipe(gzip);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a TAR extraction stream
|
||||
*/
|
||||
public getDecompressionStream(): plugins.tarStream.Extract {
|
||||
return plugins.tarStream.extract();
|
||||
}
|
||||
// ============================================
|
||||
// BUFFER-BASED METHODS (inherited + filesystem)
|
||||
// ============================================
|
||||
|
||||
/**
|
||||
* Pack files into a TAR buffer
|
||||
* Pack a directory into a TAR buffer (loads all files into memory)
|
||||
* For large directories, use getDirectoryPackStream() instead
|
||||
*/
|
||||
public async packFiles(files: IArchiveEntry[]): Promise<Buffer> {
|
||||
const pack = await this.getPackStream();
|
||||
public async packDirectory(directoryPath: string): Promise<Uint8Array> {
|
||||
const fileTree = await plugins.listFileTree(directoryPath, '**/*');
|
||||
const entries: IArchiveEntry[] = [];
|
||||
|
||||
for (const file of files) {
|
||||
await this.addFileToPack(pack, {
|
||||
fileName: file.archivePath,
|
||||
content: file.content as string | Buffer | plugins.stream.Readable | plugins.smartfile.SmartFile | plugins.smartfile.StreamFile,
|
||||
byteLength: file.size,
|
||||
});
|
||||
for (const filePath of fileTree) {
|
||||
const absolutePath = plugins.path.join(directoryPath, filePath);
|
||||
const stat = await plugins.fsPromises.stat(absolutePath);
|
||||
|
||||
if (stat.isFile()) {
|
||||
const content = await plugins.fsPromises.readFile(absolutePath);
|
||||
entries.push({
|
||||
archivePath: filePath,
|
||||
content: new Uint8Array(content),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
pack.finalize();
|
||||
|
||||
const chunks: Buffer[] = [];
|
||||
return new Promise((resolve, reject) => {
|
||||
pack.on('data', (chunk: Buffer) => chunks.push(chunk));
|
||||
pack.on('end', () => resolve(Buffer.concat(chunks)));
|
||||
pack.on('error', reject);
|
||||
});
|
||||
return this.packFiles(entries);
|
||||
}
|
||||
|
||||
/**
|
||||
* Pack a directory into a TAR.GZ buffer
|
||||
* Pack a directory into a TAR.GZ buffer (loads all files into memory)
|
||||
* For large directories, use getDirectoryPackStreamGz() instead
|
||||
*/
|
||||
public async packDirectoryToTarGz(
|
||||
directoryPath: string,
|
||||
compressionLevel?: TCompressionLevel
|
||||
): Promise<Buffer> {
|
||||
const pack = await this.packDirectory(directoryPath);
|
||||
pack.finalize();
|
||||
|
||||
const gzipTools = new GzipTools();
|
||||
const gzipStream = gzipTools.getCompressionStream(compressionLevel);
|
||||
|
||||
const chunks: Buffer[] = [];
|
||||
return new Promise((resolve, reject) => {
|
||||
pack
|
||||
.pipe(gzipStream)
|
||||
.on('data', (chunk: Buffer) => chunks.push(chunk))
|
||||
.on('end', () => resolve(Buffer.concat(chunks)))
|
||||
.on('error', reject);
|
||||
});
|
||||
): Promise<Uint8Array> {
|
||||
const tarBuffer = await this.packDirectory(directoryPath);
|
||||
const { gzipSync } = await import('fflate');
|
||||
return gzipSync(new Uint8Array(tarBuffer), { level: compressionLevel ?? 6 });
|
||||
}
|
||||
|
||||
/**
|
||||
* Pack a directory into a TAR.GZ stream
|
||||
* This is now a true streaming implementation
|
||||
*/
|
||||
public async packDirectoryToTarGzStream(
|
||||
directoryPath: string,
|
||||
compressionLevel?: TCompressionLevel
|
||||
): Promise<plugins.stream.Readable> {
|
||||
const pack = await this.packDirectory(directoryPath);
|
||||
pack.finalize();
|
||||
|
||||
const gzipTools = new GzipTools();
|
||||
const gzipStream = gzipTools.getCompressionStream(compressionLevel);
|
||||
|
||||
return pack.pipe(gzipStream);
|
||||
}
|
||||
|
||||
/**
|
||||
* Pack files into a TAR.GZ buffer
|
||||
*/
|
||||
public async packFilesToTarGz(
|
||||
files: IArchiveEntry[],
|
||||
compressionLevel?: TCompressionLevel
|
||||
): Promise<Buffer> {
|
||||
const tarBuffer = await this.packFiles(files);
|
||||
const gzipTools = new GzipTools();
|
||||
return gzipTools.compress(tarBuffer, compressionLevel);
|
||||
return this.getDirectoryPackStreamGz(directoryPath, compressionLevel);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,209 +0,0 @@
|
||||
import * as plugins from './plugins.js';
|
||||
import type { IArchiveEntry, TCompressionLevel } from './interfaces.js';
|
||||
|
||||
/**
|
||||
* Transform stream for ZIP decompression using fflate
|
||||
* Emits StreamFile objects for each file in the archive
|
||||
*/
|
||||
export class ZipDecompressionTransform extends plugins.smartstream.SmartDuplex<Buffer, plugins.smartfile.StreamFile> {
|
||||
private streamtools!: plugins.smartstream.IStreamTools;
|
||||
private unzipper = new plugins.fflate.Unzip(async (fileArg) => {
|
||||
let resultBuffer: Buffer;
|
||||
fileArg.ondata = async (_flateError, dat, final) => {
|
||||
resultBuffer
|
||||
? (resultBuffer = Buffer.concat([resultBuffer, Buffer.from(dat)]))
|
||||
: (resultBuffer = Buffer.from(dat));
|
||||
if (final) {
|
||||
const streamFile = plugins.smartfile.StreamFile.fromBuffer(resultBuffer);
|
||||
streamFile.relativeFilePath = fileArg.name;
|
||||
this.streamtools.push(streamFile);
|
||||
}
|
||||
};
|
||||
fileArg.start();
|
||||
});
|
||||
|
||||
constructor() {
|
||||
super({
|
||||
objectMode: true,
|
||||
writeFunction: async (chunkArg, streamtoolsArg) => {
|
||||
this.streamtools ? null : (this.streamtools = streamtoolsArg);
|
||||
this.unzipper.push(
|
||||
Buffer.isBuffer(chunkArg) ? chunkArg : Buffer.from(chunkArg as unknown as ArrayBuffer),
|
||||
false
|
||||
);
|
||||
return null;
|
||||
},
|
||||
finalFunction: async () => {
|
||||
this.unzipper.push(Buffer.from(''), true);
|
||||
await plugins.smartdelay.delayFor(0);
|
||||
await this.streamtools.push(null);
|
||||
return null;
|
||||
},
|
||||
});
|
||||
this.unzipper.register(plugins.fflate.UnzipInflate);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Streaming ZIP compression using fflate
 * Allows adding multiple entries before finalizing
 */
export class ZipCompressionStream extends plugins.stream.Duplex {
  private files: Map<string, { data: Uint8Array; options?: plugins.fflate.ZipOptions }> = new Map();
  private finalized = false;

  constructor() {
    super();
  }

  /**
   * Add a file entry to the ZIP archive
   */
  public async addEntry(
    fileName: string,
    content: Buffer | plugins.stream.Readable,
    options?: { compressionLevel?: TCompressionLevel }
  ): Promise<void> {
    if (this.finalized) {
      throw new Error('Cannot add entries to a finalized ZIP archive');
    }

    let data: Buffer;
    if (Buffer.isBuffer(content)) {
      data = content;
    } else {
      // Collect stream to buffer
      const chunks: Buffer[] = [];
      for await (const chunk of content) {
        chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
      }
      data = Buffer.concat(chunks);
    }

    this.files.set(fileName, {
      data: new Uint8Array(data),
      options: options?.compressionLevel !== undefined ? { level: options.compressionLevel } : undefined,
    });
  }

  /**
   * Finalize the ZIP archive and emit the compressed data
   */
  public async finalize(): Promise<void> {
    if (this.finalized) {
      return;
    }
    this.finalized = true;

    const filesObj: plugins.fflate.Zippable = {};
    for (const [name, { data, options }] of this.files) {
      filesObj[name] = options ? [data, options] : data;
    }

    return new Promise((resolve, reject) => {
      plugins.fflate.zip(filesObj, (err, result) => {
        if (err) {
          reject(err);
        } else {
          this.push(Buffer.from(result));
          this.push(null);
          resolve();
        }
      });
    });
  }

  _read(): void {
    // No-op: data is pushed when finalize() is called
  }

  _write(
    _chunk: Buffer,
    _encoding: BufferEncoding,
    callback: (error?: Error | null) => void
  ): void {
    // Not used for ZIP creation - use addEntry() instead
    callback(new Error('Use addEntry() to add files to the ZIP archive'));
  }
}

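A rough usage sketch for this stream (illustrative only; the entry names and the compression level 9 are assumptions, and the readable side is simply collected into memory):

const zipStream = new ZipCompressionStream();
const outChunks: Buffer[] = [];
zipStream.on('data', (chunk: Buffer) => outChunks.push(chunk));
zipStream.on('end', () => {
  const zipBuffer = Buffer.concat(outChunks);
  // zipBuffer now holds the finished archive
});

await zipStream.addEntry('hello.txt', Buffer.from('hello world'));
await zipStream.addEntry('docs/readme.md', Buffer.from('# readme'), { compressionLevel: 9 });
await zipStream.finalize();
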
/**
 * ZIP compression and decompression utilities
 */
export class ZipTools {
  /**
   * Get a streaming compression object for creating ZIP archives
   */
  public getCompressionStream(): ZipCompressionStream {
    return new ZipCompressionStream();
  }

  /**
   * Get a streaming decompression transform for extracting ZIP archives
   */
  public getDecompressionStream(): ZipDecompressionTransform {
    return new ZipDecompressionTransform();
  }

  /**
   * Create a ZIP archive from an array of entries
   */
  public async createZip(entries: IArchiveEntry[], compressionLevel?: TCompressionLevel): Promise<Buffer> {
    const filesObj: plugins.fflate.Zippable = {};

    for (const entry of entries) {
      let data: Uint8Array;

      if (typeof entry.content === 'string') {
        data = new TextEncoder().encode(entry.content);
      } else if (Buffer.isBuffer(entry.content)) {
        data = new Uint8Array(entry.content);
      } else if (entry.content instanceof plugins.smartfile.SmartFile) {
        data = new Uint8Array(entry.content.contents);
      } else if (entry.content instanceof plugins.smartfile.StreamFile) {
        const buffer = await entry.content.getContentAsBuffer();
        data = new Uint8Array(buffer);
      } else {
        // Readable stream
        const chunks: Buffer[] = [];
        for await (const chunk of entry.content as plugins.stream.Readable) {
          chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
        }
        data = new Uint8Array(Buffer.concat(chunks));
      }

      if (compressionLevel !== undefined) {
        filesObj[entry.archivePath] = [data, { level: compressionLevel }];
      } else {
        filesObj[entry.archivePath] = data;
      }
    }

    return new Promise((resolve, reject) => {
      plugins.fflate.zip(filesObj, (err, result) => {
        if (err) reject(err);
        else resolve(Buffer.from(result));
      });
    });
  }

  /**
   * Extract a ZIP buffer to an array of entries
   */
  public async extractZip(data: Buffer): Promise<Array<{ path: string; content: Buffer }>> {
    return new Promise((resolve, reject) => {
      plugins.fflate.unzip(data, (err, result) => {
        if (err) {
          reject(err);
          return;
        }

        const entries: Array<{ path: string; content: Buffer }> = [];
        for (const [path, content] of Object.entries(result)) {
          entries.push({ path, content: Buffer.from(content) });
        }
        resolve(entries);
      });
    });
  }
}

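A round-trip sketch for the Promise-based helpers above (paths, contents, and the level 6 are illustrative assumptions):

const zipTools = new ZipTools();
const zipBuffer = await zipTools.createZip(
  [
    { archivePath: 'notes.txt', content: 'plain string content' },
    { archivePath: 'data.bin', content: Buffer.from([0x01, 0x02, 0x03]) },
  ],
  6
);
const extracted = await zipTools.extractZip(zipBuffer);
for (const entry of extracted) {
  console.log(entry.path, entry.content.length);
}
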
18  ts/index.ts
@@ -1,15 +1,11 @@
// Core types and errors
export * from './interfaces.js';
export * from './errors.js';
// Re-export everything from ts_shared (browser-compatible)
export * from '../ts_shared/index.js';

// Main archive class
// Node.js-specific: Main archive class with filesystem support
export * from './classes.smartarchive.js';

// Format-specific tools
export * from './classes.tartools.js';
export * from './classes.ziptools.js';
export * from './classes.gziptools.js';
export * from './classes.bzip2tools.js';

// Archive analysis
// Node.js-specific: Archive analysis with SmartArchive integration
export * from './classes.archiveanalyzer.js';

// Node.js-specific: Extended TarTools with streaming support (overrides shared TarTools)
export { TarTools, type ITarPackFileOptions } from './classes.tartools.js';

@@ -1,4 +1,4 @@
// node native scope
// Node.js native scope
import * as path from 'node:path';
import * as stream from 'node:stream';
import * as fs from 'node:fs';
@@ -30,32 +30,24 @@ export async function listFileTree(dirPath: string, _pattern: string = '**/*'):
  return results;
}

// @pushrocks scope
import * as smartfile from '@push.rocks/smartfile';
import * as smartdelay from '@push.rocks/smartdelay';
// Re-export browser-compatible plugins from ts_shared
export * from '../ts_shared/plugins.js';

// Additional Node.js-specific @pushrocks packages
import * as smartpath from '@push.rocks/smartpath';
import * as smartpromise from '@push.rocks/smartpromise';
import * as smartrequest from '@push.rocks/smartrequest';
import * as smartunique from '@push.rocks/smartunique';
import * as smartstream from '@push.rocks/smartstream';
import * as smartrx from '@push.rocks/smartrx';
import * as smarturl from '@push.rocks/smarturl';

export {
  smartfile,
  smartdelay,
  smartpath,
  smartpromise,
  smartrequest,
  smartunique,
  smartstream,
  smartrx,
  smarturl,
};

// third party scope
import * as fileType from 'file-type';
import * as fflate from 'fflate';
import tarStream from 'tar-stream';

export { fileType, fflate, tarStream };
// Node.js-specific: tar-stream for true streaming TAR support
import * as tarStream from 'tar-stream';
export { tarStream };

@@ -6,7 +6,7 @@ const BITMASK = [0, 0x01, 0x03, 0x07, 0x0f, 0x1f, 0x3f, 0x7f, 0xff] as const;
 * Creates a bit reader function for BZIP2 decompression.
 * Takes a buffer iterator as input and returns a function that reads bits.
 */
export function bitIterator(nextBuffer: () => Buffer): IBitReader {
export function bitIterator(nextBuffer: () => Uint8Array): IBitReader {
  let bit = 0;
  let byte = 0;
  let bytes = nextBuffer();
@@ -71,7 +71,7 @@ export class Bzip2 {
  /**
   * Create a bit reader from a byte array
   */
  array(bytes: Uint8Array | Buffer): (n: number) => number {
  array(bytes: Uint8Array): (n: number) => number {
    let bit = 0;
    let byte = 0;
    const BITMASK = [0, 0x01, 0x03, 0x07, 0x0f, 0x1f, 0x3f, 0x7f, 0xff];
@@ -99,7 +99,7 @@ export class Bzip2 {
  /**
   * Simple decompression from a buffer
   */
  simple(srcbuffer: Uint8Array | Buffer, stream: (byte: number) => void): void {
  simple(srcbuffer: Uint8Array, stream: (byte: number) => void): void {
    const bits = this.array(srcbuffer);
    const size = this.header(bits as IBitReader);
    let ret: number | null = 0;
@@ -8,16 +8,16 @@ import { bitIterator } from './bititerator.js';
/**
 * Creates a streaming BZIP2 decompression transform
 */
export function unbzip2Stream(): plugins.smartstream.SmartDuplex<Buffer, Buffer> {
export function unbzip2Stream(): plugins.smartstream.SmartDuplex<Uint8Array, Uint8Array> {
  const bzip2Instance = new Bzip2();
  const bufferQueue: Buffer[] = [];
  const bufferQueue: Uint8Array[] = [];
  let hasBytes = 0;
  let blockSize = 0;
  let broken = false;
  let bitReader: IBitReader | null = null;
  let streamCRC: number | null = null;

  function decompressBlock(): Buffer | undefined {
  function decompressBlock(): Uint8Array | undefined {
    if (!blockSize) {
      blockSize = bzip2Instance.header(bitReader!);
      streamCRC = 0;
@@ -40,12 +40,12 @@ export function unbzip2Stream(): plugins.smartstream.SmartDuplex<Buffer, Buffer>
      return undefined;
    }

    return Buffer.from(chunk);
    return new Uint8Array(chunk);
  }

  let outlength = 0;

  const decompressAndPush = async (): Promise<Buffer | undefined> => {
  const decompressAndPush = async (): Promise<Uint8Array | undefined> => {
    if (broken) return undefined;

    try {
@@ -63,7 +63,7 @@ export function unbzip2Stream(): plugins.smartstream.SmartDuplex<Buffer, Buffer>
    }
  };

  return new plugins.smartstream.SmartDuplex<Buffer, Buffer>({
  return new plugins.smartstream.SmartDuplex<Uint8Array, Uint8Array>({
    objectMode: true,
    name: 'bzip2',
    highWaterMark: 1,

14  ts_shared/classes.bzip2tools.ts  Normal file
@@ -0,0 +1,14 @@
import * as plugins from './plugins.js';
import { unbzip2Stream } from './bzip2/index.js';

/**
 * BZIP2 decompression utilities (browser-compatible)
 */
export class Bzip2Tools {
  /**
   * Get a streaming decompression transform
   */
  getDecompressionStream(): plugins.smartstream.SmartDuplex<Uint8Array, Uint8Array> {
    return unbzip2Stream();
  }
}

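A possible consumption sketch (bzip2Data is a hypothetical Uint8Array holding a .bz2 payload; SmartDuplex is assumed to behave like a standard Duplex stream):

const bzip2Tools = new Bzip2Tools();
const decompressor = bzip2Tools.getDecompressionStream();
const plainChunks: Uint8Array[] = [];
decompressor.on('data', (chunk: Uint8Array) => plainChunks.push(chunk));
decompressor.on('end', () => console.log('bzip2 stream fully decompressed'));
decompressor.end(bzip2Data); // write the compressed bytes and signal completion
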
42  ts_shared/classes.gziptools.ts  Normal file
@@ -0,0 +1,42 @@
import * as plugins from './plugins.js';
import type { TCompressionLevel } from './interfaces.js';

/**
 * GZIP compression and decompression utilities (browser-compatible)
 */
export class GzipTools {
  /**
   * Compress data synchronously
   */
  public compressSync(data: Uint8Array, level?: TCompressionLevel): Uint8Array {
    const options = level !== undefined ? { level } : undefined;
    return plugins.fflate.gzipSync(data, options);
  }

  /**
   * Decompress data synchronously
   */
  public decompressSync(data: Uint8Array): Uint8Array {
    return plugins.fflate.gunzipSync(data);
  }

  /**
   * Compress data asynchronously
   * Note: Uses sync version for Deno compatibility (fflate async uses Web Workers
   * which have issues in Deno)
   */
  public async compress(data: Uint8Array, level?: TCompressionLevel): Promise<Uint8Array> {
    // Use sync version wrapped in Promise for cross-runtime compatibility
    return this.compressSync(data, level);
  }

  /**
   * Decompress data asynchronously
   * Note: Uses sync version for Deno compatibility (fflate async uses Web Workers
   * which have issues in Deno)
   */
  public async decompress(data: Uint8Array): Promise<Uint8Array> {
    // Use sync version wrapped in Promise for cross-runtime compatibility
    return this.decompressSync(data);
  }
}

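A small round-trip sketch for these helpers (level 9 is an assumed valid TCompressionLevel):

const gzipTools = new GzipTools();
const original = new TextEncoder().encode('hello gzip');
const compressed = await gzipTools.compress(original, 9);
const restored = await gzipTools.decompress(compressed);
console.log(new TextDecoder().decode(restored)); // 'hello gzip'
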
89  ts_shared/classes.tartools.ts  Normal file
@@ -0,0 +1,89 @@
import * as plugins from './plugins.js';
import type { IArchiveEntry, ITarEntry, TCompressionLevel } from './interfaces.js';
import { GzipTools } from './classes.gziptools.js';

/**
 * TAR archive creation and extraction utilities using modern-tar (browser-compatible)
 */
export class TarTools {
  /**
   * Pack files into a TAR buffer
   */
  public async packFiles(files: IArchiveEntry[]): Promise<Uint8Array> {
    const entries: ITarEntry[] = [];

    for (const file of files) {
      let data: Uint8Array;

      if (typeof file.content === 'string') {
        data = new TextEncoder().encode(file.content);
      } else if (file.content instanceof Uint8Array) {
        data = file.content;
      } else if (file.content instanceof plugins.smartfile.SmartFile) {
        data = new Uint8Array(file.content.contents);
      } else if (file.content instanceof plugins.smartfile.StreamFile) {
        const buffer = await file.content.getContentAsBuffer();
        data = new Uint8Array(buffer);
      } else {
        throw new Error('Unsupported content type for TAR entry');
      }

      entries.push({
        header: {
          name: file.archivePath,
          size: data.length,
          type: 'file',
          mode: file.mode,
          mtime: file.mtime,
        },
        body: data,
      });
    }

    return plugins.modernTar.packTar(entries);
  }

  /**
   * Extract a TAR buffer to an array of entries
   */
  public async extractTar(data: Uint8Array): Promise<Array<{ path: string; content: Uint8Array; isDirectory: boolean }>> {
    const entries = await plugins.modernTar.unpackTar(data);
    const result: Array<{ path: string; content: Uint8Array; isDirectory: boolean }> = [];

    for (const entry of entries) {
      const isDirectory = entry.header.type === 'directory' || entry.header.name.endsWith('/');

      // modern-tar uses 'data' property, not 'body'
      const content = entry.data ?? new Uint8Array(0);

      result.push({
        path: entry.header.name,
        content,
        isDirectory,
      });
    }

    return result;
  }

  /**
   * Pack files into a TAR.GZ buffer
   */
  public async packFilesToTarGz(
    files: IArchiveEntry[],
    compressionLevel?: TCompressionLevel
  ): Promise<Uint8Array> {
    const tarBuffer = await this.packFiles(files);
    const gzipTools = new GzipTools();
    return gzipTools.compress(tarBuffer, compressionLevel);
  }

  /**
   * Extract a TAR.GZ buffer to an array of entries
   */
  public async extractTarGz(data: Uint8Array): Promise<Array<{ path: string; content: Uint8Array; isDirectory: boolean }>> {
    const gzipTools = new GzipTools();
    const tarBuffer = await gzipTools.decompress(data);
    return this.extractTar(tarBuffer);
  }
}

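A round-trip sketch for the tar.gz helpers (entry names and level 6 are illustrative):

const tarTools = new TarTools();
const tgzBuffer = await tarTools.packFilesToTarGz(
  [
    { archivePath: 'a.txt', content: 'first file' },
    { archivePath: 'b.bin', content: new Uint8Array([1, 2, 3]) },
  ],
  6
);
const entries = await tarTools.extractTarGz(tgzBuffer);
// entries: [{ path: 'a.txt', content: Uint8Array, isDirectory: false }, ...]
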
107  ts_shared/classes.ziptools.ts  Normal file
@@ -0,0 +1,107 @@
import * as plugins from './plugins.js';
import type { IArchiveEntry, TCompressionLevel } from './interfaces.js';

/**
 * Transform stream for ZIP decompression using fflate
 * Emits StreamFile objects for each file in the archive
 */
export class ZipDecompressionTransform extends plugins.smartstream.SmartDuplex<Uint8Array, plugins.smartfile.StreamFile> {
  private streamtools!: plugins.smartstream.IStreamTools;
  private unzipper = new plugins.fflate.Unzip(async (fileArg) => {
    let resultBuffer: Uint8Array;
    fileArg.ondata = async (_flateError, dat, final) => {
      if (resultBuffer) {
        const combined = new Uint8Array(resultBuffer.length + dat.length);
        combined.set(resultBuffer);
        combined.set(dat, resultBuffer.length);
        resultBuffer = combined;
      } else {
        resultBuffer = new Uint8Array(dat);
      }
      if (final) {
        const streamFile = plugins.smartfile.StreamFile.fromBuffer(Buffer.from(resultBuffer));
        streamFile.relativeFilePath = fileArg.name;
        this.streamtools.push(streamFile);
      }
    };
    fileArg.start();
  });

  constructor() {
    super({
      objectMode: true,
      writeFunction: async (chunkArg, streamtoolsArg) => {
        this.streamtools ? null : (this.streamtools = streamtoolsArg);
        const chunk = chunkArg instanceof Uint8Array ? chunkArg : new Uint8Array(chunkArg);
        this.unzipper.push(chunk, false);
        return null;
      },
      finalFunction: async () => {
        this.unzipper.push(new Uint8Array(0), true);
        await plugins.smartdelay.delayFor(0);
        await this.streamtools.push(null);
        return null;
      },
    });
    this.unzipper.register(plugins.fflate.UnzipInflate);
  }
}

/**
 * ZIP compression and decompression utilities
 */
export class ZipTools {
  /**
   * Get a streaming decompression transform for extracting ZIP archives
   */
  public getDecompressionStream(): ZipDecompressionTransform {
    return new ZipDecompressionTransform();
  }

  /**
   * Create a ZIP archive from an array of entries
   */
  public async createZip(entries: IArchiveEntry[], compressionLevel?: TCompressionLevel): Promise<Uint8Array> {
    const filesObj: plugins.fflate.Zippable = {};

    for (const entry of entries) {
      let data: Uint8Array;

      if (typeof entry.content === 'string') {
        data = new TextEncoder().encode(entry.content);
      } else if (entry.content instanceof Uint8Array) {
        data = entry.content;
      } else if (entry.content instanceof plugins.smartfile.SmartFile) {
        data = new Uint8Array(entry.content.contents);
      } else if (entry.content instanceof plugins.smartfile.StreamFile) {
        const buffer = await entry.content.getContentAsBuffer();
        data = new Uint8Array(buffer);
      } else {
        throw new Error('Unsupported content type for ZIP entry');
      }

      if (compressionLevel !== undefined) {
        filesObj[entry.archivePath] = [data, { level: compressionLevel }];
      } else {
        filesObj[entry.archivePath] = data;
      }
    }

    // Use sync version for Deno compatibility (fflate async uses Web Workers)
    const result = plugins.fflate.zipSync(filesObj);
    return result;
  }

  /**
   * Extract a ZIP buffer to an array of entries
   */
  public async extractZip(data: Uint8Array): Promise<Array<{ path: string; content: Uint8Array }>> {
    // Use sync version for Deno compatibility (fflate async uses Web Workers)
    const result = plugins.fflate.unzipSync(data);
    const entries: Array<{ path: string; content: Uint8Array }> = [];
    for (const [path, content] of Object.entries(result)) {
      entries.push({ path, content });
    }
    return entries;
  }
}

17  ts_shared/index.ts  Normal file
@@ -0,0 +1,17 @@
// ts_shared - Browser-compatible shared code

// Interfaces and types
export * from './interfaces.js';

// Error classes
export * from './errors.js';

// Tool classes
export { ZipTools, ZipDecompressionTransform } from './classes.ziptools.js';
export { GzipTools } from './classes.gziptools.js';
export { TarTools } from './classes.tartools.js';
export { Bzip2Tools } from './classes.bzip2tools.js';

// BZIP2 internals (for advanced usage)
export { unbzip2Stream } from './bzip2/index.js';
export { Bzip2 } from './bzip2/bzip2.js';

@@ -1,4 +1,3 @@
import type * as stream from 'node:stream';
import type { SmartFile, StreamFile } from '@push.rocks/smartfile';

/**
@@ -22,13 +21,13 @@ export type TSupportedMime =
  | undefined;

/**
 * Entry to add to an archive during creation
 * Entry to add to an archive during creation (browser-compatible)
 */
export interface IArchiveEntry {
  /** Path within the archive */
  archivePath: string;
  /** Content: string, Buffer, Readable stream, SmartFile, or StreamFile */
  content: string | Buffer | stream.Readable | SmartFile | StreamFile;
  /** Content: string, Buffer/Uint8Array, SmartFile, or StreamFile */
  content: string | Uint8Array | SmartFile | StreamFile;
  /** Optional size hint for streams (improves performance) */
  size?: number;
  /** Optional file mode/permissions */
@@ -104,11 +103,9 @@ export interface IAddFileOptions {
  /** Filename within the archive */
  fileName?: string;
  /** File content */
  content?: string | Buffer | stream.Readable | SmartFile | StreamFile;
  content?: string | Uint8Array | SmartFile | StreamFile;
  /** Size in bytes (required for streams) */
  byteLength?: number;
  /** Path to file on disk (alternative to content) */
  filePath?: string;
}

/**
@@ -129,3 +126,22 @@ export interface IHuffmanGroup {
  minLen: number;
  maxLen: number;
}

/**
 * Entry filter predicate for fluent API
 */
export type TEntryFilter = (entry: IArchiveEntryInfo) => boolean;

/**
 * TAR entry for modern-tar compatibility
 */
export interface ITarEntry {
  header: {
    name: string;
    size: number;
    type?: 'file' | 'directory';
    mode?: number;
    mtime?: Date;
  };
  body: string | Uint8Array;
}

22  ts_shared/plugins.ts  Normal file
@@ -0,0 +1,22 @@
// Browser-compatible plugins for ts_shared
// NO Node.js imports allowed here

// @push.rocks scope (browser-compatible)
import * as smartdelay from '@push.rocks/smartdelay';
import * as smartpromise from '@push.rocks/smartpromise';
import * as smartstream from '@push.rocks/smartstream';
import * as smartfile from '@push.rocks/smartfile';

export {
  smartdelay,
  smartpromise,
  smartstream,
  smartfile,
};

// third party scope (browser-compatible)
import * as fileType from 'file-type';
import * as fflate from 'fflate';
import * as modernTar from 'modern-tar';

export { fileType, fflate, modernTar };

8  ts_web/00_commitinfo_data.ts  Normal file
@@ -0,0 +1,8 @@
/**
 * autocreated commitinfo by @push.rocks/commitinfo
 */
export const commitinfo = {
  name: '@push.rocks/smartarchive',
  version: '5.2.0',
  description: 'A library for working with archive files, providing utilities for compressing and decompressing data.'
}

4  ts_web/index.ts  Normal file
@@ -0,0 +1,4 @@
// ts_web - Browser-compatible entry point
// Re-exports everything from ts_shared

export * from '../ts_shared/index.js';

3  ts_web/plugins.ts  Normal file
@@ -0,0 +1,3 @@
// Browser-compatible plugins for ts_web
// Re-export from ts_shared
export * from '../ts_shared/plugins.js';