Compare commits

35 commits:

9dbb7d9731, 4428638170, 1af585594c, 780db4921e, ed5f590b5f, a32ed0facd, b5a3793ed5, be1bc958d8, 21434622dd, 50c0368ac7, 78b3fcfd83, 61caf51f4e, be4e2cdae7, 1cb8331666, aa203c5ab2, 87aedd5ef5, 64431703b5, 120fd0b321, e6192c418e, 00a039936f, e03c3a62b1, 538eced73b, fe8a5713f0, 37e8a1d0f7, 5143cd098d, 4d5ea812af, 500ef01ded, 3196a02835, de1c46ed0a, b4c7b065fa, 93da11a951, ecd7f6d419, a3ecfe4d99, f99f6d96c5, b4be70f43a
@@ -6,8 +6,8 @@ on:
       - '**'
 
 env:
-  IMAGE: registry.gitlab.com/hosttoday/ht-docker-node:npmci
-  NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@gitea.lossless.digital/${{gitea.repository}}.git
+  IMAGE: code.foss.global/host.today/ht-docker-node:npmci
+  NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@/${{gitea.repository}}.git
   NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
   NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
   NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
@@ -26,7 +26,7 @@ jobs:
       - name: Install pnpm and npmci
         run: |
           pnpm install -g pnpm
-          pnpm install -g @shipzone/npmci
+          pnpm install -g @ship.zone/npmci
 
       - name: Run npm prepare
         run: npmci npm prepare
@@ -6,8 +6,8 @@ on:
       - '*'
 
 env:
-  IMAGE: registry.gitlab.com/hosttoday/ht-docker-node:npmci
-  NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@gitea.lossless.digital/${{gitea.repository}}.git
+  IMAGE: code.foss.global/host.today/ht-docker-node:npmci
+  NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@/${{gitea.repository}}.git
   NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
   NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
   NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
@@ -26,7 +26,7 @@ jobs:
       - name: Prepare
         run: |
           pnpm install -g pnpm
-          pnpm install -g @shipzone/npmci
+          pnpm install -g @ship.zone/npmci
           npmci npm prepare
 
       - name: Audit production dependencies
@@ -54,7 +54,7 @@ jobs:
       - name: Prepare
         run: |
           pnpm install -g pnpm
-          pnpm install -g @shipzone/npmci
+          pnpm install -g @ship.zone/npmci
           npmci npm prepare
 
       - name: Test stable
@@ -82,7 +82,7 @@ jobs:
       - name: Prepare
         run: |
           pnpm install -g pnpm
-          pnpm install -g @shipzone/npmci
+          pnpm install -g @ship.zone/npmci
           npmci npm prepare
 
       - name: Release
@@ -104,7 +104,7 @@ jobs:
       - name: Prepare
         run: |
           pnpm install -g pnpm
-          pnpm install -g @shipzone/npmci
+          pnpm install -g @ship.zone/npmci
           npmci npm prepare
 
       - name: Code quality
.gitignore (vendored): 7 changed lines

@@ -3,7 +3,6 @@
 # artifacts
 coverage/
 public/
-pages/
 
 # installs
 node_modules/
@@ -17,4 +16,8 @@ node_modules/
 dist/
 dist_*/
 
-# custom
+# AI
+.claude/
+.serena/
+
+#------# custom
.serena/cache/typescript/document_symbols_cache_v23-06-25.pkl (vendored, new file): binary file not shown
.serena/project.yml (new file): 68 added lines

@@ -0,0 +1,68 @@
# language of the project (csharp, python, rust, java, typescript, go, cpp, or ruby)
# * For C, use cpp
# * For JavaScript, use typescript
# Special requirements:
# * csharp: Requires the presence of a .sln file in the project folder.
language: typescript

# whether to use the project's gitignore file to ignore files
# Added on 2025-04-07
ignore_all_files_in_gitignore: true
# list of additional paths to ignore
# same syntax as gitignore, so you can use * and **
# Was previously called `ignored_dirs`, please update your config if you are using that.
# Added (renamed) on 2025-04-07
ignored_paths: []

# whether the project is in read-only mode
# If set to true, all editing tools will be disabled and attempts to use them will result in an error
# Added on 2025-04-18
read_only: false


# list of tool names to exclude. We recommend not excluding any tools, see the readme for more details.
# Below is the complete list of tools for convenience.
# To make sure you have the latest list of tools, and to view their descriptions,
# execute `uv run scripts/print_tool_overview.py`.
#
# * `activate_project`: Activates a project by name.
# * `check_onboarding_performed`: Checks whether project onboarding was already performed.
# * `create_text_file`: Creates/overwrites a file in the project directory.
# * `delete_lines`: Deletes a range of lines within a file.
# * `delete_memory`: Deletes a memory from Serena's project-specific memory store.
# * `execute_shell_command`: Executes a shell command.
# * `find_referencing_code_snippets`: Finds code snippets in which the symbol at the given location is referenced.
# * `find_referencing_symbols`: Finds symbols that reference the symbol at the given location (optionally filtered by type).
# * `find_symbol`: Performs a global (or local) search for symbols with/containing a given name/substring (optionally filtered by type).
# * `get_current_config`: Prints the current configuration of the agent, including the active and available projects, tools, contexts, and modes.
# * `get_symbols_overview`: Gets an overview of the top-level symbols defined in a given file.
# * `initial_instructions`: Gets the initial instructions for the current project.
#   Should only be used in settings where the system prompt cannot be set,
#   e.g. in clients you have no control over, like Claude Desktop.
# * `insert_after_symbol`: Inserts content after the end of the definition of a given symbol.
# * `insert_at_line`: Inserts content at a given line in a file.
# * `insert_before_symbol`: Inserts content before the beginning of the definition of a given symbol.
# * `list_dir`: Lists files and directories in the given directory (optionally with recursion).
# * `list_memories`: Lists memories in Serena's project-specific memory store.
# * `onboarding`: Performs onboarding (identifying the project structure and essential tasks, e.g. for testing or building).
# * `prepare_for_new_conversation`: Provides instructions for preparing for a new conversation (in order to continue with the necessary context).
# * `read_file`: Reads a file within the project directory.
# * `read_memory`: Reads the memory with the given name from Serena's project-specific memory store.
# * `remove_project`: Removes a project from the Serena configuration.
# * `replace_lines`: Replaces a range of lines within a file with new content.
# * `replace_symbol_body`: Replaces the full definition of a symbol.
# * `restart_language_server`: Restarts the language server, may be necessary when edits not through Serena happen.
# * `search_for_pattern`: Performs a search for a pattern in the project.
# * `summarize_changes`: Provides instructions for summarizing the changes made to the codebase.
# * `switch_modes`: Activates modes by providing a list of their names
# * `think_about_collected_information`: Thinking tool for pondering the completeness of collected information.
# * `think_about_task_adherence`: Thinking tool for determining whether the agent is still on track with the current task.
# * `think_about_whether_you_are_done`: Thinking tool for determining whether the task is truly completed.
# * `write_memory`: Writes a named memory (for future reference) to Serena's project-specific memory store.
excluded_tools: []

# initial prompt for the project. It will always be given to the LLM upon activating the project
# (contrary to the memories, which are loaded on demand).
initial_prompt: ""

project_name: "smartarchive"
changelog.md (new file): 96 added lines

@@ -0,0 +1,96 @@
# Changelog

## 2025-08-18 - 4.2.1 - fix(gzip)
Improve gzip streaming decompression, archive analysis and unpacking; add gzip tests

- Add a streaming DecompressGunzipTransform using fflate.Gunzip with proper _flush handling to support chunked gzip input and avoid buffering issues.
- Refactor ArchiveAnalyzer: introduce IAnalyzedResult, getAnalyzedStream(), and getDecompressionStream() to better detect mime types and wire appropriate decompression streams (gzip, zip, bzip2, tar).
- Use SmartRequest response streams converted via stream.Readable.fromWeb for URL sources in SmartArchive.getArchiveStream() to improve remote archive handling.
- Improve nested archive unpacking and SmartArchive export pipeline: more robust tar/zip handling, consistent SmartDuplex usage and backpressure handling.
- Enhance exportToFs: ensure directories, improved logging for relative paths, and safer write-stream wiring.
- Add comprehensive gzip-focused tests (test/test.gzip.ts) covering file extraction, stream extraction, header filename handling, large files, and a real-world tgz-from-URL extraction scenario.

## 2025-08-18 - 4.2.0 - feat(classes.smartarchive)
Support URL streams, recursive archive unpacking and filesystem export; improve ZIP/GZIP/BZIP2 robustness; CI and package metadata updates

- Add exportToFs(targetDir, fileName?) to write extracted StreamFile objects to the filesystem (ensures directories, logs relative paths, waits for write completion).
- Implement exportToStreamOfStreamFiles with recursive unpacking pipeline that handles application/x-tar (tar-stream Extract), application/zip (fflate Unzip), nested archives and StreamIntake for StreamFile results.
- Enhance getArchiveStream() to support URL/web streams (SmartRequest) and return Node Readable streams for remote archives.
- Make ZIP decompression more robust: accept ArrayBuffer-like chunks, coerce to Buffer before pushing to fflate.Unzip, and ensure SmartDuplex handling of results.
- Fixes and improvements to bzip2/gzip/tar tool implementations (various bug/formatting fixes, safer CRC and stream handling).
- Update CI workflows to use new registry image and adjust npmci install path; minor .gitignore additions.
- Package metadata tweaks: bugs URL and homepage updated, packageManager/pnpm fields adjusted.
- Documentation/readme expanded and polished with quick start, examples and API reference updates.
- Small test and plugin export cleanups (formatting and trailing commas removed/added).
- TypeScript/formatting fixes across many files (consistent casing, trailing commas, typings, tsconfig additions).

## 2025-08-18 - 4.1.0 - feat(classes.smartarchive)
Support URL web streams, add recursive archive unpacking and filesystem export, and improve ZIP decompression robustness

- ts/classes.smartarchive.ts: add exportToFs(targetDir, fileName?) to write extracted StreamFile objects to the filesystem (ensures directories, logs relative paths, waits for write completion).
- ts/classes.smartarchive.ts: implement exportToStreamOfStreamFiles with recursive unpacking pipeline that handles application/x-tar (tar-stream Extract), application/zip (fflate unzip), nested archives and StreamIntake for StreamFile results.
- ts/classes.smartarchive.ts: improve getArchiveStream() for URL sources by using SmartRequest.create().url(...).get() and converting the returned Web stream into a Node Readable stream.
- ts/classes.ziptools.ts: make ZIP decompression writeFunction more robust: accept non-Buffer chunks, coerce to Buffer before pushing to fflate.Unzip, and loosen the writeFunction typing to handle incoming ArrayBuffer-like data.

## 2024-10-13 - 4.0.39 - fix(core)
Fix dependencies and update documentation.

- Ensure package uses the latest dependencies
- Reviewed and grouped imports in TypeScript files
- Updated readme with advanced usage examples

## 2024-10-13 - 4.0.38 - fix(dependencies)
Update dependencies to latest versions

- Updated @push.rocks/smartfile to version 11.0.21
- Updated @push.rocks/smartpromise to version 4.0.4
- Updated @push.rocks/smartstream to version 3.0.46
- Updated @push.rocks/smarturl to version 3.1.0
- Updated file-type to version 19.5.0
- Updated @git.zone/tsbuild to version 2.1.84
- Updated @git.zone/tsrun to version 1.2.49
- Updated @push.rocks/tapbundle to version 5.3.0

## 2024-06-08 - 4.0.24 to 4.0.37 - Fixes and Updates
Core updates and bug fixes were implemented in versions 4.0.24 through 4.0.37.

- Repeated core updates and fixes applied consistently across multiple versions.

## 2024-06-06 - 4.0.22 to 4.0.23 - Descriptions and Fixes Updates
Efforts to update documentation and core features.

- "update description" in 4.0.22
- Updates to `tsconfig` and `npmextra.json` were performed.
- Ongoing core fixes.

## 2023-11-06 - 4.0.0 - Major Update with Breaking Changes
Introduction of significant updates and breaking changes.

- Transition to new version 4.0.0 with core updates.
- Break in compatibility due to major structural changes with core functionalities.

## 2023-07-11 - 3.0.6 - Organizational Changes
Structural reorganization and updates to the organization schema.

- Switch to new organizational schema implemented.

## 2022-04-04 - 3.0.0 - Build Updates and Breaking Changes
Major build update introducing breaking changes.

- Introduction of ESM structure with breaking changes.

## 2016-01-18 - 0.0.0 to 1.0.0 - Initial Development and Launch
Initial software development and establishment of core features.

- Project set-up including Travis CI integration.
- Launch of the first full version with code restructuring.
- Added callback support.
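Two of the 4.2.1 changes above lend themselves to small illustrations. First, the streaming gzip decompression: a minimal sketch of a chunked gunzip Transform built on fflate's streaming Gunzip could look like the following (illustrative only; the class name and buffer handling are assumptions, not the library's actual DecompressGunzipTransform):

```typescript
import { Transform, type TransformCallback } from 'stream';
import { Gunzip } from 'fflate';

// Feed compressed chunks in as they arrive, push decompressed chunks out,
// and finalize the fflate stream once the source ends (_flush).
class GunzipTransformSketch extends Transform {
  private gunzip = new Gunzip((data) => this.push(Buffer.from(data)));

  public _transform(chunk: Buffer, _enc: BufferEncoding, cb: TransformCallback) {
    this.gunzip.push(new Uint8Array(chunk)); // partial gzip input is fine
    cb();
  }

  public _flush(cb: TransformCallback) {
    this.gunzip.push(new Uint8Array(0), true); // mark the gzip stream as finished
    cb();
  }
}
```

Second, the ZIP robustness fix amounts to normalizing whatever chunk type arrives before handing it to fflate. A hedged sketch of that coercion (not the exact writeFunction from classes.ziptools.ts):

```typescript
// Normalize incoming chunks (Buffer, ArrayBuffer or other typed arrays)
// to a Buffer before pushing them into fflate's streaming Unzip.
function toBuffer(chunk: Buffer | ArrayBuffer | Uint8Array): Buffer {
  if (Buffer.isBuffer(chunk)) return chunk;
  if (chunk instanceof ArrayBuffer) return Buffer.from(chunk);
  return Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
}
```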
dist_ts/index.d.ts (vendored): 3 changed lines

@@ -1 +1,4 @@
 export * from './classes.smartarchive.js';
+export * from './classes.tartools.js';
+export * from './classes.ziptools.js';
+export * from './classes.gziptools.js';

dist_ts/index.js (vendored):

@@ -1,2 +1,5 @@
 export * from './classes.smartarchive.js';
-//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi90cy9pbmRleC50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxjQUFjLDJCQUEyQixDQUFDIn0=
+export * from './classes.tartools.js';
+export * from './classes.ziptools.js';
+export * from './classes.gziptools.js';
+//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi90cy9pbmRleC50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxjQUFjLDJCQUEyQixDQUFDO0FBQzFDLGNBQWMsdUJBQXVCLENBQUM7QUFDdEMsY0FBYyx1QkFBdUIsQ0FBQztBQUN0QyxjQUFjLHdCQUF3QixDQUFDIn0=
npmextra.json:

@@ -18,12 +18,16 @@
       "decompression",
       "zip",
       "tar",
+      "gzip",
       "bzip2",
-      "gzip"
+      "file extraction",
+      "file creation",
+      "data analysis",
+      "file stream"
     ]
   }
 },
 "tsdoc": {
   "legal": "\n## License and Legal Information\n\nThis repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. \n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or if you require further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n"
 }
}
}
package.json: 45 changed lines

@@ -1,12 +1,12 @@
 {
   "name": "@push.rocks/smartarchive",
-  "version": "4.0.25",
+  "version": "4.2.1",
   "description": "A library for working with archive files, providing utilities for compressing and decompressing data.",
   "main": "dist_ts/index.js",
   "typings": "dist_ts/index.d.ts",
   "type": "module",
   "scripts": {
-    "test": "(tstest test/ --web)",
+    "test": "(tstest test/ --verbose)",
     "build": "tsbuild --web --allowimplicitany",
     "buildDocs": "tsdoc"
   },
@@ -17,30 +17,29 @@
   "author": "Lossless GmbH",
   "license": "MIT",
   "bugs": {
-    "url": "https://github.com/pushrocks/smartarchive/issues"
+    "url": "https://code.foss.global/push.rocks/smartarchive/issues"
   },
-  "homepage": "https://code.foss.global/push.rocks/smartarchive",
+  "homepage": "https://code.foss.global/push.rocks/smartarchive#readme",
   "dependencies": {
     "@push.rocks/smartdelay": "^3.0.5",
-    "@push.rocks/smartfile": "^11.0.20",
-    "@push.rocks/smartpath": "^5.0.18",
-    "@push.rocks/smartpromise": "^4.0.3",
-    "@push.rocks/smartrequest": "^2.0.22",
-    "@push.rocks/smartrx": "^3.0.7",
-    "@push.rocks/smartstream": "^3.0.44",
+    "@push.rocks/smartfile": "^11.2.7",
+    "@push.rocks/smartpath": "^6.0.0",
+    "@push.rocks/smartpromise": "^4.2.3",
+    "@push.rocks/smartrequest": "^4.2.2",
+    "@push.rocks/smartrx": "^3.0.10",
+    "@push.rocks/smartstream": "^3.2.5",
     "@push.rocks/smartunique": "^3.0.9",
-    "@push.rocks/smarturl": "^3.0.7",
-    "@types/tar-stream": "^3.1.3",
+    "@push.rocks/smarturl": "^3.1.0",
+    "@types/tar-stream": "^3.1.4",
     "fflate": "^0.8.2",
-    "file-type": "^19.0.0",
+    "file-type": "^21.0.0",
     "tar-stream": "^3.1.7",
     "through": "^2.3.8"
   },
   "devDependencies": {
-    "@git.zone/tsbuild": "^2.1.80",
-    "@git.zone/tsrun": "^1.2.44",
-    "@git.zone/tstest": "^1.0.90",
-    "@push.rocks/tapbundle": "^5.0.23"
+    "@git.zone/tsbuild": "^2.6.6",
+    "@git.zone/tsrun": "^1.3.3",
+    "@git.zone/tstest": "^2.3.4"
   },
   "private": false,
   "files": [
@@ -64,7 +63,15 @@
     "decompression",
     "zip",
     "tar",
+    "gzip",
     "bzip2",
-    "gzip"
-  ]
+    "file extraction",
+    "file creation",
+    "data analysis",
+    "file stream"
+  ],
+  "packageManager": "pnpm@10.14.0+sha512.ad27a79641b49c3e481a16a805baa71817a04bbe06a38d17e60e2eaee83f6a146c6a688125f5792e48dd5ba30e7da52a5cda4c3992b9ccf333f9ce223af84748",
+  "pnpm": {
+    "overrides": {}
+  }
 }
pnpm-lock.yaml (generated): 7979 changed lines; diff suppressed because it is too large.
pnpm-workspace.yaml (new file): 4 added lines

@@ -0,0 +1,4 @@
onlyBuiltDependencies:
  - esbuild
  - mongodb-memory-server
  - puppeteer
readme.md: 367 changed lines

@@ -1,94 +1,333 @@
# @push.rocks/smartarchive
work with archives
# @push.rocks/smartarchive 📦

## Install
**Powerful archive manipulation for modern Node.js applications**

To install `@push.rocks/smartarchive`, you need to use npm or yarn. Run either of the following commands in your project directory:
`@push.rocks/smartarchive` is a versatile library for handling archive files with a focus on developer experience. Work with **zip**, **tar**, **gzip**, and **bzip2** formats through a unified, streaming-optimized API.

```shell
npm install @push.rocks/smartarchive --save
```
## Features 🚀

or if you prefer yarn:
- 📁 **Multi-format support** - Handle `.zip`, `.tar`, `.tar.gz`, `.tgz`, and `.bz2` archives
- 🌊 **Streaming-first architecture** - Process large archives without memory constraints
- 🔄 **Unified API** - Consistent interface across different archive formats
- 🎯 **Smart detection** - Automatically identifies archive types
- ⚡ **High performance** - Optimized for speed with parallel processing where possible
- 🔧 **Flexible I/O** - Work with files, URLs, and streams seamlessly
- 📊 **Archive analysis** - Inspect contents without extraction
- 🛠️ **Modern TypeScript** - Full type safety and excellent IDE support

```shell
## Installation 📥

```bash
# Using npm
npm install @push.rocks/smartarchive

# Using pnpm (recommended)
pnpm add @push.rocks/smartarchive

# Using yarn
yarn add @push.rocks/smartarchive
```

This will add `@push.rocks/smartarchive` to your project's dependencies.
## Quick Start 🎯

## Usage

`@push.rocks/smartarchive` is a powerful module designed to simplify the process of working with archive files such as zip, tar, gzip, and more. It provides an easy-to-use API for extracting, creating, and analyzing archives, making it an ideal choice for projects that require manipulation of archive files.

### Getting Started

First, import `SmartArchive` from `@push.rocks/smartarchive` using ESM syntax:
### Extract an archive from URL

```typescript
import { SmartArchive } from '@push.rocks/smartarchive';

// Extract a .tar.gz archive from a URL directly to the filesystem
const archive = await SmartArchive.fromArchiveUrl(
  'https://github.com/some/repo/archive/main.tar.gz'
);
await archive.exportToFs('./extracted');
```

### Extracting Archive Files

To extract an archive file, you can use `SmartArchive.fromArchiveUrl`, `SmartArchive.fromArchiveFile`, or `SmartArchive.fromArchiveStream` methods depending on the source of your archive. Here's an example of extracting an archive from a URL:
### Process archive as a stream

```typescript
async function extractArchiveFromURL() {
  const url = 'https://example.com/archive.zip';
  const targetDir = '/path/to/extract';
import { SmartArchive } from '@push.rocks/smartarchive';

  const archive = await SmartArchive.fromArchiveUrl(url);
  await archive.exportToFs(targetDir);
// Stream-based processing for memory efficiency
const archive = await SmartArchive.fromArchiveFile('./large-archive.zip');
const streamOfFiles = await archive.exportToStreamOfStreamFiles();

  console.log('Archive extracted successfully.');
// Process each file in the archive
streamOfFiles.on('data', (fileStream) => {
  console.log(`Processing ${fileStream.path}`);
  // Handle individual file stream
});
```

## Core Concepts 💡

### Archive Sources

`SmartArchive` accepts archives from three sources:

1. **URL** - Download and process archives from the web
2. **File** - Load archives from the local filesystem
3. **Stream** - Process archives from any Node.js stream

### Export Destinations

Extract archives to multiple destinations:

1. **Filesystem** - Extract directly to a directory
2. **Stream of files** - Process files individually as streams
3. **Archive stream** - Re-stream as different format

## Usage Examples 🔨

### Working with ZIP files

```typescript
import { SmartArchive } from '@push.rocks/smartarchive';

// Extract a ZIP file
const zipArchive = await SmartArchive.fromArchiveFile('./archive.zip');
await zipArchive.exportToFs('./output');

// Stream ZIP contents for processing
const fileStream = await zipArchive.exportToStreamOfStreamFiles();
fileStream.on('data', (file) => {
  if (file.path.endsWith('.json')) {
    // Process JSON files from the archive
    file.pipe(jsonProcessor);
  }
});
```

### Working with TAR archives

```typescript
import { SmartArchive, TarTools } from '@push.rocks/smartarchive';

// Extract a .tar.gz file
const tarGzArchive = await SmartArchive.fromArchiveFile('./archive.tar.gz');
await tarGzArchive.exportToFs('./extracted');

// Create a TAR archive (using TarTools directly)
const tarTools = new TarTools();
const packStream = await tarTools.packDirectory('./source-directory');
packStream.pipe(createWriteStream('./output.tar'));
```

### Extracting from URLs

```typescript
import { SmartArchive } from '@push.rocks/smartarchive';

// Download and extract in one operation
const remoteArchive = await SmartArchive.fromArchiveUrl(
  'https://example.com/data.tar.gz'
);

// Extract to filesystem
await remoteArchive.exportToFs('./local-dir');

// Or process as stream
const stream = await remoteArchive.exportToStreamOfStreamFiles();
```

### Analyzing archive contents

```typescript
import { SmartArchive } from '@push.rocks/smartarchive';

// Analyze without extracting
const archive = await SmartArchive.fromArchiveFile('./archive.zip');
const analyzer = archive.archiveAnalyzer;

// Use the analyzer to inspect contents
// (exact implementation depends on analyzer methods)
```

### Working with GZIP files

```typescript
import { SmartArchive, GzipTools } from '@push.rocks/smartarchive';

// Decompress a .gz file
const gzipArchive = await SmartArchive.fromArchiveFile('./data.json.gz');
await gzipArchive.exportToFs('./decompressed', 'data.json');

// Use GzipTools directly for streaming
const gzipTools = new GzipTools();
const decompressStream = gzipTools.getDecompressionStream();

createReadStream('./compressed.gz')
  .pipe(decompressStream)
  .pipe(createWriteStream('./decompressed'));
```

### Working with BZIP2 files

```typescript
import { SmartArchive } from '@push.rocks/smartarchive';

// Handle .bz2 files
const bzipArchive = await SmartArchive.fromArchiveUrl(
  'https://example.com/data.bz2'
);
await bzipArchive.exportToFs('./extracted', 'data.txt');
```

### Advanced streaming operations

```typescript
import { SmartArchive } from '@push.rocks/smartarchive';
import { pipeline } from 'stream/promises';

// Chain operations with streams
const archive = await SmartArchive.fromArchiveFile('./archive.tar.gz');
const exportStream = await archive.exportToStreamOfStreamFiles();

// Process each file in the archive
await pipeline(
  exportStream,
  async function* (source) {
    for await (const file of source) {
      if (file.path.endsWith('.log')) {
        // Process log files
        yield processLogFile(file);
      }
    }
  },
  createWriteStream('./processed-logs.txt')
);
```

### Creating archives (advanced)

```typescript
import { SmartArchive } from '@push.rocks/smartarchive';
import { TarTools } from '@push.rocks/smartarchive';

// Using SmartArchive to create an archive
const archive = new SmartArchive();

// Add content to the archive
archive.addedDirectories.push('./src');
archive.addedFiles.push('./readme.md');
archive.addedFiles.push('./package.json');

// Export as TAR.GZ
const tarGzStream = await archive.exportToTarGzStream();
tarGzStream.pipe(createWriteStream('./output.tar.gz'));
```

### Extract and transform

```typescript
import { SmartArchive } from '@push.rocks/smartarchive';
import { Transform } from 'stream';

// Extract and transform files in one pipeline
const archive = await SmartArchive.fromArchiveUrl(
  'https://example.com/source-code.tar.gz'
);

const extractStream = await archive.exportToStreamOfStreamFiles();

// Transform TypeScript to JavaScript during extraction
extractStream.on('data', (fileStream) => {
  if (fileStream.path.endsWith('.ts')) {
    fileStream
      .pipe(typescriptTranspiler())
      .pipe(createWriteStream(fileStream.path.replace('.ts', '.js')));
  } else {
    fileStream.pipe(createWriteStream(fileStream.path));
  }
});
```

## API Reference 📚

### SmartArchive Class

#### Static Methods

- `SmartArchive.fromArchiveUrl(url: string)` - Create from URL
- `SmartArchive.fromArchiveFile(path: string)` - Create from file
- `SmartArchive.fromArchiveStream(stream: NodeJS.ReadableStream)` - Create from stream

#### Instance Methods

- `exportToFs(targetDir: string, fileName?: string)` - Extract to filesystem
- `exportToStreamOfStreamFiles()` - Get a stream of file streams
- `exportToTarGzStream()` - Export as TAR.GZ stream
- `getArchiveStream()` - Get the raw archive stream

#### Properties

- `archiveAnalyzer` - Analyze archive contents
- `tarTools` - TAR-specific operations
- `zipTools` - ZIP-specific operations
- `gzipTools` - GZIP-specific operations
- `bzip2Tools` - BZIP2-specific operations

### Specialized Tools

Each tool class provides format-specific operations:

- **TarTools** - Pack/unpack TAR archives
- **ZipTools** - Handle ZIP compression
- **GzipTools** - GZIP compression/decompression
- **Bzip2Tools** - BZIP2 operations

## Performance Tips 🏎️

1. **Use streaming for large files** - Avoid loading entire archives into memory
2. **Process files in parallel** - Utilize stream operations for concurrent processing
3. **Choose the right format** - TAR.GZ for Unix systems, ZIP for cross-platform compatibility
4. **Enable compression wisely** - Balance between file size and CPU usage
## Error Handling 🛡️

```typescript
import { SmartArchive } from '@push.rocks/smartarchive';

try {
  const archive = await SmartArchive.fromArchiveUrl('https://example.com/file.zip');
  await archive.exportToFs('./output');
} catch (error) {
  if (error.code === 'ENOENT') {
    console.error('Archive file not found');
  } else if (error.code === 'EACCES') {
    console.error('Permission denied');
  } else {
    console.error('Archive extraction failed:', error.message);
  }
}
```

### Creating Archive Files

Creating archive files such as zip or tar.gz is straightforward with `smartarchive`. At the moment, you'll prepare the contents programmatically and then compress them. Detailed support for creating archives will be covered in future updates.

### Analyzing Archive Files

Analyzing the content of archives without extracting them can be useful in various scenarios, such as when you need to inspect the archive's content before deciding to extract it. Here's how you might analyze an archive:
## Real-World Use Cases 🌍

### Backup System
```typescript
async function analyzeArchive() {
  const url = 'https://example.com/archive.zip';

  const archive = await SmartArchive.fromArchiveUrl(url);
  const analysisResult = await archive.analyzeContent();

  console.log(analysisResult); // Outputs details about the archive content
}
// Automated backup extraction
const backup = await SmartArchive.fromArchiveFile('./backup.tar.gz');
await backup.exportToFs('/restore/location');
```

Note: Replace `analyzeContent` with the appropriate method calls as per your implementation or update, as `smartarchive` provides foundational classes and methods for interaction with archive files but does not directly implement an `analyzeContent` method by default.

### Stream Operations

`smartarchive` offers streaming operations, allowing you to work with large archives efficiently. Here's an example of using streams to extract an archive:

### CI/CD Pipeline
```typescript
import { createReadStream, createWriteStream } from 'fs';

async function extractArchiveUsingStream() {
  const archiveStream = createReadStream('/path/to/archive.zip');
  const archive = await SmartArchive.fromArchiveStream(archiveStream);
  const extractionStream = await archive.exportToStreamOfStreamFiles();

  extractionStream.pipe(createWriteStream('/path/to/destination'));
}
// Download and extract build artifacts
const artifacts = await SmartArchive.fromArchiveUrl(
  `${CI_SERVER}/artifacts/build-${BUILD_ID}.zip`
);
await artifacts.exportToFs('./dist');
```

### Conclusion

`@push.rocks/smartarchive` simplifies the process of working with various archive formats in JavaScript and TypeScript projects. By providing an easy-to-use API for common archive operations, it enables developers to integrate archive manipulation features into their applications efficiently.

Remember, the examples provided here are to give you a starting point. Depending on your specific use case, you may need to adjust these examples to fit your project's requirements. Always refer to the latest documentation for the most current information and methods available in `@push.rocks/smartarchive`.

For more information and API references, check the official [`@push.rocks/smartarchive` GitHub repository](https://github.com/pushrocks/smartarchive).
### Data Processing
```typescript
// Process compressed datasets
const dataset = await SmartArchive.fromArchiveUrl(
  'https://data.source/dataset.tar.bz2'
);
const files = await dataset.exportToStreamOfStreamFiles();
// Process each file in the dataset
```

## License and Legal Information

@@ -107,4 +346,4 @@ Registered at District court Bremen HRB 35230 HB, Germany

For any legal inquiries or if you require further information, please contact us via email at hello@task.vc.

By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.
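The Performance Tips in the new readme stop short of showing what "process files in parallel" looks like in practice. Below is a rough sketch that uses only the exportToStreamOfStreamFiles() API shown above; the handleFile helper and the batch size are hypothetical and not part of smartarchive:

```typescript
import { SmartArchive } from '@push.rocks/smartarchive';

// Hypothetical per-file handler; stands in for whatever work you do per entry.
declare function handleFile(file: NodeJS.ReadableStream & { path: string }): Promise<void>;

const archive = await SmartArchive.fromArchiveFile('./large-archive.tar.gz');
const fileStream = await archive.exportToStreamOfStreamFiles();

const batch: Promise<void>[] = [];
for await (const file of fileStream) {
  batch.push(handleFile(file));
  if (batch.length === 4) {   // small, illustrative concurrency limit
    await Promise.all(batch); // drain the batch before pulling more entries
    batch.length = 0;
  }
}
await Promise.all(batch);     // flush the final partial batch
```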
@@ -4,10 +4,4 @@ import * as smartfile from '@push.rocks/smartfile';
 import * as smartrequest from '@push.rocks/smartrequest';
 import * as smartstream from '@push.rocks/smartstream';
 
-export {
-  path,
-  smartpath,
-  smartfile,
-  smartrequest,
-  smartstream,
-}
+export { path, smartpath, smartfile, smartrequest, smartstream };
test/test.gzip.ts (new file): 219 added lines

@@ -0,0 +1,219 @@
|
||||
import { tap, expect } from '@git.zone/tstest/tapbundle';
|
||||
import * as plugins from './plugins.js';
|
||||
import * as smartarchive from '../ts/index.js';
|
||||
|
||||
const testPaths = {
|
||||
nogitDir: plugins.path.join(
|
||||
plugins.smartpath.get.dirnameFromImportMetaUrl(import.meta.url),
|
||||
'../.nogit/',
|
||||
),
|
||||
gzipTestDir: plugins.path.join(
|
||||
plugins.smartpath.get.dirnameFromImportMetaUrl(import.meta.url),
|
||||
'../.nogit/gzip-test',
|
||||
),
|
||||
};
|
||||
|
||||
tap.preTask('should prepare test directories', async () => {
|
||||
await plugins.smartfile.fs.ensureDir(testPaths.gzipTestDir);
|
||||
});
|
||||
|
||||
tap.test('should create and extract a gzip file', async () => {
|
||||
// Create test data
|
||||
const testContent = 'This is a test file for gzip compression and decompression.\n'.repeat(100);
|
||||
const testFileName = 'test-file.txt';
|
||||
const gzipFileName = 'test-file.txt.gz';
|
||||
|
||||
// Write the original file
|
||||
await plugins.smartfile.memory.toFs(
|
||||
testContent,
|
||||
plugins.path.join(testPaths.gzipTestDir, testFileName)
|
||||
);
|
||||
|
||||
// Compress the file using gzip
|
||||
const originalFile = await plugins.smartfile.fs.fileTreeToObject(
|
||||
testPaths.gzipTestDir,
|
||||
testFileName
|
||||
);
|
||||
|
||||
// Create gzip compressed version using fflate directly
|
||||
const fflate = await import('fflate');
|
||||
const compressed = fflate.gzipSync(Buffer.from(testContent));
|
||||
await plugins.smartfile.memory.toFs(
|
||||
Buffer.from(compressed),
|
||||
plugins.path.join(testPaths.gzipTestDir, gzipFileName)
|
||||
);
|
||||
|
||||
// Now test extraction using SmartArchive
|
||||
const gzipArchive = await smartarchive.SmartArchive.fromArchiveFile(
|
||||
plugins.path.join(testPaths.gzipTestDir, gzipFileName)
|
||||
);
|
||||
|
||||
// Export to a new location
|
||||
const extractPath = plugins.path.join(testPaths.gzipTestDir, 'extracted');
|
||||
await plugins.smartfile.fs.ensureDir(extractPath);
|
||||
// Provide a filename since gzip doesn't contain filename metadata
|
||||
await gzipArchive.exportToFs(extractPath, 'test-file.txt');
|
||||
|
||||
// Read the extracted file
|
||||
const extractedContent = await plugins.smartfile.fs.toStringSync(
|
||||
plugins.path.join(extractPath, 'test-file.txt')
|
||||
);
|
||||
|
||||
// Verify the content matches
|
||||
expect(extractedContent).toEqual(testContent);
|
||||
});
|
||||
|
||||
tap.test('should handle gzip stream extraction', async () => {
|
||||
// Create test data
|
||||
const testContent = 'Stream test data for gzip\n'.repeat(50);
|
||||
const gzipFileName = 'stream-test.txt.gz';
|
||||
|
||||
// Create gzip compressed version
|
||||
const fflate = await import('fflate');
|
||||
const compressed = fflate.gzipSync(Buffer.from(testContent));
|
||||
await plugins.smartfile.memory.toFs(
|
||||
Buffer.from(compressed),
|
||||
plugins.path.join(testPaths.gzipTestDir, gzipFileName)
|
||||
);
|
||||
|
||||
// Create a read stream for the gzip file
|
||||
const gzipStream = plugins.smartfile.fsStream.createReadStream(
|
||||
plugins.path.join(testPaths.gzipTestDir, gzipFileName)
|
||||
);
|
||||
|
||||
// Test extraction using SmartArchive from stream
|
||||
const gzipArchive = await smartarchive.SmartArchive.fromArchiveStream(gzipStream);
|
||||
|
||||
// Export to stream and collect the result
|
||||
const streamFiles: any[] = [];
|
||||
const resultStream = await gzipArchive.exportToStreamOfStreamFiles();
|
||||
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
resultStream.on('data', (streamFile) => {
|
||||
streamFiles.push(streamFile);
|
||||
});
|
||||
resultStream.on('end', resolve);
|
||||
resultStream.on('error', reject);
|
||||
});
|
||||
|
||||
// Verify we got the expected file
|
||||
expect(streamFiles.length).toBeGreaterThan(0);
|
||||
|
||||
// Read content from the stream file
|
||||
if (streamFiles[0]) {
|
||||
const chunks: Buffer[] = [];
|
||||
const readStream = await streamFiles[0].createReadStream();
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
readStream.on('data', (chunk: Buffer) => chunks.push(chunk));
|
||||
readStream.on('end', resolve);
|
||||
readStream.on('error', reject);
|
||||
});
|
||||
|
||||
const extractedContent = Buffer.concat(chunks).toString();
|
||||
expect(extractedContent).toEqual(testContent);
|
||||
}
|
||||
});
|
||||
|
||||
tap.test('should handle gzip files with original filename in header', async () => {
|
||||
// Test with a real-world gzip file that includes filename in header
|
||||
const testContent = 'File with name in gzip header\n'.repeat(30);
|
||||
const originalFileName = 'original-name.log';
|
||||
const gzipFileName = 'compressed.gz';
|
||||
|
||||
// Create a proper gzip with filename header using Node's zlib
|
||||
const zlib = await import('zlib');
|
||||
const gzipBuffer = await new Promise<Buffer>((resolve, reject) => {
|
||||
zlib.gzip(Buffer.from(testContent), {
|
||||
level: 9,
|
||||
// Note: Node's zlib doesn't support embedding filename directly,
|
||||
// but we can test the extraction anyway
|
||||
}, (err, result) => {
|
||||
if (err) reject(err);
|
||||
else resolve(result);
|
||||
});
|
||||
});
|
||||
|
||||
await plugins.smartfile.memory.toFs(
|
||||
gzipBuffer,
|
||||
plugins.path.join(testPaths.gzipTestDir, gzipFileName)
|
||||
);
|
||||
|
||||
// Test extraction
|
||||
const gzipArchive = await smartarchive.SmartArchive.fromArchiveFile(
|
||||
plugins.path.join(testPaths.gzipTestDir, gzipFileName)
|
||||
);
|
||||
|
||||
const extractPath = plugins.path.join(testPaths.gzipTestDir, 'header-test');
|
||||
await plugins.smartfile.fs.ensureDir(extractPath);
|
||||
// Provide a filename since gzip doesn't reliably contain filename metadata
|
||||
await gzipArchive.exportToFs(extractPath, 'compressed.txt');
|
||||
|
||||
// Check if file was extracted (name might be derived from archive name)
|
||||
const files = await plugins.smartfile.fs.listFileTree(extractPath, '**/*');
|
||||
expect(files.length).toBeGreaterThan(0);
|
||||
|
||||
// Read and verify content
|
||||
const extractedFile = files[0];
|
||||
const extractedContent = await plugins.smartfile.fs.toStringSync(
|
||||
plugins.path.join(extractPath, extractedFile || 'compressed.txt')
|
||||
);
|
||||
expect(extractedContent).toEqual(testContent);
|
||||
});
|
||||
|
||||
tap.test('should handle large gzip files', async () => {
|
||||
// Create a larger test file
|
||||
const largeContent = 'x'.repeat(1024 * 1024); // 1MB of 'x' characters
|
||||
const gzipFileName = 'large-file.txt.gz';
|
||||
|
||||
// Compress the large file
|
||||
const fflate = await import('fflate');
|
||||
const compressed = fflate.gzipSync(Buffer.from(largeContent));
|
||||
await plugins.smartfile.memory.toFs(
|
||||
Buffer.from(compressed),
|
||||
plugins.path.join(testPaths.gzipTestDir, gzipFileName)
|
||||
);
|
||||
|
||||
// Test extraction
|
||||
const gzipArchive = await smartarchive.SmartArchive.fromArchiveFile(
|
||||
plugins.path.join(testPaths.gzipTestDir, gzipFileName)
|
||||
);
|
||||
|
||||
const extractPath = plugins.path.join(testPaths.gzipTestDir, 'large-extracted');
|
||||
await plugins.smartfile.fs.ensureDir(extractPath);
|
||||
// Provide a filename since gzip doesn't contain filename metadata
|
||||
await gzipArchive.exportToFs(extractPath, 'large-file.txt');
|
||||
|
||||
// Verify the extracted content
|
||||
const files = await plugins.smartfile.fs.listFileTree(extractPath, '**/*');
|
||||
expect(files.length).toBeGreaterThan(0);
|
||||
|
||||
const extractedContent = await plugins.smartfile.fs.toStringSync(
|
||||
plugins.path.join(extractPath, files[0] || 'large-file.txt')
|
||||
);
|
||||
expect(extractedContent.length).toEqual(largeContent.length);
|
||||
expect(extractedContent).toEqual(largeContent);
|
||||
});
|
||||
|
||||
tap.test('should handle real-world multi-chunk gzip from URL', async () => {
|
||||
// Test with a real tgz file that will be processed in multiple chunks
|
||||
const testUrl = 'https://registry.npmjs.org/@push.rocks/smartfile/-/smartfile-11.2.7.tgz';
|
||||
|
||||
// Download and extract the archive
|
||||
const testArchive = await smartarchive.SmartArchive.fromArchiveUrl(testUrl);
|
||||
|
||||
const extractPath = plugins.path.join(testPaths.gzipTestDir, 'real-world-test');
|
||||
await plugins.smartfile.fs.ensureDir(extractPath);
|
||||
|
||||
// This will test multi-chunk decompression as the file is larger
|
||||
await testArchive.exportToFs(extractPath);
|
||||
|
||||
// Verify extraction worked
|
||||
const files = await plugins.smartfile.fs.listFileTree(extractPath, '**/*');
|
||||
expect(files.length).toBeGreaterThan(0);
|
||||
|
||||
// Check for expected package structure
|
||||
const hasPackageJson = files.some(f => f.includes('package.json'));
|
||||
expect(hasPackageJson).toBeTrue();
|
||||
});
|
||||
|
||||
export default tap.start();
|
test/test.ts: 32 changed lines

@@ -1,15 +1,15 @@
-import { tap, expect } from '@push.rocks/tapbundle';
+import { tap, expect } from '@git.zone/tstest/tapbundle';
 
 import * as plugins from './plugins.js';
 
 const testPaths = {
   nogitDir: plugins.path.join(
     plugins.smartpath.get.dirnameFromImportMetaUrl(import.meta.url),
-    '../.nogit/'
+    '../.nogit/',
   ),
   remoteDir: plugins.path.join(
     plugins.smartpath.get.dirnameFromImportMetaUrl(import.meta.url),
-    '../.nogit/remote'
+    '../.nogit/remote',
   ),
 };
 
@@ -20,31 +20,33 @@ tap.preTask('should prepare .nogit dir', async () => {
 });
 
 tap.preTask('should prepare downloads', async (tools) => {
-  const downloadedFile: Buffer = (
-    await plugins.smartrequest.getBinary(
-      'https://verdaccio.lossless.digital/@pushrocks%2fwebsetup/-/websetup-2.0.14.tgz'
+  const response = await plugins.smartrequest.SmartRequest.create()
+    .url(
+      'https://verdaccio.lossless.digital/@pushrocks%2fwebsetup/-/websetup-2.0.14.tgz',
     )
-  ).body;
+    .get();
+  const downloadedFile: Buffer = Buffer.from(await response.arrayBuffer());
   await plugins.smartfile.memory.toFs(
     downloadedFile,
-    plugins.path.join(testPaths.nogitDir, 'test.tgz')
+    plugins.path.join(testPaths.nogitDir, 'test.tgz'),
   );
 });
 
 tap.test('should extract existing files on disk', async () => {
   const testSmartarchive = await smartarchive.SmartArchive.fromArchiveUrl(
-    'https://verdaccio.lossless.digital/@pushrocks%2fwebsetup/-/websetup-2.0.14.tgz'
+    'https://verdaccio.lossless.digital/@pushrocks%2fwebsetup/-/websetup-2.0.14.tgz',
   );
   await testSmartarchive.exportToFs(testPaths.nogitDir);
 });
 
 tap.skip.test('should extract a b2zip', async () => {
-  const dataUrl = 'https://daten.offeneregister.de/de_companies_ocdata.jsonl.bz2';
+  const dataUrl =
+    'https://daten.offeneregister.de/de_companies_ocdata.jsonl.bz2';
   const testArchive = await smartarchive.SmartArchive.fromArchiveUrl(dataUrl);
-  await testArchive.exportToFs(
-    plugins.path.join(testPaths.nogitDir, 'de_companies_ocdata.jsonl'),
-    'data.jsonl',
-  );
-})
+  await testArchive.exportToFs(
+    plugins.path.join(testPaths.nogitDir, 'de_companies_ocdata.jsonl'),
+    'data.jsonl',
+  );
+});
 
 await tap.start();
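The smartrequest migration in this test reduces to the following pattern (a sketch based only on the calls visible in the diff; the helper name is made up):

```typescript
import * as smartrequest from '@push.rocks/smartrequest';

// Old API: const data: Buffer = (await smartrequest.getBinary(url)).body;
// New API, as used above: perform the request, then wrap the ArrayBuffer in a Buffer.
async function downloadToBuffer(url: string): Promise<Buffer> {
  const response = await smartrequest.SmartRequest.create().url(url).get();
  return Buffer.from(await response.arrayBuffer());
}
```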
@@ -1,8 +1,8 @@
 /**
- * autocreated commitinfo by @pushrocks/commitinfo
+ * autocreated commitinfo by @push.rocks/commitinfo
  */
 export const commitinfo = {
   name: '@push.rocks/smartarchive',
-  version: '4.0.25',
+  version: '4.2.1',
   description: 'A library for working with archive files, providing utilities for compressing and decompressing data.'
 }
@@ -1,41 +1,44 @@
|
||||
var BITMASK = [0, 0x01, 0x03, 0x07, 0x0F, 0x1F, 0x3F, 0x7F, 0xFF];
|
||||
var BITMASK = [0, 0x01, 0x03, 0x07, 0x0f, 0x1f, 0x3f, 0x7f, 0xff];
|
||||
|
||||
// returns a function that reads bits.
|
||||
// takes a buffer iterator as input
|
||||
export function bitIterator(nextBuffer: () => Buffer) {
|
||||
var bit = 0, byte = 0;
|
||||
var bytes = nextBuffer();
|
||||
var f = function(n) {
|
||||
if (n === null && bit != 0) { // align to byte boundary
|
||||
bit = 0
|
||||
byte++;
|
||||
return;
|
||||
}
|
||||
var result = 0;
|
||||
while(n > 0) {
|
||||
if (byte >= bytes.length) {
|
||||
byte = 0;
|
||||
bytes = nextBuffer();
|
||||
}
|
||||
var left = 8 - bit;
|
||||
if (bit === 0 && n > 0)
|
||||
// @ts-ignore
|
||||
f.bytesRead++;
|
||||
if (n >= left) {
|
||||
result <<= left;
|
||||
result |= (BITMASK[left] & bytes[byte++]);
|
||||
bit = 0;
|
||||
n -= left;
|
||||
} else {
|
||||
result <<= n;
|
||||
result |= ((bytes[byte] & (BITMASK[n] << (8 - n - bit))) >> (8 - n - bit));
|
||||
bit += n;
|
||||
n = 0;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
};
|
||||
// @ts-ignore
|
||||
f.bytesRead = 0;
|
||||
return f;
|
||||
};
|
||||
var bit = 0,
|
||||
byte = 0;
|
||||
var bytes = nextBuffer();
|
||||
var f = function (n) {
|
||||
if (n === null && bit != 0) {
|
||||
// align to byte boundary
|
||||
bit = 0;
|
||||
byte++;
|
||||
return;
|
||||
}
|
||||
var result = 0;
|
||||
while (n > 0) {
|
||||
if (byte >= bytes.length) {
|
||||
byte = 0;
|
||||
bytes = nextBuffer();
|
||||
}
|
||||
var left = 8 - bit;
|
||||
if (bit === 0 && n > 0)
|
||||
// @ts-ignore
|
||||
f.bytesRead++;
|
||||
if (n >= left) {
|
||||
result <<= left;
|
||||
result |= BITMASK[left] & bytes[byte++];
|
||||
bit = 0;
|
||||
n -= left;
|
||||
} else {
|
||||
result <<= n;
|
||||
result |=
|
||||
(bytes[byte] & (BITMASK[n] << (8 - n - bit))) >> (8 - n - bit);
|
||||
bit += n;
|
||||
n = 0;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
};
|
||||
// @ts-ignore
|
||||
f.bytesRead = 0;
|
||||
return f;
|
||||
}
|
||||
|
@@ -1,7 +1,7 @@
|
||||
export class Bzip2Error extends Error {
|
||||
public name: string = 'Bzip2Error';
|
||||
public message: string;
|
||||
public stack = (new Error()).stack;
|
||||
public stack = new Error().stack;
|
||||
|
||||
constructor(messageArg: string) {
|
||||
super();
|
||||
@@ -10,326 +10,322 @@ export class Bzip2Error extends Error {
|
||||
}
|
||||
|
||||
var messageArg = {
|
||||
Error: function(message) {throw new Bzip2Error(message);}
|
||||
Error: function (message) {
|
||||
throw new Bzip2Error(message);
|
||||
},
|
||||
};
|
||||
|
||||
export class Bzip2 {
|
||||
public Bzip2Error = Bzip2Error;
|
||||
public crcTable =
|
||||
[
|
||||
0x00000000, 0x04c11db7, 0x09823b6e, 0x0d4326d9,
|
||||
0x130476dc, 0x17c56b6b, 0x1a864db2, 0x1e475005,
|
||||
0x2608edb8, 0x22c9f00f, 0x2f8ad6d6, 0x2b4bcb61,
|
||||
0x350c9b64, 0x31cd86d3, 0x3c8ea00a, 0x384fbdbd,
|
||||
0x4c11db70, 0x48d0c6c7, 0x4593e01e, 0x4152fda9,
|
||||
0x5f15adac, 0x5bd4b01b, 0x569796c2, 0x52568b75,
|
||||
0x6a1936c8, 0x6ed82b7f, 0x639b0da6, 0x675a1011,
|
||||
0x791d4014, 0x7ddc5da3, 0x709f7b7a, 0x745e66cd,
|
||||
0x9823b6e0, 0x9ce2ab57, 0x91a18d8e, 0x95609039,
|
||||
0x8b27c03c, 0x8fe6dd8b, 0x82a5fb52, 0x8664e6e5,
|
||||
0xbe2b5b58, 0xbaea46ef, 0xb7a96036, 0xb3687d81,
|
||||
0xad2f2d84, 0xa9ee3033, 0xa4ad16ea, 0xa06c0b5d,
|
||||
0xd4326d90, 0xd0f37027, 0xddb056fe, 0xd9714b49,
|
||||
0xc7361b4c, 0xc3f706fb, 0xceb42022, 0xca753d95,
|
||||
0xf23a8028, 0xf6fb9d9f, 0xfbb8bb46, 0xff79a6f1,
|
||||
0xe13ef6f4, 0xe5ffeb43, 0xe8bccd9a, 0xec7dd02d,
|
||||
0x34867077, 0x30476dc0, 0x3d044b19, 0x39c556ae,
|
||||
0x278206ab, 0x23431b1c, 0x2e003dc5, 0x2ac12072,
|
||||
0x128e9dcf, 0x164f8078, 0x1b0ca6a1, 0x1fcdbb16,
|
||||
0x018aeb13, 0x054bf6a4, 0x0808d07d, 0x0cc9cdca,
|
||||
0x7897ab07, 0x7c56b6b0, 0x71159069, 0x75d48dde,
|
||||
0x6b93dddb, 0x6f52c06c, 0x6211e6b5, 0x66d0fb02,
|
||||
0x5e9f46bf, 0x5a5e5b08, 0x571d7dd1, 0x53dc6066,
|
||||
0x4d9b3063, 0x495a2dd4, 0x44190b0d, 0x40d816ba,
|
||||
0xaca5c697, 0xa864db20, 0xa527fdf9, 0xa1e6e04e,
|
||||
0xbfa1b04b, 0xbb60adfc, 0xb6238b25, 0xb2e29692,
|
||||
0x8aad2b2f, 0x8e6c3698, 0x832f1041, 0x87ee0df6,
|
||||
0x99a95df3, 0x9d684044, 0x902b669d, 0x94ea7b2a,
|
||||
0xe0b41de7, 0xe4750050, 0xe9362689, 0xedf73b3e,
|
||||
0xf3b06b3b, 0xf771768c, 0xfa325055, 0xfef34de2,
|
||||
0xc6bcf05f, 0xc27dede8, 0xcf3ecb31, 0xcbffd686,
|
||||
0xd5b88683, 0xd1799b34, 0xdc3abded, 0xd8fba05a,
|
||||
0x690ce0ee, 0x6dcdfd59, 0x608edb80, 0x644fc637,
|
||||
0x7a089632, 0x7ec98b85, 0x738aad5c, 0x774bb0eb,
|
||||
0x4f040d56, 0x4bc510e1, 0x46863638, 0x42472b8f,
|
||||
0x5c007b8a, 0x58c1663d, 0x558240e4, 0x51435d53,
|
||||
0x251d3b9e, 0x21dc2629, 0x2c9f00f0, 0x285e1d47,
|
||||
0x36194d42, 0x32d850f5, 0x3f9b762c, 0x3b5a6b9b,
|
||||
0x0315d626, 0x07d4cb91, 0x0a97ed48, 0x0e56f0ff,
|
||||
0x1011a0fa, 0x14d0bd4d, 0x19939b94, 0x1d528623,
|
||||
0xf12f560e, 0xf5ee4bb9, 0xf8ad6d60, 0xfc6c70d7,
|
||||
0xe22b20d2, 0xe6ea3d65, 0xeba91bbc, 0xef68060b,
|
||||
0xd727bbb6, 0xd3e6a601, 0xdea580d8, 0xda649d6f,
|
||||
0xc423cd6a, 0xc0e2d0dd, 0xcda1f604, 0xc960ebb3,
|
||||
0xbd3e8d7e, 0xb9ff90c9, 0xb4bcb610, 0xb07daba7,
|
||||
0xae3afba2, 0xaafbe615, 0xa7b8c0cc, 0xa379dd7b,
|
||||
0x9b3660c6, 0x9ff77d71, 0x92b45ba8, 0x9675461f,
|
||||
0x8832161a, 0x8cf30bad, 0x81b02d74, 0x857130c3,
|
||||
0x5d8a9099, 0x594b8d2e, 0x5408abf7, 0x50c9b640,
|
||||
0x4e8ee645, 0x4a4ffbf2, 0x470cdd2b, 0x43cdc09c,
|
||||
0x7b827d21, 0x7f436096, 0x7200464f, 0x76c15bf8,
|
||||
0x68860bfd, 0x6c47164a, 0x61043093, 0x65c52d24,
|
||||
0x119b4be9, 0x155a565e, 0x18197087, 0x1cd86d30,
|
||||
0x029f3d35, 0x065e2082, 0x0b1d065b, 0x0fdc1bec,
|
||||
0x3793a651, 0x3352bbe6, 0x3e119d3f, 0x3ad08088,
|
||||
0x2497d08d, 0x2056cd3a, 0x2d15ebe3, 0x29d4f654,
|
||||
0xc5a92679, 0xc1683bce, 0xcc2b1d17, 0xc8ea00a0,
|
||||
0xd6ad50a5, 0xd26c4d12, 0xdf2f6bcb, 0xdbee767c,
|
||||
0xe3a1cbc1, 0xe760d676, 0xea23f0af, 0xeee2ed18,
|
||||
0xf0a5bd1d, 0xf464a0aa, 0xf9278673, 0xfde69bc4,
|
||||
0x89b8fd09, 0x8d79e0be, 0x803ac667, 0x84fbdbd0,
|
||||
0x9abc8bd5, 0x9e7d9662, 0x933eb0bb, 0x97ffad0c,
|
||||
0xafb010b1, 0xab710d06, 0xa6322bdf, 0xa2f33668,
|
||||
0xbcb4666d, 0xb8757bda, 0xb5365d03, 0xb1f740b4
|
||||
public crcTable = [
|
||||
0x00000000, 0x04c11db7, 0x09823b6e, 0x0d4326d9, 0x130476dc, 0x17c56b6b,
|
||||
0x1a864db2, 0x1e475005, 0x2608edb8, 0x22c9f00f, 0x2f8ad6d6, 0x2b4bcb61,
|
||||
0x350c9b64, 0x31cd86d3, 0x3c8ea00a, 0x384fbdbd, 0x4c11db70, 0x48d0c6c7,
|
||||
0x4593e01e, 0x4152fda9, 0x5f15adac, 0x5bd4b01b, 0x569796c2, 0x52568b75,
|
||||
0x6a1936c8, 0x6ed82b7f, 0x639b0da6, 0x675a1011, 0x791d4014, 0x7ddc5da3,
|
||||
0x709f7b7a, 0x745e66cd, 0x9823b6e0, 0x9ce2ab57, 0x91a18d8e, 0x95609039,
|
||||
0x8b27c03c, 0x8fe6dd8b, 0x82a5fb52, 0x8664e6e5, 0xbe2b5b58, 0xbaea46ef,
|
||||
0xb7a96036, 0xb3687d81, 0xad2f2d84, 0xa9ee3033, 0xa4ad16ea, 0xa06c0b5d,
|
||||
0xd4326d90, 0xd0f37027, 0xddb056fe, 0xd9714b49, 0xc7361b4c, 0xc3f706fb,
|
||||
0xceb42022, 0xca753d95, 0xf23a8028, 0xf6fb9d9f, 0xfbb8bb46, 0xff79a6f1,
|
||||
0xe13ef6f4, 0xe5ffeb43, 0xe8bccd9a, 0xec7dd02d, 0x34867077, 0x30476dc0,
|
||||
0x3d044b19, 0x39c556ae, 0x278206ab, 0x23431b1c, 0x2e003dc5, 0x2ac12072,
|
||||
0x128e9dcf, 0x164f8078, 0x1b0ca6a1, 0x1fcdbb16, 0x018aeb13, 0x054bf6a4,
|
||||
0x0808d07d, 0x0cc9cdca, 0x7897ab07, 0x7c56b6b0, 0x71159069, 0x75d48dde,
|
||||
0x6b93dddb, 0x6f52c06c, 0x6211e6b5, 0x66d0fb02, 0x5e9f46bf, 0x5a5e5b08,
|
||||
0x571d7dd1, 0x53dc6066, 0x4d9b3063, 0x495a2dd4, 0x44190b0d, 0x40d816ba,
|
||||
0xaca5c697, 0xa864db20, 0xa527fdf9, 0xa1e6e04e, 0xbfa1b04b, 0xbb60adfc,
|
||||
0xb6238b25, 0xb2e29692, 0x8aad2b2f, 0x8e6c3698, 0x832f1041, 0x87ee0df6,
|
||||
0x99a95df3, 0x9d684044, 0x902b669d, 0x94ea7b2a, 0xe0b41de7, 0xe4750050,
|
||||
0xe9362689, 0xedf73b3e, 0xf3b06b3b, 0xf771768c, 0xfa325055, 0xfef34de2,
|
||||
0xc6bcf05f, 0xc27dede8, 0xcf3ecb31, 0xcbffd686, 0xd5b88683, 0xd1799b34,
|
||||
0xdc3abded, 0xd8fba05a, 0x690ce0ee, 0x6dcdfd59, 0x608edb80, 0x644fc637,
|
||||
0x7a089632, 0x7ec98b85, 0x738aad5c, 0x774bb0eb, 0x4f040d56, 0x4bc510e1,
|
||||
0x46863638, 0x42472b8f, 0x5c007b8a, 0x58c1663d, 0x558240e4, 0x51435d53,
|
||||
0x251d3b9e, 0x21dc2629, 0x2c9f00f0, 0x285e1d47, 0x36194d42, 0x32d850f5,
|
||||
0x3f9b762c, 0x3b5a6b9b, 0x0315d626, 0x07d4cb91, 0x0a97ed48, 0x0e56f0ff,
|
||||
0x1011a0fa, 0x14d0bd4d, 0x19939b94, 0x1d528623, 0xf12f560e, 0xf5ee4bb9,
|
||||
0xf8ad6d60, 0xfc6c70d7, 0xe22b20d2, 0xe6ea3d65, 0xeba91bbc, 0xef68060b,
|
||||
0xd727bbb6, 0xd3e6a601, 0xdea580d8, 0xda649d6f, 0xc423cd6a, 0xc0e2d0dd,
|
||||
0xcda1f604, 0xc960ebb3, 0xbd3e8d7e, 0xb9ff90c9, 0xb4bcb610, 0xb07daba7,
|
||||
0xae3afba2, 0xaafbe615, 0xa7b8c0cc, 0xa379dd7b, 0x9b3660c6, 0x9ff77d71,
|
||||
0x92b45ba8, 0x9675461f, 0x8832161a, 0x8cf30bad, 0x81b02d74, 0x857130c3,
|
||||
0x5d8a9099, 0x594b8d2e, 0x5408abf7, 0x50c9b640, 0x4e8ee645, 0x4a4ffbf2,
|
||||
0x470cdd2b, 0x43cdc09c, 0x7b827d21, 0x7f436096, 0x7200464f, 0x76c15bf8,
|
||||
0x68860bfd, 0x6c47164a, 0x61043093, 0x65c52d24, 0x119b4be9, 0x155a565e,
|
||||
0x18197087, 0x1cd86d30, 0x029f3d35, 0x065e2082, 0x0b1d065b, 0x0fdc1bec,
|
||||
0x3793a651, 0x3352bbe6, 0x3e119d3f, 0x3ad08088, 0x2497d08d, 0x2056cd3a,
|
||||
0x2d15ebe3, 0x29d4f654, 0xc5a92679, 0xc1683bce, 0xcc2b1d17, 0xc8ea00a0,
|
||||
0xd6ad50a5, 0xd26c4d12, 0xdf2f6bcb, 0xdbee767c, 0xe3a1cbc1, 0xe760d676,
|
||||
0xea23f0af, 0xeee2ed18, 0xf0a5bd1d, 0xf464a0aa, 0xf9278673, 0xfde69bc4,
|
||||
0x89b8fd09, 0x8d79e0be, 0x803ac667, 0x84fbdbd0, 0x9abc8bd5, 0x9e7d9662,
|
||||
0x933eb0bb, 0x97ffad0c, 0xafb010b1, 0xab710d06, 0xa6322bdf, 0xa2f33668,
|
||||
0xbcb4666d, 0xb8757bda, 0xb5365d03, 0xb1f740b4,
|
||||
];
|
||||
|
||||
array = function(bytes) {
|
||||
var bit = 0, byte = 0;
|
||||
var BITMASK = [0, 0x01, 0x03, 0x07, 0x0F, 0x1F, 0x3F, 0x7F, 0xFF ];
|
||||
return function(n) {
|
||||
var result = 0;
|
||||
while(n > 0) {
|
||||
var left = 8 - bit;
|
||||
if (n >= left) {
|
||||
result <<= left;
|
||||
result |= (BITMASK[left] & bytes[byte++]);
|
||||
bit = 0;
|
||||
n -= left;
|
||||
} else {
|
||||
result <<= n;
|
||||
result |= ((bytes[byte] & (BITMASK[n] << (8 - n - bit))) >> (8 - n - bit));
|
||||
bit += n;
|
||||
n = 0;
|
||||
}
|
||||
array = function (bytes) {
|
||||
var bit = 0,
|
||||
byte = 0;
|
||||
var BITMASK = [0, 0x01, 0x03, 0x07, 0x0f, 0x1f, 0x3f, 0x7f, 0xff];
|
||||
return function (n) {
|
||||
var result = 0;
|
||||
while (n > 0) {
|
||||
var left = 8 - bit;
|
||||
if (n >= left) {
|
||||
result <<= left;
|
||||
result |= BITMASK[left] & bytes[byte++];
|
||||
bit = 0;
|
||||
n -= left;
|
||||
} else {
|
||||
result <<= n;
|
||||
result |=
|
||||
(bytes[byte] & (BITMASK[n] << (8 - n - bit))) >> (8 - n - bit);
|
||||
bit += n;
|
||||
n = 0;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
};
|
||||
};
|
||||
|
||||
simple = function(srcbuffer, stream) {
|
||||
simple = function (srcbuffer, stream) {
|
||||
var bits = this.array(srcbuffer);
|
||||
var size = this.header(bits);
|
||||
var ret = false;
|
||||
var bufsize = 100000 * size;
|
||||
var buf = new Int32Array(bufsize);
|
||||
|
||||
do {
|
||||
ret = this.decompress(bits, stream, buf, bufsize);
|
||||
} while(!ret);
|
||||
}
|
||||
|
||||
header = function(bits) {
|
||||
do {
|
||||
ret = this.decompress(bits, stream, buf, bufsize);
|
||||
} while (!ret);
|
||||
};
|
||||
|
||||
header = function (bits) {
|
||||
this.byteCount = new Int32Array(256);
|
||||
this.symToByte = new Uint8Array(256);
|
||||
this.mtfSymbol = new Int32Array(256);
|
||||
this.selectors = new Uint8Array(0x8000);
|
||||
|
||||
if (bits(8*3) != 4348520) messageArg.Error("No magic number found");
|
||||
|
||||
|
||||
if (bits(8 * 3) != 4348520) messageArg.Error('No magic number found');
|
||||
|
||||
var i = bits(8) - 48;
|
||||
if (i < 1 || i > 9) messageArg.Error("Not a BZIP archive");
|
||||
if (i < 1 || i > 9) messageArg.Error('Not a BZIP archive');
|
||||
return i;
|
||||
};
|
||||
|
||||
decompress = function(bits, stream, buf, bufsize, streamCRC) {
|
||||
decompress = function (bits, stream, buf, bufsize, streamCRC) {
|
||||
var MAX_HUFCODE_BITS = 20;
|
||||
var MAX_SYMBOLS = 258;
|
||||
var SYMBOL_RUNA = 0;
|
||||
var SYMBOL_RUNB = 1;
|
||||
var GROUP_SIZE = 50;
|
||||
var crc = 0 ^ (-1);
|
||||
|
||||
for(var h = '', i = 0; i < 6; i++) h += bits(8).toString(16);
|
||||
if (h == "177245385090") {
|
||||
var finalCRC = bits(32)|0;
|
||||
if (finalCRC !== streamCRC) messageArg.Error("Error in bzip2: crc32 do not match");
|
||||
var crc = 0 ^ -1;
|
||||
|
||||
for (var h = '', i = 0; i < 6; i++) h += bits(8).toString(16);
|
||||
if (h == '177245385090') {
|
||||
var finalCRC = bits(32) | 0;
|
||||
if (finalCRC !== streamCRC)
|
||||
messageArg.Error('Error in bzip2: crc32 do not match');
|
||||
// align stream to byte
|
||||
bits(null);
|
||||
return null; // reset streamCRC for next call
|
||||
}
|
||||
if (h != "314159265359") messageArg.Error("eek not valid bzip data");
|
||||
var crcblock = bits(32)|0; // CRC code
|
||||
if (bits(1)) messageArg.Error("unsupported obsolete version");
|
||||
if (h != '314159265359') messageArg.Error('eek not valid bzip data');
|
||||
var crcblock = bits(32) | 0; // CRC code
|
||||
if (bits(1)) messageArg.Error('unsupported obsolete version');
|
||||
var origPtr = bits(24);
|
||||
if (origPtr > bufsize) messageArg.Error("Initial position larger than buffer size");
|
||||
if (origPtr > bufsize)
|
||||
messageArg.Error('Initial position larger than buffer size');
|
||||
var t = bits(16);
|
||||
var symTotal = 0;
|
||||
for (i = 0; i < 16; i++) {
|
||||
if (t & (1 << (15 - i))) {
|
||||
var k = bits(16);
|
||||
for(j = 0; j < 16; j++) {
|
||||
if (k & (1 << (15 - j))) {
|
||||
this.symToByte[symTotal++] = (16 * i) + j;
|
||||
}
|
||||
}
|
||||
if (t & (1 << (15 - i))) {
|
||||
var k = bits(16);
|
||||
for (j = 0; j < 16; j++) {
|
||||
if (k & (1 << (15 - j))) {
|
||||
this.symToByte[symTotal++] = 16 * i + j;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
var groupCount = bits(3);
|
||||
if (groupCount < 2 || groupCount > 6) messageArg.Error("another error");
|
||||
if (groupCount < 2 || groupCount > 6) messageArg.Error('another error');
|
||||
var nSelectors = bits(15);
|
||||
if (nSelectors == 0) messageArg.Error("meh");
|
||||
for(var i = 0; i < groupCount; i++) this.mtfSymbol[i] = i;
|
||||
|
||||
for(var i = 0; i < nSelectors; i++) {
|
||||
for(var j = 0; bits(1); j++) if (j >= groupCount) messageArg.Error("whoops another error");
|
||||
var uc = this.mtfSymbol[j];
|
||||
for(var k: any = j-1; k>=0; k--) {
|
||||
this.mtfSymbol[k+1] = this.mtfSymbol[k];
|
||||
}
|
||||
this.mtfSymbol[0] = uc;
|
||||
this.selectors[i] = uc;
|
||||
if (nSelectors == 0) messageArg.Error('meh');
|
||||
for (var i = 0; i < groupCount; i++) this.mtfSymbol[i] = i;
|
||||
|
||||
for (var i = 0; i < nSelectors; i++) {
|
||||
for (var j = 0; bits(1); j++)
|
||||
if (j >= groupCount) messageArg.Error('whoops another error');
|
||||
var uc = this.mtfSymbol[j];
|
||||
for (var k: any = j - 1; k >= 0; k--) {
|
||||
this.mtfSymbol[k + 1] = this.mtfSymbol[k];
|
||||
}
|
||||
this.mtfSymbol[0] = uc;
|
||||
this.selectors[i] = uc;
|
||||
}
|
||||
|
||||
|
||||
var symCount = symTotal + 2;
|
||||
var groups = [];
|
||||
var length = new Uint8Array(MAX_SYMBOLS),
|
||||
temp = new Uint16Array(MAX_HUFCODE_BITS+1);
|
||||
|
||||
temp = new Uint16Array(MAX_HUFCODE_BITS + 1);
|
||||
|
||||
var hufGroup;
|
||||
|
||||
for(var j = 0; j < groupCount; j++) {
|
||||
t = bits(5); //lengths
|
||||
for(var i = 0; i < symCount; i++) {
|
||||
while(true){
|
||||
if (t < 1 || t > MAX_HUFCODE_BITS) messageArg.Error("I gave up a while ago on writing error messages");
|
||||
if (!bits(1)) break;
|
||||
if (!bits(1)) t++;
|
||||
else t--;
|
||||
}
|
||||
length[i] = t;
|
||||
|
||||
for (var j = 0; j < groupCount; j++) {
|
||||
t = bits(5); //lengths
|
||||
for (var i = 0; i < symCount; i++) {
|
||||
while (true) {
|
||||
if (t < 1 || t > MAX_HUFCODE_BITS)
|
||||
messageArg.Error('I gave up a while ago on writing error messages');
|
||||
if (!bits(1)) break;
|
||||
if (!bits(1)) t++;
|
||||
else t--;
|
||||
}
|
||||
var minLen, maxLen;
|
||||
minLen = maxLen = length[0];
|
||||
for(var i = 1; i < symCount; i++) {
|
||||
if (length[i] > maxLen) maxLen = length[i];
|
||||
else if (length[i] < minLen) minLen = length[i];
|
||||
}
|
||||
hufGroup = groups[j] = {};
|
||||
hufGroup.permute = new Int32Array(MAX_SYMBOLS);
|
||||
hufGroup.limit = new Int32Array(MAX_HUFCODE_BITS + 1);
|
||||
hufGroup.base = new Int32Array(MAX_HUFCODE_BITS + 1);
|
||||
|
||||
hufGroup.minLen = minLen;
|
||||
hufGroup.maxLen = maxLen;
|
||||
var base = hufGroup.base;
|
||||
var limit = hufGroup.limit;
|
||||
var pp = 0;
|
||||
for(var i: number = minLen; i <= maxLen; i++)
|
||||
for(var t: any = 0; t < symCount; t++)
|
||||
if (length[t] == i) hufGroup.permute[pp++] = t;
|
||||
for(i = minLen; i <= maxLen; i++) temp[i] = limit[i] = 0;
|
||||
for(i = 0; i < symCount; i++) temp[length[i]]++;
|
||||
pp = t = 0;
|
||||
for(i = minLen; i < maxLen; i++) {
|
||||
pp += temp[i];
|
||||
limit[i] = pp - 1;
|
||||
pp <<= 1;
|
||||
base[i+1] = pp - (t += temp[i]);
|
||||
}
|
||||
limit[maxLen] = pp + temp[maxLen] - 1;
|
||||
base[minLen] = 0;
|
||||
length[i] = t;
|
||||
}
|
||||
var minLen, maxLen;
|
||||
minLen = maxLen = length[0];
|
||||
for (var i = 1; i < symCount; i++) {
|
||||
if (length[i] > maxLen) maxLen = length[i];
|
||||
else if (length[i] < minLen) minLen = length[i];
|
||||
}
|
||||
hufGroup = groups[j] = {};
|
||||
hufGroup.permute = new Int32Array(MAX_SYMBOLS);
|
||||
hufGroup.limit = new Int32Array(MAX_HUFCODE_BITS + 1);
|
||||
hufGroup.base = new Int32Array(MAX_HUFCODE_BITS + 1);
|
||||
|
||||
hufGroup.minLen = minLen;
|
||||
hufGroup.maxLen = maxLen;
|
||||
var base = hufGroup.base;
|
||||
var limit = hufGroup.limit;
|
||||
var pp = 0;
|
||||
for (var i: number = minLen; i <= maxLen; i++)
|
||||
for (var t: any = 0; t < symCount; t++)
|
||||
if (length[t] == i) hufGroup.permute[pp++] = t;
|
||||
for (i = minLen; i <= maxLen; i++) temp[i] = limit[i] = 0;
|
||||
for (i = 0; i < symCount; i++) temp[length[i]]++;
|
||||
pp = t = 0;
|
||||
for (i = minLen; i < maxLen; i++) {
|
||||
pp += temp[i];
|
||||
limit[i] = pp - 1;
|
||||
pp <<= 1;
|
||||
base[i + 1] = pp - (t += temp[i]);
|
||||
}
|
||||
limit[maxLen] = pp + temp[maxLen] - 1;
|
||||
base[minLen] = 0;
|
||||
}
|
||||
|
||||
for(var i = 0; i < 256; i++) {
|
||||
this.mtfSymbol[i] = i;
|
||||
this.byteCount[i] = 0;
|
||||
|
||||
for (var i = 0; i < 256; i++) {
|
||||
this.mtfSymbol[i] = i;
|
||||
this.byteCount[i] = 0;
|
||||
}
|
||||
var runPos, count, symCount: number, selector;
|
||||
runPos = count = symCount = selector = 0;
|
||||
while(true) {
|
||||
if (!(symCount--)) {
|
||||
symCount = GROUP_SIZE - 1;
|
||||
if (selector >= nSelectors) messageArg.Error("meow i'm a kitty, that's an error");
|
||||
hufGroup = groups[this.selectors[selector++]];
|
||||
base = hufGroup.base;
|
||||
limit = hufGroup.limit;
|
||||
runPos = count = symCount = selector = 0;
|
||||
while (true) {
|
||||
if (!symCount--) {
|
||||
symCount = GROUP_SIZE - 1;
|
||||
if (selector >= nSelectors)
|
||||
messageArg.Error("meow i'm a kitty, that's an error");
|
||||
hufGroup = groups[this.selectors[selector++]];
|
||||
base = hufGroup.base;
|
||||
limit = hufGroup.limit;
|
||||
}
|
||||
i = hufGroup.minLen;
|
||||
j = bits(i);
|
||||
while (true) {
|
||||
if (i > hufGroup.maxLen) messageArg.Error("rawr i'm a dinosaur");
|
||||
if (j <= limit[i]) break;
|
||||
i++;
|
||||
j = (j << 1) | bits(1);
|
||||
}
|
||||
j -= base[i];
|
||||
if (j < 0 || j >= MAX_SYMBOLS) messageArg.Error("moo i'm a cow");
|
||||
var nextSym = hufGroup.permute[j];
|
||||
if (nextSym == SYMBOL_RUNA || nextSym == SYMBOL_RUNB) {
|
||||
if (!runPos) {
|
||||
runPos = 1;
|
||||
t = 0;
|
||||
}
|
||||
i = hufGroup.minLen;
|
||||
j = bits(i);
|
||||
while(true) {
|
||||
if (i > hufGroup.maxLen) messageArg.Error("rawr i'm a dinosaur");
|
||||
if (j <= limit[i]) break;
|
||||
i++;
|
||||
j = (j << 1) | bits(1);
|
||||
}
|
||||
j -= base[i];
|
||||
if (j < 0 || j >= MAX_SYMBOLS) messageArg.Error("moo i'm a cow");
|
||||
var nextSym = hufGroup.permute[j];
|
||||
if (nextSym == SYMBOL_RUNA || nextSym == SYMBOL_RUNB) {
|
||||
if (!runPos){
|
||||
runPos = 1;
|
||||
t = 0;
|
||||
}
|
||||
if (nextSym == SYMBOL_RUNA) t += runPos;
|
||||
else t += 2 * runPos;
|
||||
runPos <<= 1;
|
||||
continue;
|
||||
}
|
||||
if (runPos) {
|
||||
runPos = 0;
|
||||
if (count + t > bufsize) messageArg.Error("Boom.");
|
||||
uc = this.symToByte[this.mtfSymbol[0]];
|
||||
this.byteCount[uc] += t;
|
||||
while(t--) buf[count++] = uc;
|
||||
}
|
||||
if (nextSym > symTotal) break;
|
||||
if (count >= bufsize) messageArg.Error("I can't think of anything. Error");
|
||||
i = nextSym - 1;
|
||||
uc = this.mtfSymbol[i];
|
||||
for(var k: any = i-1; k>=0; k--) {
|
||||
this.mtfSymbol[k+1] = this.mtfSymbol[k];
|
||||
}
|
||||
this.mtfSymbol[0] = uc
|
||||
uc = this.symToByte[uc];
|
||||
this.byteCount[uc]++;
|
||||
buf[count++] = uc;
|
||||
if (nextSym == SYMBOL_RUNA) t += runPos;
|
||||
else t += 2 * runPos;
|
||||
runPos <<= 1;
|
||||
continue;
|
||||
}
|
||||
if (runPos) {
|
||||
runPos = 0;
|
||||
if (count + t > bufsize) messageArg.Error('Boom.');
|
||||
uc = this.symToByte[this.mtfSymbol[0]];
|
||||
this.byteCount[uc] += t;
|
||||
while (t--) buf[count++] = uc;
|
||||
}
|
||||
if (nextSym > symTotal) break;
|
||||
if (count >= bufsize)
|
||||
messageArg.Error("I can't think of anything. Error");
|
||||
i = nextSym - 1;
|
||||
uc = this.mtfSymbol[i];
|
||||
for (var k: any = i - 1; k >= 0; k--) {
|
||||
this.mtfSymbol[k + 1] = this.mtfSymbol[k];
|
||||
}
|
||||
this.mtfSymbol[0] = uc;
|
||||
uc = this.symToByte[uc];
|
||||
this.byteCount[uc]++;
|
||||
buf[count++] = uc;
|
||||
}
|
||||
if (origPtr < 0 || origPtr >= count) messageArg.Error("I'm a monkey and I'm throwing something at someone, namely you");
|
||||
if (origPtr < 0 || origPtr >= count)
|
||||
messageArg.Error(
|
||||
"I'm a monkey and I'm throwing something at someone, namely you",
|
||||
);
|
||||
var j = 0;
|
||||
for(var i = 0; i < 256; i++) {
|
||||
k = j + this.byteCount[i];
|
||||
this.byteCount[i] = j;
|
||||
j = k;
|
||||
for (var i = 0; i < 256; i++) {
|
||||
k = j + this.byteCount[i];
|
||||
this.byteCount[i] = j;
|
||||
j = k;
|
||||
}
|
||||
for(var i = 0; i < count; i++) {
|
||||
uc = buf[i] & 0xff;
|
||||
buf[this.byteCount[uc]] |= (i << 8);
|
||||
this.byteCount[uc]++;
|
||||
for (var i = 0; i < count; i++) {
|
||||
uc = buf[i] & 0xff;
|
||||
buf[this.byteCount[uc]] |= i << 8;
|
||||
this.byteCount[uc]++;
|
||||
}
|
||||
var pos = 0, current = 0, run = 0;
|
||||
var pos = 0,
|
||||
current = 0,
|
||||
run = 0;
|
||||
if (count) {
|
||||
pos = buf[origPtr];
|
||||
current = (pos & 0xff);
|
||||
pos >>= 8;
|
||||
run = -1;
|
||||
pos = buf[origPtr];
|
||||
current = pos & 0xff;
|
||||
pos >>= 8;
|
||||
run = -1;
|
||||
}
|
||||
count = count;
|
||||
var copies, previous, outbyte;
|
||||
while(count) {
|
||||
count--;
|
||||
previous = current;
|
||||
pos = buf[pos];
|
||||
current = pos & 0xff;
|
||||
pos >>= 8;
|
||||
if (run++ == 3) {
|
||||
copies = current;
|
||||
outbyte = previous;
|
||||
current = -1;
|
||||
} else {
|
||||
copies = 1;
|
||||
outbyte = current;
|
||||
}
|
||||
while(copies--) {
|
||||
crc = ((crc << 8) ^ this.crcTable[((crc>>24) ^ outbyte) & 0xFF])&0xFFFFFFFF; // crc32
|
||||
stream(outbyte);
|
||||
}
|
||||
if (current != previous) run = 0;
|
||||
while (count) {
|
||||
count--;
|
||||
previous = current;
|
||||
pos = buf[pos];
|
||||
current = pos & 0xff;
|
||||
pos >>= 8;
|
||||
if (run++ == 3) {
|
||||
copies = current;
|
||||
outbyte = previous;
|
||||
current = -1;
|
||||
} else {
|
||||
copies = 1;
|
||||
outbyte = current;
|
||||
}
|
||||
while (copies--) {
|
||||
crc =
|
||||
((crc << 8) ^ this.crcTable[((crc >> 24) ^ outbyte) & 0xff]) &
|
||||
0xffffffff; // crc32
|
||||
stream(outbyte);
|
||||
}
|
||||
if (current != previous) run = 0;
|
||||
}
|
||||
|
||||
crc = (crc ^ (-1)) >>> 0;
|
||||
if ((crc|0) != (crcblock|0)) messageArg.Error("Error in bzip2: crc32 do not match");
|
||||
streamCRC = (crc ^ ((streamCRC << 1) | (streamCRC >>> 31))) & 0xFFFFFFFF;
|
||||
|
||||
crc = (crc ^ -1) >>> 0;
|
||||
if ((crc | 0) != (crcblock | 0))
|
||||
messageArg.Error('Error in bzip2: crc32 do not match');
|
||||
streamCRC = (crc ^ ((streamCRC << 1) | (streamCRC >>> 31))) & 0xffffffff;
|
||||
return streamCRC;
|
||||
};
|
||||
};
|
||||
}
|
||||
|
@@ -26,7 +26,13 @@ export function unbzip2Stream() {
|
||||
chunk.push(b);
|
||||
};
|
||||
|
||||
streamCRC = bzip2Instance.decompress(bitReader, f, buf, bufsize, streamCRC);
|
||||
streamCRC = bzip2Instance.decompress(
|
||||
bitReader,
|
||||
f,
|
||||
buf,
|
||||
bufsize,
|
||||
streamCRC,
|
||||
);
|
||||
if (streamCRC === null) {
|
||||
// reset for next bzip2 header
|
||||
blockSize = 0;
|
||||
@@ -66,7 +72,10 @@ export function unbzip2Stream() {
|
||||
return bufferQueue.shift();
|
||||
});
|
||||
}
|
||||
while (!broken && hasBytes - bitReader.bytesRead + 1 >= (25000 + 100000 * blockSize || 4)) {
|
||||
while (
|
||||
!broken &&
|
||||
hasBytes - bitReader.bytesRead + 1 >= (25000 + 100000 * blockSize || 4)
|
||||
) {
|
||||
//console.error('decompressing with', hasBytes - bitReader.bytesRead + 1, 'bytes in buffer');
|
||||
const result = await decompressAndPush();
|
||||
if (!result) {
|
||||
@@ -86,7 +95,8 @@ export function unbzip2Stream() {
|
||||
await streamTools.push(result);
|
||||
}
|
||||
if (!broken) {
|
||||
if (streamCRC !== null) this.emit('error', new Error('input stream ended prematurely'));
|
||||
if (streamCRC !== null)
|
||||
this.emit('error', new Error('input stream ended prematurely'));
|
||||
}
|
||||
},
|
||||
});
|
||||
|
@@ -5,7 +5,10 @@ export interface IAnalyzedResult {
|
||||
fileType: plugins.fileType.FileTypeResult;
|
||||
isArchive: boolean;
|
||||
resultStream: plugins.smartstream.SmartDuplex;
|
||||
decompressionStream: plugins.stream.Transform | plugins.stream.Duplex | plugins.tarStream.Extract;
|
||||
decompressionStream:
|
||||
| plugins.stream.Transform
|
||||
| plugins.stream.Duplex
|
||||
| plugins.tarStream.Extract;
|
||||
}
|
||||
|
||||
export class ArchiveAnalyzer {
|
||||
@@ -25,14 +28,15 @@ export class ArchiveAnalyzer {
|
||||
'application/x-bzip2',
|
||||
// Add other archive mime types here
|
||||
]);
|
||||
|
||||
|
||||
return archiveMimeTypes.has(mimeType);
|
||||
}
|
||||
|
||||
|
||||
private async getDecompressionStream(
|
||||
mimeTypeArg: plugins.fileType.FileTypeResult['mime']
|
||||
): Promise<plugins.stream.Transform | plugins.stream.Duplex | plugins.tarStream.Extract> {
|
||||
mimeTypeArg: plugins.fileType.FileTypeResult['mime'],
|
||||
): Promise<
|
||||
plugins.stream.Transform | plugins.stream.Duplex | plugins.tarStream.Extract
|
||||
> {
|
||||
switch (mimeTypeArg) {
|
||||
case 'application/gzip':
|
||||
return this.smartArchiveRef.gzipTools.getDecompressionStream();
|
||||
@@ -51,13 +55,18 @@ export class ArchiveAnalyzer {
|
||||
public getAnalyzedStream() {
|
||||
let firstRun = true;
|
||||
const resultStream = plugins.smartstream.createPassThrough();
|
||||
const analyzerstream = new plugins.smartstream.SmartDuplex<Buffer, IAnalyzedResult>({
|
||||
const analyzerstream = new plugins.smartstream.SmartDuplex<
|
||||
Buffer,
|
||||
IAnalyzedResult
|
||||
>({
|
||||
readableObjectMode: true,
|
||||
writeFunction: async (chunkArg: Buffer, streamtools) => {
|
||||
if (firstRun) {
|
||||
firstRun = false;
|
||||
const fileType = await plugins.fileType.fileTypeFromBuffer(chunkArg);
|
||||
const decompressionStream = await this.getDecompressionStream(fileType?.mime as any);
|
||||
const decompressionStream = await this.getDecompressionStream(
|
||||
fileType?.mime as any,
|
||||
);
|
||||
/**
|
||||
* analyzed stream emits once with this object
|
||||
*/
|
||||
@@ -75,7 +84,7 @@ export class ArchiveAnalyzer {
|
||||
finalFunction: async (tools) => {
|
||||
resultStream.push(null);
|
||||
return null;
|
||||
}
|
||||
},
|
||||
});
|
||||
return analyzerstream;
|
||||
}
|
||||
|
@@ -13,4 +13,4 @@ export class Bzip2Tools {
|
||||
getDecompressionStream() {
|
||||
return unbzip2Stream();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -1,5 +1,5 @@
|
||||
import type { SmartArchive } from './classes.smartarchive.js';
|
||||
import * as plugins from './plugins.js'
|
||||
import * as plugins from './plugins.js';
|
||||
|
||||
// This class wraps fflate's gunzip in a Node.js Transform stream
|
||||
export class CompressGunzipTransform extends plugins.stream.Transform {
|
||||
@@ -7,7 +7,11 @@ export class CompressGunzipTransform extends plugins.stream.Transform {
|
||||
super();
|
||||
}
|
||||
|
||||
_transform(chunk: Buffer, encoding: BufferEncoding, callback: plugins.stream.TransformCallback) {
|
||||
_transform(
|
||||
chunk: Buffer,
|
||||
encoding: BufferEncoding,
|
||||
callback: plugins.stream.TransformCallback,
|
||||
) {
|
||||
plugins.fflate.gunzip(chunk, (err, decompressed) => {
|
||||
if (err) {
|
||||
callback(err);
|
||||
@@ -22,32 +26,49 @@ export class CompressGunzipTransform extends plugins.stream.Transform {
|
||||
// DecompressGunzipTransform class that extends the Node.js Transform stream to
|
||||
// create a stream that decompresses GZip-compressed data using fflate's gunzip function
|
||||
export class DecompressGunzipTransform extends plugins.stream.Transform {
|
||||
private gunzip: any; // fflate.Gunzip instance
|
||||
|
||||
constructor() {
|
||||
super();
|
||||
}
|
||||
|
||||
_transform(chunk: Buffer, encoding: BufferEncoding, callback: plugins.stream.TransformCallback) {
|
||||
// Use fflate's gunzip function to decompress the chunk
|
||||
plugins.fflate.gunzip(chunk, (err, decompressed) => {
|
||||
if (err) {
|
||||
// If an error occurs during decompression, pass the error to the callback
|
||||
callback(err);
|
||||
} else {
|
||||
// If decompression is successful, push the decompressed data into the stream
|
||||
this.push(decompressed);
|
||||
callback();
|
||||
|
||||
// Create a streaming Gunzip decompressor
|
||||
this.gunzip = new plugins.fflate.Gunzip((chunk, final) => {
|
||||
// Push decompressed chunks to the output stream
|
||||
this.push(Buffer.from(chunk));
|
||||
if (final) {
|
||||
// Signal end of stream when decompression is complete
|
||||
this.push(null);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
_transform(
|
||||
chunk: Buffer,
|
||||
encoding: BufferEncoding,
|
||||
callback: plugins.stream.TransformCallback,
|
||||
) {
|
||||
try {
|
||||
// Feed chunks to the gunzip stream
|
||||
this.gunzip.push(chunk, false);
|
||||
callback();
|
||||
} catch (err) {
|
||||
callback(err as Error);
|
||||
}
|
||||
}
|
||||
|
||||
_flush(callback: plugins.stream.TransformCallback) {
|
||||
try {
|
||||
// Signal end of input to gunzip
|
||||
this.gunzip.push(new Uint8Array(0), true);
|
||||
callback();
|
||||
} catch (err) {
|
||||
callback(err as Error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export class GzipTools {
|
||||
smartArchiveRef: SmartArchive;
|
||||
|
||||
constructor(smartArchiveRefArg: SmartArchive) {
|
||||
this.smartArchiveRef = smartArchiveRefArg;
|
||||
}
|
||||
constructor() {}
|
||||
|
||||
public getCompressionStream() {
|
||||
return new CompressGunzipTransform();
|
||||
|
@@ -6,7 +6,10 @@ import { GzipTools } from './classes.gziptools.js';
|
||||
import { TarTools } from './classes.tartools.js';
|
||||
import { ZipTools } from './classes.ziptools.js';
|
||||
|
||||
import { ArchiveAnalyzer, type IAnalyzedResult } from './classes.archiveanalyzer.js';
|
||||
import {
|
||||
ArchiveAnalyzer,
|
||||
type IAnalyzedResult,
|
||||
} from './classes.archiveanalyzer.js';
|
||||
|
||||
import type { from } from '@push.rocks/smartrx/dist_ts/smartrx.plugins.rxjs.js';
|
||||
|
||||
@@ -18,14 +21,19 @@ export class SmartArchive {
|
||||
return smartArchiveInstance;
|
||||
}
|
||||
|
||||
public static async fromArchiveFile(filePathArg: string): Promise<SmartArchive> {
|
||||
public static async fromArchiveFile(
|
||||
filePathArg: string,
|
||||
): Promise<SmartArchive> {
|
||||
const smartArchiveInstance = new SmartArchive();
|
||||
smartArchiveInstance.sourceFilePath = filePathArg;
|
||||
return smartArchiveInstance;
|
||||
}
|
||||
|
||||
public static async fromArchiveStream(
|
||||
streamArg: plugins.stream.Readable | plugins.stream.Duplex | plugins.stream.Transform
|
||||
streamArg:
|
||||
| plugins.stream.Readable
|
||||
| plugins.stream.Duplex
|
||||
| plugins.stream.Transform,
|
||||
): Promise<SmartArchive> {
|
||||
const smartArchiveInstance = new SmartArchive();
|
||||
smartArchiveInstance.sourceStream = streamArg;
|
||||
@@ -33,21 +41,27 @@ export class SmartArchive {
|
||||
}
|
||||
|
||||
// INSTANCE
|
||||
public gzipTools = new GzipTools(this);
|
||||
public tarTools = new TarTools();
|
||||
public zipTools = new ZipTools();
|
||||
public gzipTools = new GzipTools();
|
||||
public bzip2Tools = new Bzip2Tools(this);
|
||||
public tarTools = new TarTools(this);
|
||||
public zipTools = new ZipTools(this);
|
||||
public archiveAnalyzer = new ArchiveAnalyzer(this);
|
||||
|
||||
public sourceUrl: string;
|
||||
public sourceFilePath: string;
|
||||
public sourceStream: plugins.stream.Readable | plugins.stream.Duplex | plugins.stream.Transform;
|
||||
public sourceStream:
|
||||
| plugins.stream.Readable
|
||||
| plugins.stream.Duplex
|
||||
| plugins.stream.Transform;
|
||||
|
||||
public archiveName: string;
|
||||
public singleFileMode: boolean = false;
|
||||
|
||||
public addedDirectories: string[] = [];
|
||||
public addedFiles: (plugins.smartfile.SmartFile | plugins.smartfile.StreamFile)[] = [];
|
||||
public addedFiles: (
|
||||
| plugins.smartfile.SmartFile
|
||||
| plugins.smartfile.StreamFile
|
||||
)[] = [];
|
||||
public addedUrls: string[] = [];
|
||||
|
||||
constructor() {}
|
||||
@@ -60,7 +74,12 @@ export class SmartArchive {
|
||||
return this.sourceStream;
|
||||
}
|
||||
if (this.sourceUrl) {
|
||||
const urlStream = await plugins.smartrequest.getStream(this.sourceUrl);
|
||||
const response = await plugins.smartrequest.SmartRequest.create()
|
||||
.url(this.sourceUrl)
|
||||
.get();
|
||||
const webStream = response.stream();
|
||||
// @ts-ignore - Web stream to Node.js stream conversion
|
||||
const urlStream = plugins.stream.Readable.fromWeb(webStream);
|
||||
return urlStream;
|
||||
}
|
||||
if (this.sourceFilePath) {
|
||||
@@ -76,24 +95,35 @@ export class SmartArchive {
|
||||
// return archiveStream;
|
||||
}
|
||||
|
||||
public async exportToFs(targetDir: string, fileNameArg?: string): Promise<void> {
|
||||
public async exportToFs(
|
||||
targetDir: string,
|
||||
fileNameArg?: string,
|
||||
): Promise<void> {
|
||||
const done = plugins.smartpromise.defer<void>();
|
||||
const streamFileStream = await this.exportToStreamOfStreamFiles();
|
||||
streamFileStream.pipe(
|
||||
new plugins.smartstream.SmartDuplex({
|
||||
objectMode: true,
|
||||
writeFunction: async (streamFileArg: plugins.smartfile.StreamFile, streamtools) => {
|
||||
writeFunction: async (
|
||||
streamFileArg: plugins.smartfile.StreamFile,
|
||||
streamtools,
|
||||
) => {
|
||||
const done = plugins.smartpromise.defer<void>();
|
||||
console.log(streamFileArg.relativeFilePath ? streamFileArg.relativeFilePath : 'no relative path');
|
||||
console.log(
|
||||
streamFileArg.relativeFilePath
|
||||
? streamFileArg.relativeFilePath
|
||||
: 'no relative path',
|
||||
);
|
||||
const streamFile = streamFileArg;
|
||||
const readStream = await streamFile.createReadStream();
|
||||
await plugins.smartfile.fs.ensureDir(targetDir);
|
||||
const writePath = plugins.path.join(
|
||||
targetDir,
|
||||
streamFile.relativeFilePath || fileNameArg
|
||||
streamFile.relativeFilePath || fileNameArg,
|
||||
);
|
||||
await plugins.smartfile.fs.ensureDir(plugins.path.dirname(writePath));
|
||||
const writeStream = plugins.smartfile.fsStream.createWriteStream(writePath);
|
||||
const writeStream =
|
||||
plugins.smartfile.fsStream.createWriteStream(writePath);
|
||||
readStream.pipe(writeStream);
|
||||
writeStream.on('finish', () => {
|
||||
done.resolve();
|
||||
@@ -103,15 +133,16 @@ export class SmartArchive {
|
||||
finalFunction: async () => {
|
||||
done.resolve();
|
||||
},
|
||||
})
|
||||
}),
|
||||
);
|
||||
return done.promise;
|
||||
}
|
||||
|
||||
public async exportToStreamOfStreamFiles() {
|
||||
const streamFileIntake = new plugins.smartstream.StreamIntake<plugins.smartfile.StreamFile>({
|
||||
objectMode: true,
|
||||
});
|
||||
const streamFileIntake =
|
||||
new plugins.smartstream.StreamIntake<plugins.smartfile.StreamFile>({
|
||||
objectMode: true,
|
||||
});
|
||||
const archiveStream = await this.getArchiveStream();
|
||||
const createAnalyzedStream = () => this.archiveAnalyzer.getAnalyzedStream();
|
||||
|
||||
@@ -120,15 +151,21 @@ export class SmartArchive {
|
||||
plugins.smartstream.createTransformFunction<IAnalyzedResult, any>(
|
||||
async (analyzedResultChunk) => {
|
||||
if (analyzedResultChunk.fileType?.mime === 'application/x-tar') {
|
||||
const tarStream = analyzedResultChunk.decompressionStream as plugins.tarStream.Extract;
|
||||
const tarStream =
|
||||
analyzedResultChunk.decompressionStream as plugins.tarStream.Extract;
|
||||
tarStream.on('entry', async (header, stream, next) => {
|
||||
if (header.type === 'directory') {
|
||||
console.log(`tar stream directory: ${header.name} ... skipping!`);
|
||||
console.log(
|
||||
`tar stream directory: ${header.name} ... skipping!`,
|
||||
);
|
||||
next();
|
||||
return;
|
||||
}
|
||||
console.log(`tar stream file: ${header.name}`);
|
||||
const streamfile = plugins.smartfile.StreamFile.fromStream(stream, header.name);
|
||||
const streamfile = plugins.smartfile.StreamFile.fromStream(
|
||||
stream,
|
||||
header.name,
|
||||
);
|
||||
streamFileIntake.push(streamfile);
|
||||
stream.on('end', function () {
|
||||
next(); // ready for next entry
|
||||
@@ -138,20 +175,30 @@ export class SmartArchive {
|
||||
console.log('finished');
|
||||
streamFileIntake.signalEnd();
|
||||
});
|
||||
analyzedResultChunk.resultStream.pipe(analyzedResultChunk.decompressionStream);
|
||||
analyzedResultChunk.resultStream.pipe(
|
||||
analyzedResultChunk.decompressionStream,
|
||||
);
|
||||
} else if (analyzedResultChunk.fileType?.mime === 'application/zip') {
|
||||
analyzedResultChunk.resultStream
|
||||
.pipe(analyzedResultChunk.decompressionStream)
|
||||
.pipe(new plugins.smartstream.SmartDuplex({
|
||||
objectMode: true,
|
||||
writeFunction: async (streamFileArg: plugins.smartfile.StreamFile, streamtools) => {
|
||||
streamFileIntake.push(streamFileArg);
|
||||
},
|
||||
finalFunction: async () => {
|
||||
streamFileIntake.signalEnd();
|
||||
}
|
||||
}));
|
||||
} else if (analyzedResultChunk.isArchive && analyzedResultChunk.decompressionStream) {
|
||||
.pipe(
|
||||
new plugins.smartstream.SmartDuplex({
|
||||
objectMode: true,
|
||||
writeFunction: async (
|
||||
streamFileArg: plugins.smartfile.StreamFile,
|
||||
streamtools,
|
||||
) => {
|
||||
streamFileIntake.push(streamFileArg);
|
||||
},
|
||||
finalFunction: async () => {
|
||||
streamFileIntake.signalEnd();
|
||||
},
|
||||
}),
|
||||
);
|
||||
} else if (
|
||||
analyzedResultChunk.isArchive &&
|
||||
analyzedResultChunk.decompressionStream
|
||||
) {
|
||||
analyzedResultChunk.resultStream
|
||||
.pipe(analyzedResultChunk.decompressionStream)
|
||||
.pipe(createAnalyzedStream())
|
||||
@@ -159,7 +206,7 @@ export class SmartArchive {
|
||||
} else {
|
||||
const streamFile = plugins.smartfile.StreamFile.fromStream(
|
||||
analyzedResultChunk.resultStream,
|
||||
analyzedResultChunk.fileType?.ext
|
||||
analyzedResultChunk.fileType?.ext,
|
||||
);
|
||||
streamFileIntake.push(streamFile);
|
||||
streamFileIntake.signalEnd();
|
||||
@@ -167,7 +214,7 @@ export class SmartArchive {
|
||||
},
|
||||
{
|
||||
objectMode: true,
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
archiveStream.pipe(createAnalyzedStream()).pipe(createUnpackStream());
|
||||
|
@@ -3,11 +3,7 @@ import * as plugins from './plugins.js';
|
||||
|
||||
export class TarTools {
|
||||
// INSTANCE
|
||||
smartArchiveRef: SmartArchive;
|
||||
|
||||
constructor(smartArchiveRefArg: SmartArchive) {
|
||||
this.smartArchiveRef = smartArchiveRefArg;
|
||||
}
|
||||
constructor() {}
|
||||
|
||||
// packing
|
||||
public async addFileToPack(
|
||||
@@ -22,15 +18,21 @@ export class TarTools {
|
||||
| plugins.smartfile.StreamFile;
|
||||
byteLength?: number;
|
||||
filePath?: string;
|
||||
}
|
||||
},
|
||||
): Promise<void> {
|
||||
return new Promise<void>(async (resolve, reject) => {
|
||||
let fileName =
|
||||
optionsArg.fileName || optionsArg.content instanceof plugins.smartfile.SmartFile
|
||||
? (optionsArg.content as plugins.smartfile.SmartFile).relative
|
||||
: null || optionsArg.content instanceof plugins.smartfile.StreamFile
|
||||
? (optionsArg.content as plugins.smartfile.StreamFile).relativeFilePath
|
||||
: null || optionsArg.filePath;
|
||||
let fileName: string | null = null;
|
||||
|
||||
if (optionsArg.fileName) {
|
||||
fileName = optionsArg.fileName;
|
||||
} else if (optionsArg.content instanceof plugins.smartfile.SmartFile) {
|
||||
fileName = (optionsArg.content as plugins.smartfile.SmartFile).relative;
|
||||
} else if (optionsArg.content instanceof plugins.smartfile.StreamFile) {
|
||||
fileName = (optionsArg.content as plugins.smartfile.StreamFile)
|
||||
.relativeFilePath;
|
||||
} else if (optionsArg.filePath) {
|
||||
fileName = optionsArg.filePath;
|
||||
}
|
||||
|
||||
/**
|
||||
* contentByteLength is used to set the size of the entry in the tar file
|
||||
@@ -46,15 +48,39 @@ export class TarTools {
|
||||
contentByteLength = await optionsArg.content.getSize(); // assuming SmartFile has getSize method
|
||||
} else if (optionsArg.content instanceof plugins.smartfile.StreamFile) {
|
||||
contentByteLength = await optionsArg.content.getSize(); // assuming StreamFile has getSize method
|
||||
} else if (optionsArg.content instanceof plugins.smartstream.stream.Readable) {
|
||||
} else if (
|
||||
optionsArg.content instanceof plugins.smartstream.stream.Readable
|
||||
) {
|
||||
console.warn(
|
||||
'@push.rocks/smartarchive: When streaming, it is recommended to provide byteLength, if known.'
|
||||
'@push.rocks/smartarchive: When streaming, it is recommended to provide byteLength, if known.',
|
||||
);
|
||||
} else if (optionsArg.filePath) {
|
||||
const fileStat = await plugins.smartfile.fs.stat(optionsArg.filePath);
|
||||
contentByteLength = fileStat.size;
|
||||
}
|
||||
|
||||
/**
|
||||
* here we try to harmonize all kind of entries towards a readable stream
|
||||
*/
|
||||
let content: plugins.smartstream.stream.Readable;
|
||||
if (Buffer.isBuffer(optionsArg.content)) {
|
||||
content = plugins.smartstream.stream.Readable.from(optionsArg.content);
|
||||
} else if (typeof optionsArg.content === 'string') {
|
||||
content = plugins.smartstream.stream.Readable.from(
|
||||
Buffer.from(optionsArg.content),
|
||||
);
|
||||
} else if (optionsArg.content instanceof plugins.smartfile.SmartFile) {
|
||||
content = plugins.smartstream.stream.Readable.from(
|
||||
optionsArg.content.contents,
|
||||
);
|
||||
} else if (optionsArg.content instanceof plugins.smartfile.StreamFile) {
|
||||
content = await optionsArg.content.createReadStream();
|
||||
} else if (
|
||||
optionsArg.content instanceof plugins.smartstream.stream.Readable
|
||||
) {
|
||||
content = optionsArg.content;
|
||||
}
|
||||
|
||||
const entry = pack.entry(
|
||||
{
|
||||
name: fileName,
|
||||
@@ -70,26 +96,11 @@ export class TarTools {
|
||||
} else {
|
||||
resolve();
|
||||
}
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
let content: plugins.smartstream.stream.Readable;
|
||||
if (Buffer.isBuffer(optionsArg.content)) {
|
||||
content = plugins.smartstream.stream.Readable.from(optionsArg.content);
|
||||
} else if (typeof optionsArg.content === 'string') {
|
||||
content = plugins.smartstream.stream.Readable.from(Buffer.from(optionsArg.content));
|
||||
} else if (optionsArg.content instanceof plugins.smartfile.SmartFile) {
|
||||
content = plugins.smartstream.stream.Readable.from(optionsArg.content.contents);
|
||||
} else if (optionsArg.content instanceof plugins.smartfile.StreamFile) {
|
||||
content = await optionsArg.content.createReadStream();
|
||||
} else if (optionsArg.content instanceof plugins.smartstream.stream.Readable) {
|
||||
content = optionsArg.content;
|
||||
}
|
||||
|
||||
content.pipe(entry);
|
||||
entry.on('end', () => {
|
||||
resolve();
|
||||
});
|
||||
resolve();
|
||||
});
|
||||
}
|
||||
|
||||
@@ -98,18 +109,22 @@ export class TarTools {
|
||||
* @param directoryPath
|
||||
*/
|
||||
public async packDirectory(directoryPath: string) {
|
||||
const fileTree = await plugins.smartfile.fs.listFileTree(directoryPath, '**/*');
|
||||
const fileTree = await plugins.smartfile.fs.listFileTree(
|
||||
directoryPath,
|
||||
'**/*',
|
||||
);
|
||||
const pack = await this.getPackStream();
|
||||
for (const filePath of fileTree) {
|
||||
const absolutePath = plugins.path.join(directoryPath, filePath);
|
||||
const fileStat = await plugins.smartfile.fs.stat(absolutePath);
|
||||
await this.addFileToPack(pack, {
|
||||
byteLength: fileStat.size,
|
||||
filePath: filePath,
|
||||
filePath: absolutePath,
|
||||
fileName: filePath,
|
||||
content: plugins.smartfile.fsStream.createReadStream(absolutePath),
|
||||
});
|
||||
}
|
||||
return pack;
|
||||
}
|
||||
|
||||
public async getPackStream() {
|
||||
|
@@ -1,33 +1,39 @@
|
||||
import type { SmartArchive } from './classes.smartarchive.js';
|
||||
import * as plugins from './plugins.js';
|
||||
|
||||
class DecompressZipTransform extends plugins.smartstream.SmartDuplex<ArrayBufferLike> {
|
||||
class DecompressZipTransform extends plugins.smartstream
|
||||
.SmartDuplex<ArrayBufferLike> {
|
||||
private streamtools: plugins.smartstream.IStreamTools;
|
||||
private unzipper = new plugins.fflate.Unzip(async (fileArg) => {
|
||||
let resultBuffer: Buffer;
|
||||
fileArg.ondata = async (flateError, dat, final) => {
|
||||
resultBuffer? resultBuffer = Buffer.concat([resultBuffer, Buffer.from(dat)])
|
||||
: resultBuffer = Buffer.from(dat);
|
||||
resultBuffer
|
||||
? (resultBuffer = Buffer.concat([resultBuffer, Buffer.from(dat)]))
|
||||
: (resultBuffer = Buffer.from(dat));
|
||||
if (final) {
|
||||
const streamFile = plugins.smartfile.StreamFile.fromBuffer(resultBuffer);
|
||||
const streamFile =
|
||||
plugins.smartfile.StreamFile.fromBuffer(resultBuffer);
|
||||
streamFile.relativeFilePath = fileArg.name;
|
||||
this.streamtools.push(streamFile);
|
||||
}
|
||||
}
|
||||
};
|
||||
fileArg.start();
|
||||
});
|
||||
constructor() {
|
||||
super({
|
||||
objectMode: true,
|
||||
writeFunction: async (chunkArg: Buffer, streamtoolsArg) => {
|
||||
this.streamtools? null : this.streamtools = streamtoolsArg;
|
||||
this.unzipper.push(chunkArg, false);
|
||||
writeFunction: async (chunkArg, streamtoolsArg) => {
|
||||
this.streamtools ? null : (this.streamtools = streamtoolsArg);
|
||||
this.unzipper.push(
|
||||
Buffer.isBuffer(chunkArg) ? chunkArg : Buffer.from(chunkArg),
|
||||
false,
|
||||
);
|
||||
},
|
||||
finalFunction: async () => {
|
||||
this.unzipper.push(Buffer.from(''), true);
|
||||
await plugins.smartdelay.delayFor(0);
|
||||
await this.streamtools.push(null);
|
||||
}
|
||||
},
|
||||
});
|
||||
this.unzipper.register(plugins.fflate.UnzipInflate);
|
||||
}
|
||||
@@ -42,7 +48,11 @@ export class CompressZipTransform extends plugins.stream.Transform {
|
||||
this.files = {};
|
||||
}
|
||||
|
||||
_transform(chunk: Buffer, encoding: BufferEncoding, callback: plugins.stream.TransformCallback) {
|
||||
_transform(
|
||||
chunk: Buffer,
|
||||
encoding: BufferEncoding,
|
||||
callback: plugins.stream.TransformCallback,
|
||||
) {
|
||||
// Simple example: storing chunks in memory before finalizing ZIP in _flush
|
||||
this.files['file.txt'] = new Uint8Array(chunk);
|
||||
callback();
|
||||
@@ -61,11 +71,7 @@ export class CompressZipTransform extends plugins.stream.Transform {
|
||||
}
|
||||
|
||||
export class ZipTools {
|
||||
smartArchiveRef: SmartArchive;
|
||||
|
||||
constructor(smartArchiveRefArg: SmartArchive) {
|
||||
this.smartArchiveRef = smartArchiveRefArg;
|
||||
}
|
||||
constructor() {}
|
||||
|
||||
public getCompressionStream() {
|
||||
return new CompressZipTransform();
|
||||
|
@@ -1 +1,4 @@
|
||||
export * from './classes.smartarchive.js';
|
||||
export * from './classes.tartools.js';
|
||||
export * from './classes.ziptools.js';
|
||||
export * from './classes.gziptools.js';
|
||||
|
@@ -2,6 +2,6 @@ import * as plugins from './plugins.js';
|
||||
|
||||
export const packageDir = plugins.path.join(
|
||||
plugins.smartpath.get.dirnameFromImportMetaUrl(import.meta.url),
|
||||
'../'
|
||||
'../',
|
||||
);
|
||||
export const nogitDir = plugins.path.join(packageDir, './.nogit');
|
||||
|
@@ -15,7 +15,17 @@ import * as smartstream from '@push.rocks/smartstream';
|
||||
import * as smartrx from '@push.rocks/smartrx';
|
||||
import * as smarturl from '@push.rocks/smarturl';
|
||||
|
||||
export { smartfile, smartdelay, smartpath, smartpromise, smartrequest, smartunique, smartstream, smartrx, smarturl };
|
||||
export {
|
||||
smartfile,
|
||||
smartdelay,
|
||||
smartpath,
|
||||
smartpromise,
|
||||
smartrequest,
|
||||
smartunique,
|
||||
smartstream,
|
||||
smartrx,
|
||||
smarturl,
|
||||
};
|
||||
|
||||
// third party scope
|
||||
import * as fileType from 'file-type';
|
||||
|
@@ -6,9 +6,9 @@
|
||||
"module": "NodeNext",
|
||||
"moduleResolution": "NodeNext",
|
||||
"esModuleInterop": true,
|
||||
"verbatimModuleSyntax": true
|
||||
"verbatimModuleSyntax": true,
|
||||
"baseUrl": ".",
|
||||
"paths": {}
|
||||
},
|
||||
"exclude": [
|
||||
"dist_*/**/*.d.ts"
|
||||
]
|
||||
"exclude": ["dist_*/**/*.d.ts"]
|
||||
}
|
||||
|
Reference in New Issue
Block a user