Compare commits

...

59 Commits

SHA1 Message Date
3196a02835 4.0.29 2024-06-08 11:06:10 +02:00
de1c46ed0a fix(core): update 2024-06-08 11:06:09 +02:00
b4c7b065fa 4.0.28 2024-06-08 11:03:38 +02:00
93da11a951 fix(core): update 2024-06-08 11:03:37 +02:00
ecd7f6d419 4.0.27 2024-06-08 11:01:56 +02:00
a3ecfe4d99 fix(core): update 2024-06-08 11:01:56 +02:00
f99f6d96c5 4.0.26 2024-06-08 10:47:28 +02:00
b4be70f43a fix(core): update 2024-06-08 10:47:27 +02:00
785e26e72d 4.0.25 2024-06-08 10:30:05 +02:00
e1891a6aa3 fix(core): update 2024-06-08 10:30:03 +02:00
f257c0c5a4 4.0.24 2024-06-06 20:59:05 +02:00
725546e409 fix(core): update 2024-06-06 20:59:04 +02:00
b9645dfb99 4.0.23 2024-06-06 20:48:03 +02:00
b860aca103 fix(core): update 2024-06-06 20:48:02 +02:00
39fb6e8ad1 update description 2024-05-29 14:11:45 +02:00
04968a80b0 update tsconfig 2024-04-14 17:20:20 +02:00
e4a2c143bc update npmextra.json: githost 2024-04-01 21:33:47 +02:00
ed6d186a85 update npmextra.json: githost 2024-04-01 19:57:39 +02:00
553c5dfe99 update npmextra.json: githost 2024-03-30 21:46:35 +01:00
5d94efb9ee 4.0.22 2024-03-17 00:42:19 +01:00
c978ca107b fix(core): update 2024-03-17 00:42:19 +01:00
876c8ce9d8 4.0.21 2024-03-17 00:35:18 +01:00
7327bf1bd0 fix(core): update 2024-03-17 00:35:17 +01:00
2dcb10d233 4.0.20 2024-03-17 00:29:42 +01:00
d53c46fa82 fix(core): update 2024-03-17 00:29:42 +01:00
25e847a9ea 4.0.19 2023-11-14 13:17:06 +01:00
cc0ecb3f16 fix(core): update 2023-11-14 13:17:05 +01:00
2cd0846c74 4.0.18 2023-11-14 10:58:02 +01:00
49ab40af09 fix(core): update 2023-11-14 10:58:01 +01:00
5ff51ff88d 4.0.17 2023-11-14 10:55:20 +01:00
c578a3fdc1 fix(core): update 2023-11-14 10:55:19 +01:00
ad0352a712 4.0.16 2023-11-13 23:14:39 +01:00
f921338fd6 fix(core): update 2023-11-13 23:14:39 +01:00
614dae5ade 4.0.15 2023-11-13 22:11:25 +01:00
f87359fb97 fix(core): update 2023-11-13 22:11:24 +01:00
21da75c09a 4.0.14 2023-11-13 20:41:52 +01:00
fe49d25765 fix(core): update 2023-11-13 20:41:52 +01:00
5b693c6143 4.0.13 2023-11-13 20:11:10 +01:00
3206738da5 fix(core): update 2023-11-13 20:11:10 +01:00
f709421621 4.0.12 2023-11-13 20:00:29 +01:00
75be95fe45 fix(core): update 2023-11-13 20:00:28 +01:00
1113020e17 4.0.11 2023-11-11 21:06:12 +01:00
adf4bb64ad fix(core): update 2023-11-11 21:06:11 +01:00
0f630382e1 4.0.10 2023-11-11 19:58:20 +01:00
fc09d5aeac fix(core): update 2023-11-11 19:58:19 +01:00
e697730559 4.0.9 2023-11-11 19:55:20 +01:00
0e61bd7c9a fix(core): update 2023-11-11 19:55:19 +01:00
be5f3912cf 4.0.8 2023-11-11 19:31:41 +01:00
bb6fa71b99 fix(core): update 2023-11-11 19:31:40 +01:00
338546ebb2 4.0.7 2023-11-11 18:28:51 +01:00
da22f375d2 fix(core): update 2023-11-11 18:28:50 +01:00
3e23534f9d 4.0.6 2023-11-08 17:01:49 +01:00
1323458130 fix(core): update 2023-11-08 17:01:48 +01:00
95069fe5c0 4.0.5 2023-11-07 09:56:57 +01:00
3ca92d0bf3 fix(core): update 2023-11-07 09:56:57 +01:00
1982d28b89 4.0.4 2023-11-07 04:24:12 +01:00
c1842c051c fix(core): update 2023-11-07 04:24:12 +01:00
dc31577725 4.0.3 2023-11-07 04:19:55 +01:00
32de8087ad fix(core): update 2023-11-07 04:19:54 +01:00
18 changed files with 5838 additions and 3959 deletions

license

@@ -1,6 +1,10 @@
The MIT License (MIT)
Copyright (c) 2016 Lossless GmbH
Copyright (c) 2016 Task Venture Capital GmbH
Includes work under MIT license with copyrights:
* Copyright (c) 2017 by Jan Boelsche (jan@lagomorph.de)
* Copyright 2011 by antimatter15 (antimatter15@gmail.com)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

npmextra.json

@@ -6,12 +6,24 @@
"gitzone": {
"projectType": "npm",
"module": {
"githost": "gitlab.com",
"githost": "code.foss.global",
"gitscope": "push.rocks",
"gitrepo": "smartarchive",
"description": "work with archives",
"description": "A library for working with archive files, providing utilities for compressing and decompressing data.",
"npmPackagename": "@push.rocks/smartarchive",
"license": "MIT"
"license": "MIT",
"keywords": [
"archive",
"compression",
"decompression",
"zip",
"tar",
"bzip2",
"gzip"
]
}
},
"tsdoc": {
"legal": "\n## License and Legal Information\n\nThis repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. \n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or if you require further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n"
}
}

package.json

@@ -1,7 +1,7 @@
{
"name": "@push.rocks/smartarchive",
"version": "4.0.2",
"description": "work with archives",
"version": "4.0.29",
"description": "A library for working with archive files, providing utilities for compressing and decompressing data.",
"main": "dist_ts/index.js",
"typings": "dist_ts/index.d.ts",
"type": "module",
@@ -12,35 +12,35 @@
},
"repository": {
"type": "git",
"url": "git+https://github.com/pushrocks/smartarchive.git"
"url": "https://code.foss.global/push.rocks/smartarchive.git"
},
"author": "Lossless GmbH",
"license": "MIT",
"bugs": {
"url": "https://github.com/pushrocks/smartarchive/issues"
},
"homepage": "https://github.com/pushrocks/smartarchive#readme",
"homepage": "https://code.foss.global/push.rocks/smartarchive",
"dependencies": {
"@push.rocks/smartfile": "^11.0.0",
"@push.rocks/smartpath": "^5.0.11",
"@push.rocks/smartdelay": "^3.0.5",
"@push.rocks/smartfile": "^11.0.20",
"@push.rocks/smartpath": "^5.0.18",
"@push.rocks/smartpromise": "^4.0.3",
"@push.rocks/smartrequest": "^2.0.20",
"@push.rocks/smartrequest": "^2.0.22",
"@push.rocks/smartrx": "^3.0.7",
"@push.rocks/smartstream": "^3.0.7",
"@push.rocks/smartunique": "^3.0.6",
"@push.rocks/smartstream": "^3.0.44",
"@push.rocks/smartunique": "^3.0.9",
"@push.rocks/smarturl": "^3.0.7",
"@types/tar-stream": "^3.1.2",
"@types/unbzip2-stream": "^1.4.2",
"fflate": "^0.8.1",
"file-type": "^18.6.0",
"tar-stream": "^3.1.6",
"unbzip2-stream": "^1.4.3"
"@types/tar-stream": "^3.1.3",
"fflate": "^0.8.2",
"file-type": "^19.0.0",
"tar-stream": "^3.1.7",
"through": "^2.3.8"
},
"devDependencies": {
"@git.zone/tsbuild": "^2.1.66",
"@git.zone/tsbuild": "^2.1.80",
"@git.zone/tsrun": "^1.2.44",
"@git.zone/tstest": "^1.0.77",
"@push.rocks/tapbundle": "^5.0.15"
"@git.zone/tstest": "^1.0.90",
"@push.rocks/tapbundle": "^5.0.23"
},
"private": false,
"files": [
@@ -57,5 +57,14 @@
],
"browserslist": [
"last 1 chrome versions"
],
"keywords": [
"archive",
"compression",
"decompression",
"zip",
"tar",
"bzip2",
"gzip"
]
}

pnpm-lock.yaml (generated)

File diff suppressed because it is too large.

readme.hints.md (new file)

@@ -0,0 +1 @@

readme.md

@@ -1,54 +1,110 @@
# @push.rocks/smartarchive
work with archives
## Availability and Links
* [npmjs.org (npm package)](https://www.npmjs.com/package/@push.rocks/smartarchive)
* [gitlab.com (source)](https://gitlab.com/push.rocks/smartarchive)
* [github.com (source mirror)](https://github.com/push.rocks/smartarchive)
* [docs (typedoc)](https://push.rocks.gitlab.io/smartarchive/)
## Install
## Status for master
To install `@push.rocks/smartarchive`, you need to use npm or yarn. Run either of the following commands in your project directory:
Status Category | Status Badge
-- | --
GitLab Pipelines | [![pipeline status](https://gitlab.com/push.rocks/smartarchive/badges/master/pipeline.svg)](https://lossless.cloud)
GitLab Pipeline Test Coverage | [![coverage report](https://gitlab.com/push.rocks/smartarchive/badges/master/coverage.svg)](https://lossless.cloud)
npm | [![npm downloads per month](https://badgen.net/npm/dy/@push.rocks/smartarchive)](https://lossless.cloud)
Snyk | [![Known Vulnerabilities](https://badgen.net/snyk/push.rocks/smartarchive)](https://lossless.cloud)
TypeScript Support | [![TypeScript](https://badgen.net/badge/TypeScript/>=%203.x/blue?icon=typescript)](https://lossless.cloud)
node Support | [![node](https://img.shields.io/badge/node->=%2010.x.x-blue.svg)](https://nodejs.org/dist/latest-v10.x/docs/api/)
Code Style | [![Code Style](https://badgen.net/badge/style/prettier/purple)](https://lossless.cloud)
PackagePhobia (total standalone install weight) | [![PackagePhobia](https://badgen.net/packagephobia/install/@push.rocks/smartarchive)](https://lossless.cloud)
PackagePhobia (package size on registry) | [![PackagePhobia](https://badgen.net/packagephobia/publish/@push.rocks/smartarchive)](https://lossless.cloud)
BundlePhobia (total size when bundled) | [![BundlePhobia](https://badgen.net/bundlephobia/minzip/@push.rocks/smartarchive)](https://lossless.cloud)
```shell
npm install @push.rocks/smartarchive --save
```
or if you prefer yarn:
```shell
yarn add @push.rocks/smartarchive
```
This will add `@push.rocks/smartarchive` to your project's dependencies.
## Usage
Use TypeScript for best in class intellisense.
`@push.rocks/smartarchive` is a powerful module designed to simplify the process of working with archive files such as zip, tar, gzip, and more. It provides an easy-to-use API for extracting, creating, and analyzing archives, making it an ideal choice for projects that require manipulation of archive files.
```javascript
import * as smartarchive from 'smartarchive';
smartarchive
  .get({
    from: 'https://example.com/example.zip',
    toPath: '/some/local/absolute/path',
  })
  .then(/*...*/);
```

For further information read the linked docs at the top of this README.

### Getting Started

First, import `SmartArchive` from `@push.rocks/smartarchive` using ESM syntax:

```typescript
import { SmartArchive } from '@push.rocks/smartarchive';
```
> MIT licensed | **©** [Lossless GmbH](https://lossless.gmbh)
> | By using this npm module you agree to our [privacy policy](https://lossless.gmbh/privacy.html)

[![repo-footer](https://pushrocks.gitlab.io/assets/repo-footer.svg)](https://push.rocks)

## Contribution

We are always happy for code contributions. If you are not the code contributing type that is ok. Still, maintaining Open Source repositories takes considerable time and thought. If you like the quality of what we do and our modules are useful to you we would appreciate a little monthly contribution: You can [contribute one time](https://lossless.link/contribute-onetime) or [contribute monthly](https://lossless.link/contribute). :)

For further information read the linked docs at the top of this readme.

## Legal

> MIT licensed | **©** [Task Venture Capital GmbH](https://task.vc)
> | By using this npm module you agree to our [privacy policy](https://lossless.gmbh/privacy)

### Extracting Archive Files

To extract an archive file, you can use the `SmartArchive.fromArchiveUrl`, `SmartArchive.fromArchiveFile`, or `SmartArchive.fromArchiveStream` method, depending on the source of your archive. Here's an example of extracting an archive from a URL:

```typescript
async function extractArchiveFromURL() {
  const url = 'https://example.com/archive.zip';
  const targetDir = '/path/to/extract';

  const archive = await SmartArchive.fromArchiveUrl(url);
  await archive.exportToFs(targetDir);

  console.log('Archive extracted successfully.');
}
```

### Creating Archive Files

Creating archive files such as zip or tar.gz is straightforward with `smartarchive`. At the moment, you'll prepare the contents programmatically and then compress them. Detailed support for creating archives will be covered in future updates.
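
Until those dedicated creation helpers land, a tar.gz archive can already be assembled with the `TarTools` instance that `SmartArchive` exposes. The following is a minimal sketch, assuming a default `SmartArchive` constructor and Node's built-in `zlib` for the gzip step; the paths are placeholders:

```typescript
import { createWriteStream } from 'fs';
import { createGzip } from 'zlib';
import { SmartArchive } from '@push.rocks/smartarchive';

async function packDirectoryToTarGz(sourceDir: string, targetFile: string) {
  const archive = new SmartArchive();
  // packDirectory walks the directory and adds every file to a tar-stream pack
  const pack = await archive.tarTools.packDirectory(sourceDir);
  pack.finalize(); // no more entries: flush the tar stream
  // gzip the tar stream and write the resulting .tar.gz to disk
  pack.pipe(createGzip()).pipe(createWriteStream(targetFile));
}
```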
### Analyzing Archive Files
Analyzing the content of archives without extracting them can be useful in various scenarios, such as when you need to inspect the archive's content before deciding to extract it. Here's how you might analyze an archive:
```typescript
async function analyzeArchive() {
const url = 'https://example.com/archive.zip';
const archive = await SmartArchive.fromArchiveUrl(url);
const analysisResult = await archive.analyzeContent();
console.log(analysisResult); // Outputs details about the archive content
}
```
Note: `smartarchive` provides foundational classes and methods for interacting with archive files but does not implement an `analyzeContent` method by default; replace it with the appropriate method calls for your implementation or version.
### Stream Operations
`smartarchive` offers streaming operations, allowing you to work with large archives efficiently. Here's an example of using streams to extract an archive:
```typescript
import { createReadStream, createWriteStream } from 'fs';
async function extractArchiveUsingStream() {
const archiveStream = createReadStream('/path/to/archive.zip');
const archive = await SmartArchive.fromArchiveStream(archiveStream);
const extractionStream = await archive.exportToStreamOfStreamFiles();
extractionStream.pipe(createWriteStream('/path/to/destination'));
}
```
### Conclusion
`@push.rocks/smartarchive` simplifies the process of working with various archive formats in JavaScript and TypeScript projects. By providing an easy-to-use API for common archive operations, it enables developers to integrate archive manipulation features into their applications efficiently.
Remember, the examples provided here are to give you a starting point. Depending on your specific use case, you may need to adjust these examples to fit your project's requirements. Always refer to the latest documentation for the most current information and methods available in `@push.rocks/smartarchive`.
For more information and API references, check the official [`@push.rocks/smartarchive` GitHub repository](https://github.com/pushrocks/smartarchive).
## License and Legal Information
This repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository.
**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.
### Trademarks
This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.
### Company Information
Task Venture Capital GmbH
Registered at District court Bremen HRB 35230 HB, Germany
For any legal inquiries or if you require further information, please contact us via email at hello@task.vc.
By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.

@@ -35,7 +35,16 @@ tap.test('should extract existing files on disk', async () => {
const testSmartarchive = await smartarchive.SmartArchive.fromArchiveUrl(
'https://verdaccio.lossless.digital/@pushrocks%2fwebsetup/-/websetup-2.0.14.tgz'
);
const streamfileStream = await testSmartarchive.exportToFs(testPaths.nogitDir);
await testSmartarchive.exportToFs(testPaths.nogitDir);
});
tap.start();
tap.skip.test('should extract a bzip2', async () => {
const dataUrl = 'https://daten.offeneregister.de/de_companies_ocdata.jsonl.bz2';
const testArchive = await smartarchive.SmartArchive.fromArchiveUrl(dataUrl);
await testArchive.exportToFs(
plugins.path.join(testPaths.nogitDir, 'de_companies_ocdata.jsonl'),
'data.jsonl',
);
})
await tap.start();

@@ -3,6 +3,6 @@
*/
export const commitinfo = {
name: '@push.rocks/smartarchive',
version: '4.0.2',
description: 'work with archives'
version: '4.0.29',
description: 'A library for working with archive files, providing utilities for compressing and decompressing data.'
}

ts/bzip2/bititerator.ts (new file)

@@ -0,0 +1,41 @@
var BITMASK = [0, 0x01, 0x03, 0x07, 0x0F, 0x1F, 0x3F, 0x7F, 0xFF];
// returns a function that reads bits.
// takes a buffer iterator as input
export function bitIterator(nextBuffer: () => Buffer) {
var bit = 0, byte = 0;
var bytes = nextBuffer();
var f = function(n) {
if (n === null && bit != 0) { // align to byte boundary
bit = 0
byte++;
return;
}
var result = 0;
while(n > 0) {
if (byte >= bytes.length) {
byte = 0;
bytes = nextBuffer();
}
var left = 8 - bit;
if (bit === 0 && n > 0)
// @ts-ignore
f.bytesRead++;
if (n >= left) {
result <<= left;
result |= (BITMASK[left] & bytes[byte++]);
bit = 0;
n -= left;
} else {
result <<= n;
result |= ((bytes[byte] & (BITMASK[n] << (8 - n - bit))) >> (8 - n - bit));
bit += n;
n = 0;
}
}
return result;
};
// @ts-ignore
f.bytesRead = 0;
return f;
};
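
A rough usage sketch for the bit reader above, with made-up byte values; `bytesRead` is the loosely typed counter that the decompressor polls to track how much input has been consumed:

```typescript
import { bitIterator } from './bititerator.js';

// the iterator pulls a fresh Buffer from the callback whenever the
// current one is exhausted; a single two-byte buffer suffices here
const queue: Buffer[] = [Buffer.from([0b10110100, 0b01100001])];
const readBits = bitIterator(() => queue.shift()!);

console.log(readBits(3)); // 5  (0b101: bits are read MSB-first)
console.log(readBits(5)); // 20 (0b10100: the rest of the first byte)
readBits(null); // align to the next byte boundary (no-op when already aligned)
console.log((readBits as any).bytesRead); // 1 byte consumed so far
```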

ts/bzip2/bzip2.ts (new file)

@@ -0,0 +1,335 @@
export class Bzip2Error extends Error {
public name: string = 'Bzip2Error';
public message: string;
public stack = (new Error()).stack;
constructor(messageArg: string) {
super();
this.message = messageArg;
}
}
var messageArg = {
Error: function(message) {throw new Bzip2Error(message);}
};
export class Bzip2 {
public Bzip2Error = Bzip2Error;
public crcTable =
[
0x00000000, 0x04c11db7, 0x09823b6e, 0x0d4326d9,
0x130476dc, 0x17c56b6b, 0x1a864db2, 0x1e475005,
0x2608edb8, 0x22c9f00f, 0x2f8ad6d6, 0x2b4bcb61,
0x350c9b64, 0x31cd86d3, 0x3c8ea00a, 0x384fbdbd,
0x4c11db70, 0x48d0c6c7, 0x4593e01e, 0x4152fda9,
0x5f15adac, 0x5bd4b01b, 0x569796c2, 0x52568b75,
0x6a1936c8, 0x6ed82b7f, 0x639b0da6, 0x675a1011,
0x791d4014, 0x7ddc5da3, 0x709f7b7a, 0x745e66cd,
0x9823b6e0, 0x9ce2ab57, 0x91a18d8e, 0x95609039,
0x8b27c03c, 0x8fe6dd8b, 0x82a5fb52, 0x8664e6e5,
0xbe2b5b58, 0xbaea46ef, 0xb7a96036, 0xb3687d81,
0xad2f2d84, 0xa9ee3033, 0xa4ad16ea, 0xa06c0b5d,
0xd4326d90, 0xd0f37027, 0xddb056fe, 0xd9714b49,
0xc7361b4c, 0xc3f706fb, 0xceb42022, 0xca753d95,
0xf23a8028, 0xf6fb9d9f, 0xfbb8bb46, 0xff79a6f1,
0xe13ef6f4, 0xe5ffeb43, 0xe8bccd9a, 0xec7dd02d,
0x34867077, 0x30476dc0, 0x3d044b19, 0x39c556ae,
0x278206ab, 0x23431b1c, 0x2e003dc5, 0x2ac12072,
0x128e9dcf, 0x164f8078, 0x1b0ca6a1, 0x1fcdbb16,
0x018aeb13, 0x054bf6a4, 0x0808d07d, 0x0cc9cdca,
0x7897ab07, 0x7c56b6b0, 0x71159069, 0x75d48dde,
0x6b93dddb, 0x6f52c06c, 0x6211e6b5, 0x66d0fb02,
0x5e9f46bf, 0x5a5e5b08, 0x571d7dd1, 0x53dc6066,
0x4d9b3063, 0x495a2dd4, 0x44190b0d, 0x40d816ba,
0xaca5c697, 0xa864db20, 0xa527fdf9, 0xa1e6e04e,
0xbfa1b04b, 0xbb60adfc, 0xb6238b25, 0xb2e29692,
0x8aad2b2f, 0x8e6c3698, 0x832f1041, 0x87ee0df6,
0x99a95df3, 0x9d684044, 0x902b669d, 0x94ea7b2a,
0xe0b41de7, 0xe4750050, 0xe9362689, 0xedf73b3e,
0xf3b06b3b, 0xf771768c, 0xfa325055, 0xfef34de2,
0xc6bcf05f, 0xc27dede8, 0xcf3ecb31, 0xcbffd686,
0xd5b88683, 0xd1799b34, 0xdc3abded, 0xd8fba05a,
0x690ce0ee, 0x6dcdfd59, 0x608edb80, 0x644fc637,
0x7a089632, 0x7ec98b85, 0x738aad5c, 0x774bb0eb,
0x4f040d56, 0x4bc510e1, 0x46863638, 0x42472b8f,
0x5c007b8a, 0x58c1663d, 0x558240e4, 0x51435d53,
0x251d3b9e, 0x21dc2629, 0x2c9f00f0, 0x285e1d47,
0x36194d42, 0x32d850f5, 0x3f9b762c, 0x3b5a6b9b,
0x0315d626, 0x07d4cb91, 0x0a97ed48, 0x0e56f0ff,
0x1011a0fa, 0x14d0bd4d, 0x19939b94, 0x1d528623,
0xf12f560e, 0xf5ee4bb9, 0xf8ad6d60, 0xfc6c70d7,
0xe22b20d2, 0xe6ea3d65, 0xeba91bbc, 0xef68060b,
0xd727bbb6, 0xd3e6a601, 0xdea580d8, 0xda649d6f,
0xc423cd6a, 0xc0e2d0dd, 0xcda1f604, 0xc960ebb3,
0xbd3e8d7e, 0xb9ff90c9, 0xb4bcb610, 0xb07daba7,
0xae3afba2, 0xaafbe615, 0xa7b8c0cc, 0xa379dd7b,
0x9b3660c6, 0x9ff77d71, 0x92b45ba8, 0x9675461f,
0x8832161a, 0x8cf30bad, 0x81b02d74, 0x857130c3,
0x5d8a9099, 0x594b8d2e, 0x5408abf7, 0x50c9b640,
0x4e8ee645, 0x4a4ffbf2, 0x470cdd2b, 0x43cdc09c,
0x7b827d21, 0x7f436096, 0x7200464f, 0x76c15bf8,
0x68860bfd, 0x6c47164a, 0x61043093, 0x65c52d24,
0x119b4be9, 0x155a565e, 0x18197087, 0x1cd86d30,
0x029f3d35, 0x065e2082, 0x0b1d065b, 0x0fdc1bec,
0x3793a651, 0x3352bbe6, 0x3e119d3f, 0x3ad08088,
0x2497d08d, 0x2056cd3a, 0x2d15ebe3, 0x29d4f654,
0xc5a92679, 0xc1683bce, 0xcc2b1d17, 0xc8ea00a0,
0xd6ad50a5, 0xd26c4d12, 0xdf2f6bcb, 0xdbee767c,
0xe3a1cbc1, 0xe760d676, 0xea23f0af, 0xeee2ed18,
0xf0a5bd1d, 0xf464a0aa, 0xf9278673, 0xfde69bc4,
0x89b8fd09, 0x8d79e0be, 0x803ac667, 0x84fbdbd0,
0x9abc8bd5, 0x9e7d9662, 0x933eb0bb, 0x97ffad0c,
0xafb010b1, 0xab710d06, 0xa6322bdf, 0xa2f33668,
0xbcb4666d, 0xb8757bda, 0xb5365d03, 0xb1f740b4
];
array = function(bytes) {
var bit = 0, byte = 0;
var BITMASK = [0, 0x01, 0x03, 0x07, 0x0F, 0x1F, 0x3F, 0x7F, 0xFF ];
return function(n) {
var result = 0;
while(n > 0) {
var left = 8 - bit;
if (n >= left) {
result <<= left;
result |= (BITMASK[left] & bytes[byte++]);
bit = 0;
n -= left;
} else {
result <<= n;
result |= ((bytes[byte] & (BITMASK[n] << (8 - n - bit))) >> (8 - n - bit));
bit += n;
n = 0;
}
}
return result;
}
}
simple = function(srcbuffer, stream) {
var bits = this.array(srcbuffer);
var size = this.header(bits);
var ret = false;
var bufsize = 100000 * size;
var buf = new Int32Array(bufsize);
do {
ret = this.decompress(bits, stream, buf, bufsize);
} while(!ret);
}
header = function(bits) {
this.byteCount = new Int32Array(256);
this.symToByte = new Uint8Array(256);
this.mtfSymbol = new Int32Array(256);
this.selectors = new Uint8Array(0x8000);
if (bits(8*3) != 4348520) messageArg.Error("No magic number found");
var i = bits(8) - 48;
if (i < 1 || i > 9) messageArg.Error("Not a BZIP archive");
return i;
};
decompress = function(bits, stream, buf, bufsize, streamCRC) {
var MAX_HUFCODE_BITS = 20;
var MAX_SYMBOLS = 258;
var SYMBOL_RUNA = 0;
var SYMBOL_RUNB = 1;
var GROUP_SIZE = 50;
var crc = 0 ^ (-1);
for(var h = '', i = 0; i < 6; i++) h += bits(8).toString(16);
if (h == "177245385090") {
var finalCRC = bits(32)|0;
if (finalCRC !== streamCRC) messageArg.Error("Error in bzip2: crc32 do not match");
// align stream to byte
bits(null);
return null; // reset streamCRC for next call
}
if (h != "314159265359") messageArg.Error("eek not valid bzip data");
var crcblock = bits(32)|0; // CRC code
if (bits(1)) messageArg.Error("unsupported obsolete version");
var origPtr = bits(24);
if (origPtr > bufsize) messageArg.Error("Initial position larger than buffer size");
var t = bits(16);
var symTotal = 0;
for (i = 0; i < 16; i++) {
if (t & (1 << (15 - i))) {
var k = bits(16);
for(j = 0; j < 16; j++) {
if (k & (1 << (15 - j))) {
this.symToByte[symTotal++] = (16 * i) + j;
}
}
}
}
var groupCount = bits(3);
if (groupCount < 2 || groupCount > 6) messageArg.Error("another error");
var nSelectors = bits(15);
if (nSelectors == 0) messageArg.Error("meh");
for(var i = 0; i < groupCount; i++) this.mtfSymbol[i] = i;
for(var i = 0; i < nSelectors; i++) {
for(var j = 0; bits(1); j++) if (j >= groupCount) messageArg.Error("whoops another error");
var uc = this.mtfSymbol[j];
for(var k: any = j-1; k>=0; k--) {
this.mtfSymbol[k+1] = this.mtfSymbol[k];
}
this.mtfSymbol[0] = uc;
this.selectors[i] = uc;
}
var symCount = symTotal + 2;
var groups = [];
var length = new Uint8Array(MAX_SYMBOLS),
temp = new Uint16Array(MAX_HUFCODE_BITS+1);
var hufGroup;
for(var j = 0; j < groupCount; j++) {
t = bits(5); //lengths
for(var i = 0; i < symCount; i++) {
while(true){
if (t < 1 || t > MAX_HUFCODE_BITS) messageArg.Error("I gave up a while ago on writing error messages");
if (!bits(1)) break;
if (!bits(1)) t++;
else t--;
}
length[i] = t;
}
var minLen, maxLen;
minLen = maxLen = length[0];
for(var i = 1; i < symCount; i++) {
if (length[i] > maxLen) maxLen = length[i];
else if (length[i] < minLen) minLen = length[i];
}
hufGroup = groups[j] = {};
hufGroup.permute = new Int32Array(MAX_SYMBOLS);
hufGroup.limit = new Int32Array(MAX_HUFCODE_BITS + 1);
hufGroup.base = new Int32Array(MAX_HUFCODE_BITS + 1);
hufGroup.minLen = minLen;
hufGroup.maxLen = maxLen;
var base = hufGroup.base;
var limit = hufGroup.limit;
var pp = 0;
for(var i: number = minLen; i <= maxLen; i++)
for(var t: any = 0; t < symCount; t++)
if (length[t] == i) hufGroup.permute[pp++] = t;
for(i = minLen; i <= maxLen; i++) temp[i] = limit[i] = 0;
for(i = 0; i < symCount; i++) temp[length[i]]++;
pp = t = 0;
for(i = minLen; i < maxLen; i++) {
pp += temp[i];
limit[i] = pp - 1;
pp <<= 1;
base[i+1] = pp - (t += temp[i]);
}
limit[maxLen] = pp + temp[maxLen] - 1;
base[minLen] = 0;
}
for(var i = 0; i < 256; i++) {
this.mtfSymbol[i] = i;
this.byteCount[i] = 0;
}
var runPos, count, symCount: number, selector;
runPos = count = symCount = selector = 0;
while(true) {
if (!(symCount--)) {
symCount = GROUP_SIZE - 1;
if (selector >= nSelectors) messageArg.Error("meow i'm a kitty, that's an error");
hufGroup = groups[this.selectors[selector++]];
base = hufGroup.base;
limit = hufGroup.limit;
}
i = hufGroup.minLen;
j = bits(i);
while(true) {
if (i > hufGroup.maxLen) messageArg.Error("rawr i'm a dinosaur");
if (j <= limit[i]) break;
i++;
j = (j << 1) | bits(1);
}
j -= base[i];
if (j < 0 || j >= MAX_SYMBOLS) messageArg.Error("moo i'm a cow");
var nextSym = hufGroup.permute[j];
if (nextSym == SYMBOL_RUNA || nextSym == SYMBOL_RUNB) {
if (!runPos){
runPos = 1;
t = 0;
}
if (nextSym == SYMBOL_RUNA) t += runPos;
else t += 2 * runPos;
runPos <<= 1;
continue;
}
if (runPos) {
runPos = 0;
if (count + t > bufsize) messageArg.Error("Boom.");
uc = this.symToByte[this.mtfSymbol[0]];
this.byteCount[uc] += t;
while(t--) buf[count++] = uc;
}
if (nextSym > symTotal) break;
if (count >= bufsize) messageArg.Error("I can't think of anything. Error");
i = nextSym - 1;
uc = this.mtfSymbol[i];
for(var k: any = i-1; k>=0; k--) {
this.mtfSymbol[k+1] = this.mtfSymbol[k];
}
this.mtfSymbol[0] = uc
uc = this.symToByte[uc];
this.byteCount[uc]++;
buf[count++] = uc;
}
if (origPtr < 0 || origPtr >= count) messageArg.Error("I'm a monkey and I'm throwing something at someone, namely you");
var j = 0;
for(var i = 0; i < 256; i++) {
k = j + this.byteCount[i];
this.byteCount[i] = j;
j = k;
}
for(var i = 0; i < count; i++) {
uc = buf[i] & 0xff;
buf[this.byteCount[uc]] |= (i << 8);
this.byteCount[uc]++;
}
var pos = 0, current = 0, run = 0;
if (count) {
pos = buf[origPtr];
current = (pos & 0xff);
pos >>= 8;
run = -1;
}
count = count;
var copies, previous, outbyte;
while(count) {
count--;
previous = current;
pos = buf[pos];
current = pos & 0xff;
pos >>= 8;
if (run++ == 3) {
copies = current;
outbyte = previous;
current = -1;
} else {
copies = 1;
outbyte = current;
}
while(copies--) {
crc = ((crc << 8) ^ this.crcTable[((crc>>24) ^ outbyte) & 0xFF])&0xFFFFFFFF; // crc32
stream(outbyte);
}
if (current != previous) run = 0;
}
crc = (crc ^ (-1)) >>> 0;
if ((crc|0) != (crcblock|0)) messageArg.Error("Error in bzip2: crc32 do not match");
streamCRC = (crc ^ ((streamCRC << 1) | (streamCRC >>> 31))) & 0xFFFFFFFF;
return streamCRC;
};
};

ts/bzip2/index.ts (new file)

@@ -0,0 +1,93 @@
import * as plugins from '../plugins.js';
import { Bzip2 } from './bzip2.js';
import { bitIterator } from './bititerator.js';
export function unbzip2Stream() {
const bzip2Instance = new Bzip2();
var bufferQueue = [];
var hasBytes = 0;
var blockSize = 0;
var broken = false;
var done = false;
var bitReader = null;
var streamCRC = null;
function decompressBlock() {
if (!blockSize) {
blockSize = bzip2Instance.header(bitReader);
streamCRC = 0;
} else {
var bufsize = 100000 * blockSize;
var buf = new Int32Array(bufsize);
var chunk = [];
var f = function (b) {
chunk.push(b);
};
streamCRC = bzip2Instance.decompress(bitReader, f, buf, bufsize, streamCRC);
if (streamCRC === null) {
// reset for next bzip2 header
blockSize = 0;
return;
} else {
return Buffer.from(chunk);
}
}
}
var outlength = 0;
const decompressAndPush = async () => {
if (broken) return;
try {
const resultChunk = decompressBlock();
if (resultChunk) {
outlength += resultChunk.length;
}
return resultChunk;
} catch (e) {
console.error(e);
broken = true;
}
};
let counter = 0;
return new plugins.smartstream.SmartDuplex({
objectMode: true,
name: 'bzip2',
debug: false,
highWaterMark: 1,
writeFunction: async function (data, streamTools) {
// console.log(`got chunk ${counter++}`)
bufferQueue.push(data);
hasBytes += data.length;
if (bitReader === null) {
bitReader = bitIterator(function () {
return bufferQueue.shift();
});
}
while (!broken && hasBytes - bitReader.bytesRead + 1 >= (25000 + 100000 * blockSize || 4)) {
//console.error('decompressing with', hasBytes - bitReader.bytesRead + 1, 'bytes in buffer');
const result = await decompressAndPush();
if (!result) {
continue;
}
// console.log(result.toString());
await streamTools.push(result);
}
},
finalFunction: async function (streamTools) {
//console.error(x,'last compressing with', hasBytes, 'bytes in buffer');
while (!broken && bitReader && hasBytes > bitReader.bytesRead) {
const result = await decompressAndPush();
if (!result) {
continue;
}
await streamTools.push(result);
}
if (!broken) {
if (streamCRC !== null) this.emit('error', new Error('input stream ended prematurely'));
}
},
});
}
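
A short consumption sketch (file names are placeholders): the returned `SmartDuplex` takes raw `.bz2` chunks on its writable side and emits decompressed `Buffer` chunks on its readable side, so it drops into an ordinary pipe chain:

```typescript
import { createReadStream, createWriteStream } from 'fs';
import { unbzip2Stream } from './bzip2/index.js';

createReadStream('./companies.jsonl.bz2')
  .pipe(unbzip2Stream())
  .pipe(createWriteStream('./companies.jsonl'));
```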

ts/classes.archiveanalyzer.ts

@@ -4,7 +4,7 @@ import * as plugins from './plugins.js';
export interface IAnalyzedResult {
fileType: plugins.fileType.FileTypeResult;
isArchive: boolean;
resultStream: plugins.smartstream.PassThrough;
resultStream: plugins.smartstream.SmartDuplex;
decompressionStream: plugins.stream.Transform | plugins.stream.Duplex | plugins.tarStream.Extract;
}
@@ -30,44 +30,52 @@ export class ArchiveAnalyzer {
}
private getDecompressionStream(
private async getDecompressionStream(
mimeTypeArg: plugins.fileType.FileTypeResult['mime']
): plugins.stream.Transform | plugins.stream.Duplex | plugins.tarStream.Extract {
): Promise<plugins.stream.Transform | plugins.stream.Duplex | plugins.tarStream.Extract> {
switch (mimeTypeArg) {
case 'application/gzip':
return this.smartArchiveRef.gzipTools.getDecompressionStream();
case 'application/zip':
return this.smartArchiveRef.zipTools.getDecompressionStream();
case 'application/x-bzip2':
return this.smartArchiveRef.bzip2Tools.getDecompressionStream(); // replace with your own bzip2 decompression stream
return await this.smartArchiveRef.bzip2Tools.getDecompressionStream(); // replace with your own bzip2 decompression stream
case 'application/x-tar':
return this.smartArchiveRef.tarTools.getDecompressionStream(); // replace with your own tar decompression stream
default:
// Handle unsupported formats or no decompression needed
return new plugins.smartstream.PassThrough();
return plugins.smartstream.createPassThrough();
}
}
public getAnalyzedStream() {
let firstRun = true;
const resultStream = new plugins.smartstream.PassThrough();
const resultStream = plugins.smartstream.createPassThrough();
const analyzerstream = new plugins.smartstream.SmartDuplex<Buffer, IAnalyzedResult>({
readableObjectMode: true,
writeAndTransformFunction: async (chunkArg: Buffer, streamtools) => {
const fileType = await plugins.fileType.fileTypeFromBuffer(chunkArg);
const decompressionStream = this.getDecompressionStream(fileType.mime as any);
resultStream.push(chunkArg);
writeFunction: async (chunkArg: Buffer, streamtools) => {
if (firstRun) {
firstRun = false;
const fileType = await plugins.fileType.fileTypeFromBuffer(chunkArg);
const decompressionStream = await this.getDecompressionStream(fileType?.mime as any);
/**
* analyzed stream emits once with this object
*/
const result: IAnalyzedResult = {
fileType,
isArchive: await this.mimeTypeIsArchive(fileType.mime),
isArchive: await this.mimeTypeIsArchive(fileType?.mime),
resultStream,
decompressionStream,
};
streamtools.push(result);
streamtools.push(null);
return null;
await streamtools.push(result);
}
await resultStream.backpressuredPush(chunkArg);
return null;
},
finalFunction: async (tools) => {
resultStream.push(null);
return null;
}
});
return analyzerstream;
}
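
For orientation, a sketch of how the analyzed stream is consumed elsewhere in this changeset (assuming `archive` is a `SmartArchive` instance and `sourceStream` its raw input): the duplex emits a single `IAnalyzedResult`, whose `resultStream` replays the input so it can be piped into `decompressionStream`:

```typescript
const analyzed = archive.archiveAnalyzer.getAnalyzedStream();
sourceStream.pipe(analyzed).on('data', (result: IAnalyzedResult) => {
  console.log(result.fileType?.mime, 'isArchive:', result.isArchive);
  if (result.isArchive && result.decompressionStream) {
    result.resultStream.pipe(result.decompressionStream);
  }
});
```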

ts/classes.bzip2tools.ts

@@ -1,36 +1,7 @@
import type { SmartArchive } from './classes.smartarchive.js';
import * as plugins from './plugins.js';
export class DecompressBzip2Transform extends plugins.stream.Transform {
private bzip2Decompressor: plugins.stream.Transform;
constructor() {
super();
// Initialize the bzip2 decompressor once here
this.bzip2Decompressor = plugins.unbzip2Stream();
this.bzip2Decompressor.on('data', (data: Buffer) => {
// When data is decompressed, push it to the stream
this.push(data);
});
this.bzip2Decompressor.on('error', (err) => {
// If an error occurs, emit it on this stream
this.emit('error', err);
});
}
_transform(chunk: Buffer, encoding: BufferEncoding, callback: plugins.stream.TransformCallback) {
// Pass the chunk directly to the decompressor
// The decompressor will handle the state across chunks
this.bzip2Decompressor.write(chunk);
callback();
}
_flush(callback: plugins.stream.TransformCallback) {
// When the stream is ending, end the decompressor stream as well
this.bzip2Decompressor.end();
callback();
}
}
import { unbzip2Stream } from './bzip2/index.js';
export class Bzip2Tools {
smartArchiveRef: SmartArchive;
@@ -40,6 +11,6 @@ export class Bzip2Tools {
}
getDecompressionStream() {
return new DecompressBzip2Transform();
return unbzip2Stream();
}
}

ts/classes.smartarchive.ts

@@ -1,9 +1,10 @@
import * as plugins from './plugins.js';
import * as paths from './paths.js';
import { Bzip2Tools } from './classes.bzip2tools.js';
import { GzipTools } from './classes.gziptools.js';
import { TarTools } from './classes.tartools.js';
import { Bzip2Tools } from './classes.bzip2tools.js';
import { ZipTools } from './classes.ziptools.js';
import { ArchiveAnalyzer, type IAnalyzedResult } from './classes.archiveanalyzer.js';
@@ -32,9 +33,10 @@ export class SmartArchive {
}
// INSTANCE
public tarTools = new TarTools(this);
public gzipTools = new GzipTools(this);
public bzip2Tools = new Bzip2Tools(this);
public tarTools = new TarTools(this);
public zipTools = new ZipTools(this);
public archiveAnalyzer = new ArchiveAnalyzer(this);
public sourceUrl: string;
@@ -74,24 +76,35 @@ export class SmartArchive {
// return archiveStream;
}
public async exportToFs(targetDir: string): Promise<void> {
public async exportToFs(targetDir: string, fileNameArg?: string): Promise<void> {
const done = plugins.smartpromise.defer<void>();
const streamFileStream = await this.exportToStreamOfStreamFiles();
streamFileStream.pipe(new plugins.smartstream.SmartDuplex({
objectMode: true,
writeAndTransformFunction: async (chunkArg: plugins.smartfile.StreamFile, streamtools) => {
console.log(chunkArg.relativeFilePath);
const streamFile = chunkArg;
const readStream = await streamFile.createReadStream();
const writePath = plugins.path.join(targetDir + streamFile.relativeFilePath);
const dir = plugins.path.parse(writePath).dir;
await plugins.smartfile.fs.ensureDir(plugins.path.dirname(dir));
const writeStream = plugins.smartfile.fsStream.createWriteStream(writePath);
readStream.pipe(writeStream).end(() => {
streamFileStream.pipe(
new plugins.smartstream.SmartDuplex({
objectMode: true,
writeFunction: async (streamFileArg: plugins.smartfile.StreamFile, streamtools) => {
const done = plugins.smartpromise.defer<void>();
console.log(streamFileArg.relativeFilePath ? streamFileArg.relativeFilePath : 'no relative path');
const streamFile = streamFileArg;
const readStream = await streamFile.createReadStream();
await plugins.smartfile.fs.ensureDir(targetDir);
const writePath = plugins.path.join(
targetDir,
streamFile.relativeFilePath || fileNameArg
);
await plugins.smartfile.fs.ensureDir(plugins.path.dirname(writePath));
const writeStream = plugins.smartfile.fsStream.createWriteStream(writePath);
readStream.pipe(writeStream);
writeStream.on('finish', () => {
done.resolve();
});
await done.promise;
},
finalFunction: async () => {
done.resolve();
});
},
}));
},
})
);
return done.promise;
}
@@ -106,22 +119,38 @@ export class SmartArchive {
const createUnpackStream = () =>
plugins.smartstream.createTransformFunction<IAnalyzedResult, any>(
async (analyzedResultChunk) => {
if (analyzedResultChunk.fileType.mime === 'application/x-tar') {
if (analyzedResultChunk.fileType?.mime === 'application/x-tar') {
const tarStream = analyzedResultChunk.decompressionStream as plugins.tarStream.Extract;
tarStream.on(
'entry',
async (header, stream, next) => {
const streamfile = plugins.smartfile.StreamFile.fromStream(stream, header.name);
streamFileIntake.push(streamfile);
stream.on('end', function () {
next(); // ready for next entry
});
tarStream.on('entry', async (header, stream, next) => {
if (header.type === 'directory') {
console.log(`tar stream directory: ${header.name} ... skipping!`);
next();
return;
}
);
console.log(`tar stream file: ${header.name}`);
const streamfile = plugins.smartfile.StreamFile.fromStream(stream, header.name);
streamFileIntake.push(streamfile);
stream.on('end', function () {
next(); // ready for next entry
});
});
tarStream.on('finish', function () {
console.log('finished');
streamFileIntake.signalEnd();
})
});
analyzedResultChunk.resultStream.pipe(analyzedResultChunk.decompressionStream);
} else if (analyzedResultChunk.fileType?.mime === 'application/zip') {
analyzedResultChunk.resultStream
.pipe(analyzedResultChunk.decompressionStream)
.pipe(new plugins.smartstream.SmartDuplex({
objectMode: true,
writeFunction: async (streamFileArg: plugins.smartfile.StreamFile, streamtools) => {
streamFileIntake.push(streamFileArg);
},
finalFunction: async () => {
streamFileIntake.signalEnd();
}
}));
} else if (analyzedResultChunk.isArchive && analyzedResultChunk.decompressionStream) {
analyzedResultChunk.resultStream
.pipe(analyzedResultChunk.decompressionStream)
@@ -130,10 +159,14 @@ export class SmartArchive {
} else {
const streamFile = plugins.smartfile.StreamFile.fromStream(
analyzedResultChunk.resultStream,
analyzedResultChunk.fileType.ext
analyzedResultChunk.fileType?.ext
);
streamFileIntake.push(streamFile);
streamFileIntake.signalEnd();
}
},
{
objectMode: true,
}
);

ts/classes.tartools.ts

@@ -2,28 +2,120 @@ import type { SmartArchive } from './classes.smartarchive.js';
import * as plugins from './plugins.js';
export class TarTools {
// INSTANCE
smartArchiveRef: SmartArchive;
constructor(smartArchiveRefArg: SmartArchive) {
this.smartArchiveRef = smartArchiveRefArg;
}
// packing
public addFileToPack(pack: plugins.tarStream.Pack, fileName: string, content: string | Buffer) {
return new Promise<void>((resolve, reject) => {
const entry = pack.entry({ name: fileName, size: content.length }, (err: Error) => {
if (err) {
reject(err);
} else {
resolve();
}
});
entry.write(content);
entry.end();
// packing
public async addFileToPack(
pack: plugins.tarStream.Pack,
optionsArg: {
fileName?: string;
content?:
| string
| Buffer
| plugins.smartstream.stream.Readable
| plugins.smartfile.SmartFile
| plugins.smartfile.StreamFile;
byteLength?: number;
filePath?: string;
}
): Promise<void> {
return new Promise<void>(async (resolve, reject) => {
let fileName =
optionsArg.fileName || optionsArg.content instanceof plugins.smartfile.SmartFile
? (optionsArg.content as plugins.smartfile.SmartFile).relative
: null || optionsArg.content instanceof plugins.smartfile.StreamFile
? (optionsArg.content as plugins.smartfile.StreamFile).relativeFilePath
: null || optionsArg.filePath;
/**
* contentByteLength is used to set the size of the entry in the tar file
*/
let contentByteLength: number;
if (optionsArg.byteLength) {
contentByteLength = optionsArg.byteLength;
} else if (typeof optionsArg.content === 'string') {
contentByteLength = Buffer.byteLength(optionsArg.content, 'utf8');
} else if (Buffer.isBuffer(optionsArg.content)) {
contentByteLength = optionsArg.content.length;
} else if (optionsArg.content instanceof plugins.smartfile.SmartFile) {
contentByteLength = await optionsArg.content.getSize(); // assuming SmartFile has getSize method
} else if (optionsArg.content instanceof plugins.smartfile.StreamFile) {
contentByteLength = await optionsArg.content.getSize(); // assuming StreamFile has getSize method
} else if (optionsArg.content instanceof plugins.smartstream.stream.Readable) {
console.warn(
'@push.rocks/smartarchive: When streaming, it is recommended to provide byteLength, if known.'
);
} else if (optionsArg.filePath) {
const fileStat = await plugins.smartfile.fs.stat(optionsArg.filePath);
contentByteLength = fileStat.size;
}
/**
* here we try to harmonize all kind of entries towards a readable stream
*/
let content: plugins.smartstream.stream.Readable;
if (Buffer.isBuffer(optionsArg.content)) {
content = plugins.smartstream.stream.Readable.from(optionsArg.content);
} else if (typeof optionsArg.content === 'string') {
content = plugins.smartstream.stream.Readable.from(Buffer.from(optionsArg.content));
} else if (optionsArg.content instanceof plugins.smartfile.SmartFile) {
content = plugins.smartstream.stream.Readable.from(optionsArg.content.contents);
} else if (optionsArg.content instanceof plugins.smartfile.StreamFile) {
content = await optionsArg.content.createReadStream();
} else if (optionsArg.content instanceof plugins.smartstream.stream.Readable) {
content = optionsArg.content;
}
const entry = pack.entry(
{
name: fileName,
...(contentByteLength
? {
size: contentByteLength,
}
: null),
},
(err: Error) => {
if (err) {
reject(err);
} else {
resolve();
}
}
);
content.pipe(entry);
entry.on('end', () => {
resolve();
});
});
}
/**
* packs a directory from disk into a tar stream
* @param directoryPath
*/
public async packDirectory(directoryPath: string) {
const fileTree = await plugins.smartfile.fs.listFileTree(directoryPath, '**/*');
const pack = await this.getPackStream();
for (const filePath of fileTree) {
const absolutePath = plugins.path.join(directoryPath, filePath);
const fileStat = await plugins.smartfile.fs.stat(absolutePath);
await this.addFileToPack(pack, {
byteLength: fileStat.size,
filePath: filePath,
fileName: filePath,
content: plugins.smartfile.fsStream.createReadStream(absolutePath),
});
}
return pack;
}
public async getPackStream() {
const pack = plugins.tarStream.pack();
return pack;
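
A usage sketch for the new options-object `addFileToPack` API (paths are placeholders; `finalize()` comes from the underlying tar-stream pack and signals that no further entries follow):

```typescript
import { createWriteStream } from 'fs';

// assuming `archive` is a SmartArchive instance
const pack = await archive.tarTools.getPackStream();
await archive.tarTools.addFileToPack(pack, {
  fileName: 'hello.txt',
  content: 'hello world', // byteLength is derived from the string
});
pack.finalize();
pack.pipe(createWriteStream('./out.tar'));
```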

ts/classes.ziptools.ts (new file)

@@ -0,0 +1,77 @@
import type { SmartArchive } from './classes.smartarchive.js';
import * as plugins from './plugins.js';
class DecompressZipTransform extends plugins.smartstream.SmartDuplex<ArrayBufferLike> {
private streamtools: plugins.smartstream.IStreamTools;
private unzipper = new plugins.fflate.Unzip(async (fileArg) => {
let resultBuffer: Buffer;
fileArg.ondata = async (flateError, dat, final) => {
resultBuffer? resultBuffer = Buffer.concat([resultBuffer, Buffer.from(dat)])
: resultBuffer = Buffer.from(dat);
if (final) {
const streamFile = plugins.smartfile.StreamFile.fromBuffer(resultBuffer);
streamFile.relativeFilePath = fileArg.name;
this.streamtools.push(streamFile);
}
}
fileArg.start();
});
constructor() {
super({
objectMode: true,
writeFunction: async (chunkArg: Buffer, streamtoolsArg) => {
this.streamtools? null : this.streamtools = streamtoolsArg;
this.unzipper.push(chunkArg, false);
},
finalFunction: async () => {
this.unzipper.push(Buffer.from(''), true);
await plugins.smartdelay.delayFor(0);
await this.streamtools.push(null);
}
});
this.unzipper.register(plugins.fflate.UnzipInflate);
}
}
// This class wraps fflate's zip in a Node.js Transform stream for compression
export class CompressZipTransform extends plugins.stream.Transform {
files: { [fileName: string]: Uint8Array };
constructor() {
super();
this.files = {};
}
_transform(chunk: Buffer, encoding: BufferEncoding, callback: plugins.stream.TransformCallback) {
// Simple example: storing chunks in memory before finalizing ZIP in _flush
this.files['file.txt'] = new Uint8Array(chunk);
callback();
}
_flush(callback: plugins.stream.TransformCallback) {
plugins.fflate.zip(this.files, (err, zipped) => {
if (err) {
callback(err);
} else {
this.push(Buffer.from(zipped));
callback();
}
});
}
}
export class ZipTools {
smartArchiveRef: SmartArchive;
constructor(smartArchiveRefArg: SmartArchive) {
this.smartArchiveRef = smartArchiveRefArg;
}
public getCompressionStream() {
return new CompressZipTransform();
}
public getDecompressionStream() {
return new DecompressZipTransform();
}
}
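
A consumption sketch (paths are placeholders): the decompression duplex emits one `StreamFile` per zip entry on its readable side:

```typescript
import { createReadStream } from 'fs';
import type { StreamFile } from '@push.rocks/smartfile';

// assuming `archive` is a SmartArchive instance
createReadStream('./bundle.zip')
  .pipe(archive.zipTools.getDecompressionStream())
  .on('data', (file: StreamFile) => {
    console.log('unzipped entry:', file.relativeFilePath);
  });
```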

ts/index.ts

@@ -1 +1,3 @@
export * from './classes.smartarchive.js';
export * from './classes.tartools.js';
export * from './classes.ziptools.js';

ts/plugins.ts

@@ -6,6 +6,7 @@ export { path, stream };
// @pushrocks scope
import * as smartfile from '@push.rocks/smartfile';
import * as smartdelay from '@push.rocks/smartdelay';
import * as smartpath from '@push.rocks/smartpath';
import * as smartpromise from '@push.rocks/smartpromise';
import * as smartrequest from '@push.rocks/smartrequest';
@@ -14,12 +15,11 @@ import * as smartrx from '@push.rocks/smartrx';
import * as smartrx from '@push.rocks/smartrx';
import * as smarturl from '@push.rocks/smarturl';
export { smartfile, smartpath, smartpromise, smartrequest, smartunique, smartstream, smartrx, smarturl };
export { smartfile, smartdelay, smartpath, smartpromise, smartrequest, smartunique, smartstream, smartrx, smarturl };
// third party scope
import * as fileType from 'file-type';
import * as fflate from 'fflate';
import tarStream from 'tar-stream';
import unbzip2Stream from 'unbzip2-stream';
export { fileType, fflate, tarStream, unbzip2Stream };
export { fileType, fflate, tarStream };