Compare commits (57 commits)

f257c0c5a4, 725546e409, b9645dfb99, b860aca103, 39fb6e8ad1, 04968a80b0,
e4a2c143bc, ed6d186a85, 553c5dfe99, 5d94efb9ee, c978ca107b, 876c8ce9d8,
7327bf1bd0, 2dcb10d233, d53c46fa82, 25e847a9ea, cc0ecb3f16, 2cd0846c74,
49ab40af09, 5ff51ff88d, c578a3fdc1, ad0352a712, f921338fd6, 614dae5ade,
f87359fb97, 21da75c09a, fe49d25765, 5b693c6143, 3206738da5, f709421621,
75be95fe45, 1113020e17, adf4bb64ad, 0f630382e1, fc09d5aeac, e697730559,
0e61bd7c9a, be5f3912cf, bb6fa71b99, 338546ebb2, da22f375d2, 3e23534f9d,
1323458130, 95069fe5c0, 3ca92d0bf3, 1982d28b89, c1842c051c, dc31577725,
32de8087ad, e3ab98751d, 401150bd10, 77e75ee0d9, 2f33672374, 79387c02e4,
a9bd693065, 35f66736f0, 99ac52b3d4
@@ -119,6 +119,6 @@ jobs:
       run: |
         npmci node install stable
         npmci npm install
-        pnpm install -g @gitzone/tsdoc
+        pnpm install -g @git.zone/tsdoc
         npmci command tsdoc
       continue-on-error: true
dist_ts/index.d.ts (vendored, 2 lines changed)

@@ -1 +1 @@
-export * from './smartarchive.classes.smartarchive.js';
+export * from './classes.smartarchive.js';
dist_ts/index.js (vendored, 2 lines changed)

@@ -1,2 +1,2 @@
-export * from './smartarchive.classes.smartarchive.js';
-//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi90cy9pbmRleC50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxjQUFjLHdDQUF3QyxDQUFDIn0=
+export * from './classes.smartarchive.js';
+//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi90cy9pbmRleC50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxjQUFjLDJCQUEyQixDQUFDIn0=
license (6 lines changed)

@@ -1,6 +1,10 @@
 The MIT License (MIT)

-Copyright (c) 2016 Lossless GmbH
+Copyright (c) 2016 Task Venture Capital GmbH
+
+Includes work under MIT license with copyrights:
+* Copyright (c) 2017 by Jan Boelsche (jan@lagomorph.de)
+* Copyright 2011 by antimatter15 (antimatter15@gmail.com)

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
npmextra.json

@@ -6,12 +6,24 @@
   "gitzone": {
     "projectType": "npm",
     "module": {
-      "githost": "gitlab.com",
+      "githost": "code.foss.global",
       "gitscope": "push.rocks",
       "gitrepo": "smartarchive",
-      "description": "work with archives",
+      "description": "A library for working with archive files, providing utilities for compressing and decompressing data.",
       "npmPackagename": "@push.rocks/smartarchive",
-      "license": "MIT"
+      "license": "MIT",
+      "keywords": [
+        "archive",
+        "compression",
+        "decompression",
+        "zip",
+        "tar",
+        "bzip2",
+        "gzip"
+      ]
     }
   },
+  "tsdoc": {
+    "legal": "\n## License and Legal Information\n\nThis repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. \n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or if you require further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n"
+  }
 }
package.json (47 lines changed)

@@ -1,7 +1,7 @@
 {
   "name": "@push.rocks/smartarchive",
-  "version": "3.0.7",
-  "description": "work with archives",
+  "version": "4.0.24",
+  "description": "A library for working with archive files, providing utilities for compressing and decompressing data.",
   "main": "dist_ts/index.js",
   "typings": "dist_ts/index.d.ts",
   "type": "module",
@@ -12,33 +12,35 @@
   },
   "repository": {
     "type": "git",
-    "url": "git+https://github.com/pushrocks/smartarchive.git"
+    "url": "https://code.foss.global/push.rocks/smartarchive.git"
   },
   "author": "Lossless GmbH",
   "license": "MIT",
   "bugs": {
     "url": "https://github.com/pushrocks/smartarchive/issues"
   },
-  "homepage": "https://github.com/pushrocks/smartarchive#readme",
+  "homepage": "https://code.foss.global/push.rocks/smartarchive",
   "dependencies": {
-    "@push.rocks/smartfile": "^10.0.28",
+    "@push.rocks/smartdelay": "^3.0.5",
+    "@push.rocks/smartfile": "^11.0.4",
     "@push.rocks/smartpath": "^5.0.11",
     "@push.rocks/smartpromise": "^4.0.3",
-    "@push.rocks/smartrequest": "^2.0.18",
-    "@push.rocks/smartrx": "^3.0.6",
-    "@push.rocks/smartstream": "^2.0.4",
-    "@push.rocks/smartunique": "^3.0.3",
-    "@types/gunzip-maybe": "^1.4.0",
-    "@types/tar-stream": "^2.2.2",
-    "gunzip-maybe": "^1.4.2",
-    "tar": "^6.1.15",
-    "tar-stream": "^3.1.6"
+    "@push.rocks/smartrequest": "^2.0.21",
+    "@push.rocks/smartrx": "^3.0.7",
+    "@push.rocks/smartstream": "^3.0.34",
+    "@push.rocks/smartunique": "^3.0.8",
+    "@push.rocks/smarturl": "^3.0.7",
+    "@types/tar-stream": "^3.1.3",
+    "fflate": "^0.8.2",
+    "file-type": "^19.0.0",
+    "tar-stream": "^3.1.7",
+    "through": "^2.3.8"
   },
   "devDependencies": {
-    "@gitzone/tsbuild": "^2.1.66",
-    "@gitzone/tsrun": "^1.2.44",
-    "@gitzone/tstest": "^1.0.77",
-    "@push.rocks/tapbundle": "^5.0.12"
+    "@git.zone/tsbuild": "^2.1.72",
+    "@git.zone/tsrun": "^1.2.44",
+    "@git.zone/tstest": "^1.0.88",
+    "@push.rocks/tapbundle": "^5.0.17"
   },
   "private": false,
   "files": [
@@ -55,5 +57,14 @@
   ],
   "browserslist": [
     "last 1 chrome versions"
   ],
+  "keywords": [
+    "archive",
+    "compression",
+    "decompression",
+    "zip",
+    "tar",
+    "bzip2",
+    "gzip"
+  ]
 }
pnpm-lock.yaml (generated, 2762 lines changed)

File diff suppressed because it is too large.
readme.hints.md (new file, 1 empty line)
readme.md (130 lines changed)

@@ -1,54 +1,110 @@
 # @push.rocks/smartarchive
-work with archives
-
-## Availabililty and Links
-* [npmjs.org (npm package)](https://www.npmjs.com/package/@push.rocks/smartarchive)
-* [gitlab.com (source)](https://gitlab.com/push.rocks/smartarchive)
-* [github.com (source mirror)](https://github.com/push.rocks/smartarchive)
-* [docs (typedoc)](https://push.rocks.gitlab.io/smartarchive/)
+## Install

-## Status for master
+To install `@push.rocks/smartarchive`, you need to use npm or yarn. Run either of the following commands in your project directory:

-Status Category | Status Badge
--- | --
-GitLab Pipelines | [](https://lossless.cloud)
-GitLab Pipline Test Coverage | [](https://lossless.cloud)
-npm | [](https://lossless.cloud)
-Snyk | [](https://lossless.cloud)
-TypeScript Support | [](https://lossless.cloud)
-node Support | [](https://nodejs.org/dist/latest-v10.x/docs/api/)
-Code Style | [](https://lossless.cloud)
-PackagePhobia (total standalone install weight) | [](https://lossless.cloud)
-PackagePhobia (package size on registry) | [](https://lossless.cloud)
-BundlePhobia (total size when bundled) | [](https://lossless.cloud)
+```shell
+npm install @push.rocks/smartarchive --save
+```
+
+or if you prefer yarn:
+
+```shell
+yarn add @push.rocks/smartarchive
+```
+
+This will add `@push.rocks/smartarchive` to your project's dependencies.

 ## Usage

-Use TypeScript for best in class instellisense.
+`@push.rocks/smartarchive` is a powerful module designed to simplify the process of working with archive files such as zip, tar, gzip, and more. It provides an easy-to-use API for extracting, creating, and analyzing archives, making it an ideal choice for projects that require manipulation of archive files.

-```javascript
-import * as smartarchive from 'smartarchive';
-smartarchive
-  .get({
-    from: 'https://example.com/example.zip',
-    toPath: '/some/local/absolute/path',
-  })
-  .then(/*...*/);
-```
+### Getting Started
+
+First, import `SmartArchive` from `@push.rocks/smartarchive` using ESM syntax:
+
+```typescript
+import { SmartArchive } from '@push.rocks/smartarchive';
+```

-For further information read the linked docs at the top of this README.
+### Extracting Archive Files

-> MIT licensed | **©** [Lossless GmbH](https://lossless.gmbh)
-> | By using this npm module you agree to our [privacy policy](https://lossless.gmbH/privacy.html)
+To extract an archive file, you can use `SmartArchive.fromArchiveUrl`, `SmartArchive.fromArchiveFile`, or `SmartArchive.fromArchiveStream` methods depending on the source of your archive. Here's an example of extracting an archive from a URL:

-[](https://push.rocks)
+```typescript
+async function extractArchiveFromURL() {
+  const url = 'https://example.com/archive.zip';
+  const targetDir = '/path/to/extract';

-## Contribution
+  const archive = await SmartArchive.fromArchiveUrl(url);
+  await archive.exportToFs(targetDir);

-We are always happy for code contributions. If you are not the code contributing type that is ok. Still, maintaining Open Source repositories takes considerable time and thought. If you like the quality of what we do and our modules are useful to you we would appreciate a little monthly contribution: You can [contribute one time](https://lossless.link/contribute-onetime) or [contribute monthly](https://lossless.link/contribute). :)
+  console.log('Archive extracted successfully.');
+}
+```

-For further information read the linked docs at the top of this readme.
+### Creating Archive Files

-## Legal
-> MIT licensed | **©** [Task Venture Capital GmbH](https://task.vc)
-| By using this npm module you agree to our [privacy policy](https://lossless.gmbH/privacy)
+Creating archive files such as zip or tar.gz is straightforward with `smartarchive`. At the moment, you'll prepare the contents programmatically and then compress them. Detailed support for creating archives will be covered in future updates.

+### Analyzing Archive Files
+
+Analyzing the content of archives without extracting them can be useful in various scenarios, such as when you need to inspect the archive's content before deciding to extract it. Here's how you might analyze an archive:
+
+```typescript
+async function analyzeArchive() {
+  const url = 'https://example.com/archive.zip';
+
+  const archive = await SmartArchive.fromArchiveUrl(url);
+  const analysisResult = await archive.analyzeContent();
+
+  console.log(analysisResult); // Outputs details about the archive content
+}
+```
+
+Note: Replace `analyzeContent` with the appropriate method calls as per your implementation or update, as `smartarchive` provides foundational classes and methods for interaction with archive files but does not directly implement an `analyzeContent` method by default.
+
+### Stream Operations
+
+`smartarchive` offers streaming operations, allowing you to work with large archives efficiently. Here's an example of using streams to extract an archive:
+
+```typescript
+import { createReadStream, createWriteStream } from 'fs';
+
+async function extractArchiveUsingStream() {
+  const archiveStream = createReadStream('/path/to/archive.zip');
+  const archive = await SmartArchive.fromArchiveStream(archiveStream);
+  const extractionStream = await archive.exportToStreamOfStreamFiles();
+
+  extractionStream.pipe(createWriteStream('/path/to/destination'));
+}
+```
+
+### Conclusion
+
+`@push.rocks/smartarchive` simplifies the process of working with various archive formats in JavaScript and TypeScript projects. By providing an easy-to-use API for common archive operations, it enables developers to integrate archive manipulation features into their applications efficiently.
+
+Remember, the examples provided here are to give you a starting point. Depending on your specific use case, you may need to adjust these examples to fit your project's requirements. Always refer to the latest documentation for the most current information and methods available in `@push.rocks/smartarchive`.
+
+For more information and API references, check the official [`@push.rocks/smartarchive` GitHub repository](https://github.com/pushrocks/smartarchive).
+
+## License and Legal Information
+
+This repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository.
+
+**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.
+
+### Trademarks
+
+This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.
+
+### Company Information
+
+Task Venture Capital GmbH
+Registered at District court Bremen HRB 35230 HB, Germany
+
+For any legal inquiries or if you require further information, please contact us via email at hello@task.vc.
+
+By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.
test/plugins.ts (new file, 13 lines)

import * as path from 'path';
import * as smartpath from '@push.rocks/smartpath';
import * as smartfile from '@push.rocks/smartfile';
import * as smartrequest from '@push.rocks/smartrequest';
import * as smartstream from '@push.rocks/smartstream';

export {
  path,
  smartpath,
  smartfile,
  smartrequest,
  smartstream,
}
test/test.ts (96 lines changed)

@@ -1,23 +1,14 @@
 import { tap, expect } from '@push.rocks/tapbundle';

-import * as path from 'path';
-import * as smartpath from '@push.rocks/smartpath';
-import * as smartfile from '@push.rocks/smartfile';
-import * as smartrequest from '@push.rocks/smartrequest';
-
-const testPlugins = {
-  path,
-  smartfile,
-  smartrequest,
-};
+import * as plugins from './plugins.js';

 const testPaths = {
-  nogitDir: testPlugins.path.join(
-    smartpath.get.dirnameFromImportMetaUrl(import.meta.url),
+  nogitDir: plugins.path.join(
+    plugins.smartpath.get.dirnameFromImportMetaUrl(import.meta.url),
     '../.nogit/'
   ),
-  remoteDir: testPlugins.path.join(
-    smartpath.get.dirnameFromImportMetaUrl(import.meta.url),
+  remoteDir: plugins.path.join(
+    plugins.smartpath.get.dirnameFromImportMetaUrl(import.meta.url),
     '../.nogit/remote'
   ),
 };
@@ -25,80 +16,35 @@ const testPaths = {
 import * as smartarchive from '../ts/index.js';

 tap.preTask('should prepare .nogit dir', async () => {
-  await testPlugins.smartfile.fs.ensureDir(testPaths.remoteDir);
+  await plugins.smartfile.fs.ensureDir(testPaths.remoteDir);
 });

 tap.preTask('should prepare downloads', async (tools) => {
   const downloadedFile: Buffer = (
-    await testPlugins.smartrequest.getBinary(
+    await plugins.smartrequest.getBinary(
       'https://verdaccio.lossless.digital/@pushrocks%2fwebsetup/-/websetup-2.0.14.tgz'
     )
   ).body;
-  await testPlugins.smartfile.memory.toFs(
+  await plugins.smartfile.memory.toFs(
     downloadedFile,
-    testPlugins.path.join(testPaths.nogitDir, 'test.tgz')
+    plugins.path.join(testPaths.nogitDir, 'test.tgz')
   );
 });

-tap.test('should extract existing files on disk', async () => {
-  const testSmartarchive = new smartarchive.SmartArchive();
-  await testSmartarchive.extractArchiveFromFilePathToFs(
-    testPlugins.path.join(testPaths.nogitDir, 'test.tgz'),
-    testPlugins.path.join(testPaths.nogitDir)
-  );
-});
-
-tap.test('should download a package from the registry', async () => {
-  const testSmartarchive = new smartarchive.SmartArchive();
-  await testSmartarchive.extractArchiveFromUrlToFs(
-    'https://verdaccio.lossless.digital/@pushrocks%2fsmartfile/-/smartfile-7.0.11.tgz',
-    testPaths.remoteDir
-  );
-});
-
-tap.test('should extract a package using tarStream', async (tools) => {
-  const done = tools.defer();
-  const testSmartarchive = new smartarchive.SmartArchive();
-  const testTgzBuffer = (
-    await testPlugins.smartfile.Smartfile.fromFilePath(
-      testPlugins.path.join(testPaths.nogitDir, 'test.tgz')
-    )
-  ).contentBuffer;
-  const extractionFileObservable = await testSmartarchive.extractArchiveFromBufferToObservable(
-    testTgzBuffer
-  );
-  const subscription = extractionFileObservable.subscribe(
-    (file) => {
-      console.log(file.path);
-    },
-    (err) => {
-      console.log(err);
-    },
-    () => {
-      done.resolve();
-    }
-  );
-  await done.promise;
-});
-
 tap.test('should extract a file from url to replaySubject', async (tools) => {
-  const done = tools.defer();
-  const testSmartarchive = new smartarchive.SmartArchive();
-  const extractionFileObservable = await testSmartarchive.extractArchiveFromUrlToObservable(
+  const testSmartarchive = await smartarchive.SmartArchive.fromArchiveUrl(
     'https://verdaccio.lossless.digital/@pushrocks%2fwebsetup/-/websetup-2.0.14.tgz'
   );
-  const subscription = extractionFileObservable.subscribe(
-    (file) => {
-      console.log(file.path);
-    },
-    (err) => {
-      console.log(err);
-    },
-    () => {
-      done.resolve();
-    }
-  );
-  await done.promise;
+  await testSmartarchive.exportToFs(testPaths.nogitDir);
 });

-tap.start();
+tap.skip.test('should extract a b2zip', async () => {
+  const dataUrl = 'https://daten.offeneregister.de/de_companies_ocdata.jsonl.bz2';
+  const testArchive = await smartarchive.SmartArchive.fromArchiveUrl(dataUrl);
+  await testArchive.exportToFs(
+    plugins.path.join(testPaths.nogitDir, 'de_companies_ocdata.jsonl'),
+    'data.jsonl',
+  );
+})
+
+await tap.start();
ts/00_commitinfo_data.ts

@@ -3,6 +3,6 @@
  */
 export const commitinfo = {
   name: '@push.rocks/smartarchive',
-  version: '3.0.7',
-  description: 'work with archives'
+  version: '4.0.24',
+  description: 'A library for working with archive files, providing utilities for compressing and decompressing data.'
 }
ts/bzip2/bititerator.ts (new file, 41 lines)

var BITMASK = [0, 0x01, 0x03, 0x07, 0x0F, 0x1F, 0x3F, 0x7F, 0xFF];

// returns a function that reads bits.
// takes a buffer iterator as input
export function bitIterator(nextBuffer: () => Buffer) {
  var bit = 0, byte = 0;
  var bytes = nextBuffer();
  var f = function(n) {
    if (n === null && bit != 0) { // align to byte boundary
      bit = 0;
      byte++;
      return;
    }
    var result = 0;
    while(n > 0) {
      if (byte >= bytes.length) {
        byte = 0;
        bytes = nextBuffer();
      }
      var left = 8 - bit;
      if (bit === 0 && n > 0)
        // @ts-ignore
        f.bytesRead++;
      if (n >= left) {
        result <<= left;
        result |= (BITMASK[left] & bytes[byte++]);
        bit = 0;
        n -= left;
      } else {
        result <<= n;
        result |= ((bytes[byte] & (BITMASK[n] << (8 - n - bit))) >> (8 - n - bit));
        bit += n;
        n = 0;
      }
    }
    return result;
  };
  // @ts-ignore
  f.bytesRead = 0;
  return f;
};
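For orientation, a small sketch of how this iterator is meant to be consumed (illustrative only, not part of the diff; the two-byte buffer and printed values are made-up). The returned reader takes a bit count and yields those bits as an integer, `null` realigns to the next byte boundary, and `bytesRead` is the counter the streaming wrapper in ts/bzip2/index.ts polls:

```typescript
import { bitIterator } from './bititerator.js';

// one buffer holding the bits 10110100 01100001
const buffers: Buffer[] = [Buffer.from([0b10110100, 0b01100001])];
const readBits = bitIterator(() => buffers.shift()!);

console.log(readBits(3)); // 5  (top three bits: 101)
console.log(readBits(5)); // 20 (remaining five bits: 10100)
readBits(null);           // realign to a byte boundary (already aligned here)
console.log((readBits as any).bytesRead); // 1 byte started so far
```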
ts/bzip2/bzip2.ts (new file, 335 lines)

export class Bzip2Error extends Error {
  public name: string = 'Bzip2Error';
  public message: string;
  public stack = (new Error()).stack;

  constructor(messageArg: string) {
    super();
    this.message = messageArg;
  }
}

var messageArg = {
  Error: function(message) {throw new Bzip2Error(message);}
};

export class Bzip2 {
  public Bzip2Error = Bzip2Error;
  public crcTable = [
    0x00000000, 0x04c11db7, 0x09823b6e, 0x0d4326d9,
    0x130476dc, 0x17c56b6b, 0x1a864db2, 0x1e475005,
    0x2608edb8, 0x22c9f00f, 0x2f8ad6d6, 0x2b4bcb61,
    0x350c9b64, 0x31cd86d3, 0x3c8ea00a, 0x384fbdbd,
    0x4c11db70, 0x48d0c6c7, 0x4593e01e, 0x4152fda9,
    0x5f15adac, 0x5bd4b01b, 0x569796c2, 0x52568b75,
    0x6a1936c8, 0x6ed82b7f, 0x639b0da6, 0x675a1011,
    0x791d4014, 0x7ddc5da3, 0x709f7b7a, 0x745e66cd,
    0x9823b6e0, 0x9ce2ab57, 0x91a18d8e, 0x95609039,
    0x8b27c03c, 0x8fe6dd8b, 0x82a5fb52, 0x8664e6e5,
    0xbe2b5b58, 0xbaea46ef, 0xb7a96036, 0xb3687d81,
    0xad2f2d84, 0xa9ee3033, 0xa4ad16ea, 0xa06c0b5d,
    0xd4326d90, 0xd0f37027, 0xddb056fe, 0xd9714b49,
    0xc7361b4c, 0xc3f706fb, 0xceb42022, 0xca753d95,
    0xf23a8028, 0xf6fb9d9f, 0xfbb8bb46, 0xff79a6f1,
    0xe13ef6f4, 0xe5ffeb43, 0xe8bccd9a, 0xec7dd02d,
    0x34867077, 0x30476dc0, 0x3d044b19, 0x39c556ae,
    0x278206ab, 0x23431b1c, 0x2e003dc5, 0x2ac12072,
    0x128e9dcf, 0x164f8078, 0x1b0ca6a1, 0x1fcdbb16,
    0x018aeb13, 0x054bf6a4, 0x0808d07d, 0x0cc9cdca,
    0x7897ab07, 0x7c56b6b0, 0x71159069, 0x75d48dde,
    0x6b93dddb, 0x6f52c06c, 0x6211e6b5, 0x66d0fb02,
    0x5e9f46bf, 0x5a5e5b08, 0x571d7dd1, 0x53dc6066,
    0x4d9b3063, 0x495a2dd4, 0x44190b0d, 0x40d816ba,
    0xaca5c697, 0xa864db20, 0xa527fdf9, 0xa1e6e04e,
    0xbfa1b04b, 0xbb60adfc, 0xb6238b25, 0xb2e29692,
    0x8aad2b2f, 0x8e6c3698, 0x832f1041, 0x87ee0df6,
    0x99a95df3, 0x9d684044, 0x902b669d, 0x94ea7b2a,
    0xe0b41de7, 0xe4750050, 0xe9362689, 0xedf73b3e,
    0xf3b06b3b, 0xf771768c, 0xfa325055, 0xfef34de2,
    0xc6bcf05f, 0xc27dede8, 0xcf3ecb31, 0xcbffd686,
    0xd5b88683, 0xd1799b34, 0xdc3abded, 0xd8fba05a,
    0x690ce0ee, 0x6dcdfd59, 0x608edb80, 0x644fc637,
    0x7a089632, 0x7ec98b85, 0x738aad5c, 0x774bb0eb,
    0x4f040d56, 0x4bc510e1, 0x46863638, 0x42472b8f,
    0x5c007b8a, 0x58c1663d, 0x558240e4, 0x51435d53,
    0x251d3b9e, 0x21dc2629, 0x2c9f00f0, 0x285e1d47,
    0x36194d42, 0x32d850f5, 0x3f9b762c, 0x3b5a6b9b,
    0x0315d626, 0x07d4cb91, 0x0a97ed48, 0x0e56f0ff,
    0x1011a0fa, 0x14d0bd4d, 0x19939b94, 0x1d528623,
    0xf12f560e, 0xf5ee4bb9, 0xf8ad6d60, 0xfc6c70d7,
    0xe22b20d2, 0xe6ea3d65, 0xeba91bbc, 0xef68060b,
    0xd727bbb6, 0xd3e6a601, 0xdea580d8, 0xda649d6f,
    0xc423cd6a, 0xc0e2d0dd, 0xcda1f604, 0xc960ebb3,
    0xbd3e8d7e, 0xb9ff90c9, 0xb4bcb610, 0xb07daba7,
    0xae3afba2, 0xaafbe615, 0xa7b8c0cc, 0xa379dd7b,
    0x9b3660c6, 0x9ff77d71, 0x92b45ba8, 0x9675461f,
    0x8832161a, 0x8cf30bad, 0x81b02d74, 0x857130c3,
    0x5d8a9099, 0x594b8d2e, 0x5408abf7, 0x50c9b640,
    0x4e8ee645, 0x4a4ffbf2, 0x470cdd2b, 0x43cdc09c,
    0x7b827d21, 0x7f436096, 0x7200464f, 0x76c15bf8,
    0x68860bfd, 0x6c47164a, 0x61043093, 0x65c52d24,
    0x119b4be9, 0x155a565e, 0x18197087, 0x1cd86d30,
    0x029f3d35, 0x065e2082, 0x0b1d065b, 0x0fdc1bec,
    0x3793a651, 0x3352bbe6, 0x3e119d3f, 0x3ad08088,
    0x2497d08d, 0x2056cd3a, 0x2d15ebe3, 0x29d4f654,
    0xc5a92679, 0xc1683bce, 0xcc2b1d17, 0xc8ea00a0,
    0xd6ad50a5, 0xd26c4d12, 0xdf2f6bcb, 0xdbee767c,
    0xe3a1cbc1, 0xe760d676, 0xea23f0af, 0xeee2ed18,
    0xf0a5bd1d, 0xf464a0aa, 0xf9278673, 0xfde69bc4,
    0x89b8fd09, 0x8d79e0be, 0x803ac667, 0x84fbdbd0,
    0x9abc8bd5, 0x9e7d9662, 0x933eb0bb, 0x97ffad0c,
    0xafb010b1, 0xab710d06, 0xa6322bdf, 0xa2f33668,
    0xbcb4666d, 0xb8757bda, 0xb5365d03, 0xb1f740b4
  ];

  array = function(bytes) {
    var bit = 0, byte = 0;
    var BITMASK = [0, 0x01, 0x03, 0x07, 0x0F, 0x1F, 0x3F, 0x7F, 0xFF];
    return function(n) {
      var result = 0;
      while(n > 0) {
        var left = 8 - bit;
        if (n >= left) {
          result <<= left;
          result |= (BITMASK[left] & bytes[byte++]);
          bit = 0;
          n -= left;
        } else {
          result <<= n;
          result |= ((bytes[byte] & (BITMASK[n] << (8 - n - bit))) >> (8 - n - bit));
          bit += n;
          n = 0;
        }
      }
      return result;
    }
  }

  simple = function(srcbuffer, stream) {
    var bits = this.array(srcbuffer);
    var size = this.header(bits);
    var ret = false;
    var bufsize = 100000 * size;
    var buf = new Int32Array(bufsize);

    do {
      ret = this.decompress(bits, stream, buf, bufsize);
    } while(!ret);
  }

  header = function(bits) {
    this.byteCount = new Int32Array(256);
    this.symToByte = new Uint8Array(256);
    this.mtfSymbol = new Int32Array(256);
    this.selectors = new Uint8Array(0x8000);

    if (bits(8*3) != 4348520) messageArg.Error("No magic number found");

    var i = bits(8) - 48;
    if (i < 1 || i > 9) messageArg.Error("Not a BZIP archive");
    return i;
  };

  decompress = function(bits, stream, buf, bufsize, streamCRC) {
    var MAX_HUFCODE_BITS = 20;
    var MAX_SYMBOLS = 258;
    var SYMBOL_RUNA = 0;
    var SYMBOL_RUNB = 1;
    var GROUP_SIZE = 50;
    var crc = 0 ^ (-1);

    for(var h = '', i = 0; i < 6; i++) h += bits(8).toString(16);
    if (h == "177245385090") {
      var finalCRC = bits(32)|0;
      if (finalCRC !== streamCRC) messageArg.Error("Error in bzip2: crc32 do not match");
      // align stream to byte
      bits(null);
      return null; // reset streamCRC for next call
    }
    if (h != "314159265359") messageArg.Error("eek not valid bzip data");
    var crcblock = bits(32)|0; // CRC code
    if (bits(1)) messageArg.Error("unsupported obsolete version");
    var origPtr = bits(24);
    if (origPtr > bufsize) messageArg.Error("Initial position larger than buffer size");
    var t = bits(16);
    var symTotal = 0;
    for (i = 0; i < 16; i++) {
      if (t & (1 << (15 - i))) {
        var k = bits(16);
        for(j = 0; j < 16; j++) {
          if (k & (1 << (15 - j))) {
            this.symToByte[symTotal++] = (16 * i) + j;
          }
        }
      }
    }

    var groupCount = bits(3);
    if (groupCount < 2 || groupCount > 6) messageArg.Error("another error");
    var nSelectors = bits(15);
    if (nSelectors == 0) messageArg.Error("meh");
    for(var i = 0; i < groupCount; i++) this.mtfSymbol[i] = i;

    for(var i = 0; i < nSelectors; i++) {
      for(var j = 0; bits(1); j++) if (j >= groupCount) messageArg.Error("whoops another error");
      var uc = this.mtfSymbol[j];
      for(var k: any = j-1; k>=0; k--) {
        this.mtfSymbol[k+1] = this.mtfSymbol[k];
      }
      this.mtfSymbol[0] = uc;
      this.selectors[i] = uc;
    }

    var symCount = symTotal + 2;
    var groups = [];
    var length = new Uint8Array(MAX_SYMBOLS),
      temp = new Uint16Array(MAX_HUFCODE_BITS+1);

    var hufGroup;

    for(var j = 0; j < groupCount; j++) {
      t = bits(5); //lengths
      for(var i = 0; i < symCount; i++) {
        while(true){
          if (t < 1 || t > MAX_HUFCODE_BITS) messageArg.Error("I gave up a while ago on writing error messages");
          if (!bits(1)) break;
          if (!bits(1)) t++;
          else t--;
        }
        length[i] = t;
      }
      var minLen, maxLen;
      minLen = maxLen = length[0];
      for(var i = 1; i < symCount; i++) {
        if (length[i] > maxLen) maxLen = length[i];
        else if (length[i] < minLen) minLen = length[i];
      }
      hufGroup = groups[j] = {};
      hufGroup.permute = new Int32Array(MAX_SYMBOLS);
      hufGroup.limit = new Int32Array(MAX_HUFCODE_BITS + 1);
      hufGroup.base = new Int32Array(MAX_HUFCODE_BITS + 1);

      hufGroup.minLen = minLen;
      hufGroup.maxLen = maxLen;
      var base = hufGroup.base;
      var limit = hufGroup.limit;
      var pp = 0;
      for(var i: number = minLen; i <= maxLen; i++)
        for(var t: any = 0; t < symCount; t++)
          if (length[t] == i) hufGroup.permute[pp++] = t;
      for(i = minLen; i <= maxLen; i++) temp[i] = limit[i] = 0;
      for(i = 0; i < symCount; i++) temp[length[i]]++;
      pp = t = 0;
      for(i = minLen; i < maxLen; i++) {
        pp += temp[i];
        limit[i] = pp - 1;
        pp <<= 1;
        base[i+1] = pp - (t += temp[i]);
      }
      limit[maxLen] = pp + temp[maxLen] - 1;
      base[minLen] = 0;
    }

    for(var i = 0; i < 256; i++) {
      this.mtfSymbol[i] = i;
      this.byteCount[i] = 0;
    }
    var runPos, count, symCount: number, selector;
    runPos = count = symCount = selector = 0;
    while(true) {
      if (!(symCount--)) {
        symCount = GROUP_SIZE - 1;
        if (selector >= nSelectors) messageArg.Error("meow i'm a kitty, that's an error");
        hufGroup = groups[this.selectors[selector++]];
        base = hufGroup.base;
        limit = hufGroup.limit;
      }
      i = hufGroup.minLen;
      j = bits(i);
      while(true) {
        if (i > hufGroup.maxLen) messageArg.Error("rawr i'm a dinosaur");
        if (j <= limit[i]) break;
        i++;
        j = (j << 1) | bits(1);
      }
      j -= base[i];
      if (j < 0 || j >= MAX_SYMBOLS) messageArg.Error("moo i'm a cow");
      var nextSym = hufGroup.permute[j];
      if (nextSym == SYMBOL_RUNA || nextSym == SYMBOL_RUNB) {
        if (!runPos){
          runPos = 1;
          t = 0;
        }
        if (nextSym == SYMBOL_RUNA) t += runPos;
        else t += 2 * runPos;
        runPos <<= 1;
        continue;
      }
      if (runPos) {
        runPos = 0;
        if (count + t > bufsize) messageArg.Error("Boom.");
        uc = this.symToByte[this.mtfSymbol[0]];
        this.byteCount[uc] += t;
        while(t--) buf[count++] = uc;
      }
      if (nextSym > symTotal) break;
      if (count >= bufsize) messageArg.Error("I can't think of anything. Error");
      i = nextSym - 1;
      uc = this.mtfSymbol[i];
      for(var k: any = i-1; k>=0; k--) {
        this.mtfSymbol[k+1] = this.mtfSymbol[k];
      }
      this.mtfSymbol[0] = uc
      uc = this.symToByte[uc];
      this.byteCount[uc]++;
      buf[count++] = uc;
    }
    if (origPtr < 0 || origPtr >= count) messageArg.Error("I'm a monkey and I'm throwing something at someone, namely you");
    var j = 0;
    for(var i = 0; i < 256; i++) {
      k = j + this.byteCount[i];
      this.byteCount[i] = j;
      j = k;
    }
    for(var i = 0; i < count; i++) {
      uc = buf[i] & 0xff;
      buf[this.byteCount[uc]] |= (i << 8);
      this.byteCount[uc]++;
    }
    var pos = 0, current = 0, run = 0;
    if (count) {
      pos = buf[origPtr];
      current = (pos & 0xff);
      pos >>= 8;
      run = -1;
    }
    count = count;
    var copies, previous, outbyte;
    while(count) {
      count--;
      previous = current;
      pos = buf[pos];
      current = pos & 0xff;
      pos >>= 8;
      if (run++ == 3) {
        copies = current;
        outbyte = previous;
        current = -1;
      } else {
        copies = 1;
        outbyte = current;
      }
      while(copies--) {
        crc = ((crc << 8) ^ this.crcTable[((crc>>24) ^ outbyte) & 0xFF])&0xFFFFFFFF; // crc32
        stream(outbyte);
      }
      if (current != previous) run = 0;
    }

    crc = (crc ^ (-1)) >>> 0;
    if ((crc|0) != (crcblock|0)) messageArg.Error("Error in bzip2: crc32 do not match");
    streamCRC = (crc ^ ((streamCRC << 1) | (streamCRC >>> 31))) & 0xFFFFFFFF;
    return streamCRC;
  };
};
ts/bzip2/index.ts (new file, 93 lines)

import * as plugins from '../plugins.js';

import { Bzip2 } from './bzip2.js';
import { bitIterator } from './bititerator.js';

export function unbzip2Stream() {
  const bzip2Instance = new Bzip2();
  var bufferQueue = [];
  var hasBytes = 0;
  var blockSize = 0;
  var broken = false;
  var done = false;
  var bitReader = null;
  var streamCRC = null;

  function decompressBlock() {
    if (!blockSize) {
      blockSize = bzip2Instance.header(bitReader);
      streamCRC = 0;
    } else {
      var bufsize = 100000 * blockSize;
      var buf = new Int32Array(bufsize);

      var chunk = [];
      var f = function (b) {
        chunk.push(b);
      };

      streamCRC = bzip2Instance.decompress(bitReader, f, buf, bufsize, streamCRC);
      if (streamCRC === null) {
        // reset for next bzip2 header
        blockSize = 0;
        return;
      } else {
        return Buffer.from(chunk);
      }
    }
  }

  var outlength = 0;
  const decompressAndPush = async () => {
    if (broken) return;
    try {
      const resultChunk = decompressBlock();
      if (resultChunk) {
        outlength += resultChunk.length;
      }
      return resultChunk;
    } catch (e) {
      console.error(e);
      broken = true;
    }
  };
  let counter = 0;
  return new plugins.smartstream.SmartDuplex({
    objectMode: true,
    name: 'bzip2',
    debug: false,
    highWaterMark: 1,
    writeFunction: async function (data, streamTools) {
      // console.log(`got chunk ${counter++}`)
      bufferQueue.push(data);
      hasBytes += data.length;
      if (bitReader === null) {
        bitReader = bitIterator(function () {
          return bufferQueue.shift();
        });
      }
      while (!broken && hasBytes - bitReader.bytesRead + 1 >= (25000 + 100000 * blockSize || 4)) {
        //console.error('decompressing with', hasBytes - bitReader.bytesRead + 1, 'bytes in buffer');
        const result = await decompressAndPush();
        if (!result) {
          continue;
        }
        // console.log(result.toString());
        await streamTools.push(result);
      }
    },
    finalFunction: async function (streamTools) {
      //console.error(x,'last compressing with', hasBytes, 'bytes in buffer');
      while (!broken && bitReader && hasBytes > bitReader.bytesRead) {
        const result = await decompressAndPush();
        if (!result) {
          continue;
        }
        await streamTools.push(result);
      }
      if (!broken) {
        if (streamCRC !== null) this.emit('error', new Error('input stream ended prematurely'));
      }
    },
  });
}
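A hedged usage sketch (file names are placeholders, not from the diff): the SmartDuplex returned here behaves as a regular Node duplex, so a .bz2 source can simply be piped through it.

```typescript
import { createReadStream, createWriteStream } from 'fs';
import { unbzip2Stream } from './bzip2/index.js';

// decompress a bzip2 file chunk by chunk; unbzip2Stream() buffers input
// until a whole block is available, then pushes decompressed Buffers
createReadStream('companies.jsonl.bz2')
  .pipe(unbzip2Stream())
  .pipe(createWriteStream('companies.jsonl'));
```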
ts/classes.archiveanalyzer.ts (new file, 82 lines)

import type { SmartArchive } from './classes.smartarchive.js';
import * as plugins from './plugins.js';

export interface IAnalyzedResult {
  fileType: plugins.fileType.FileTypeResult;
  isArchive: boolean;
  resultStream: plugins.smartstream.SmartDuplex;
  decompressionStream: plugins.stream.Transform | plugins.stream.Duplex | plugins.tarStream.Extract;
}

export class ArchiveAnalyzer {
  smartArchiveRef: SmartArchive;

  constructor(smartArchiveRefArg: SmartArchive) {
    this.smartArchiveRef = smartArchiveRefArg;
  }

  private async mimeTypeIsArchive(mimeType: string): Promise<boolean> {
    const archiveMimeTypes: Set<string> = new Set([
      'application/zip',
      'application/x-rar-compressed',
      'application/x-tar',
      'application/gzip',
      'application/x-7z-compressed',
      'application/x-bzip2',
      // Add other archive mime types here
    ]);

    return archiveMimeTypes.has(mimeType);
  }

  private async getDecompressionStream(
    mimeTypeArg: plugins.fileType.FileTypeResult['mime']
  ): Promise<plugins.stream.Transform | plugins.stream.Duplex | plugins.tarStream.Extract> {
    switch (mimeTypeArg) {
      case 'application/gzip':
        return this.smartArchiveRef.gzipTools.getDecompressionStream();
      case 'application/zip':
        return this.smartArchiveRef.zipTools.getDecompressionStream();
      case 'application/x-bzip2':
        return await this.smartArchiveRef.bzip2Tools.getDecompressionStream(); // replace with your own bzip2 decompression stream
      case 'application/x-tar':
        return this.smartArchiveRef.tarTools.getDecompressionStream(); // replace with your own tar decompression stream
      default:
        // Handle unsupported formats or no decompression needed
        return plugins.smartstream.createPassThrough();
    }
  }

  public getAnalyzedStream() {
    let firstRun = true;
    const resultStream = plugins.smartstream.createPassThrough();
    const analyzerstream = new plugins.smartstream.SmartDuplex<Buffer, IAnalyzedResult>({
      readableObjectMode: true,
      writeFunction: async (chunkArg: Buffer, streamtools) => {
        if (firstRun) {
          firstRun = false;
          const fileType = await plugins.fileType.fileTypeFromBuffer(chunkArg);
          const decompressionStream = await this.getDecompressionStream(fileType?.mime as any);
          /**
           * analyzed stream emits once with this object
           */
          const result: IAnalyzedResult = {
            fileType,
            isArchive: await this.mimeTypeIsArchive(fileType?.mime),
            resultStream,
            decompressionStream,
          };
          await streamtools.push(result);
        }
        await resultStream.backpressuredPush(chunkArg);
        return null;
      },
      finalFunction: async (tools) => {
        resultStream.push(null);
        return null;
      }
    });
    return analyzerstream;
  }
}
ts/classes.bzip2tools.ts (new file, 16 lines)

import type { SmartArchive } from './classes.smartarchive.js';
import * as plugins from './plugins.js';

import { unbzip2Stream } from './bzip2/index.js';

export class Bzip2Tools {
  smartArchiveRef: SmartArchive;

  constructor(smartArchiveRefArg: SmartArchive) {
    this.smartArchiveRef = smartArchiveRefArg;
  }

  getDecompressionStream() {
    return unbzip2Stream();
  }
}
ts/classes.gziptools.ts (new file, 59 lines)

import type { SmartArchive } from './classes.smartarchive.js';
import * as plugins from './plugins.js'

// This class wraps fflate's gunzip in a Node.js Transform stream
export class CompressGunzipTransform extends plugins.stream.Transform {
  constructor() {
    super();
  }

  _transform(chunk: Buffer, encoding: BufferEncoding, callback: plugins.stream.TransformCallback) {
    plugins.fflate.gunzip(chunk, (err, decompressed) => {
      if (err) {
        callback(err);
      } else {
        this.push(decompressed);
        callback();
      }
    });
  }
}

// DecompressGunzipTransform class that extends the Node.js Transform stream to
// create a stream that decompresses GZip-compressed data using fflate's gunzip function
export class DecompressGunzipTransform extends plugins.stream.Transform {
  constructor() {
    super();
  }

  _transform(chunk: Buffer, encoding: BufferEncoding, callback: plugins.stream.TransformCallback) {
    // Use fflate's gunzip function to decompress the chunk
    plugins.fflate.gunzip(chunk, (err, decompressed) => {
      if (err) {
        // If an error occurs during decompression, pass the error to the callback
        callback(err);
      } else {
        // If decompression is successful, push the decompressed data into the stream
        this.push(decompressed);
        callback();
      }
    });
  }
}

export class GzipTools {
  smartArchiveRef: SmartArchive;

  constructor(smartArchiveRefArg: SmartArchive) {
    this.smartArchiveRef = smartArchiveRefArg;
  }

  public getCompressionStream() {
    return new CompressGunzipTransform();
  }

  public getDecompressionStream() {
    return new DecompressGunzipTransform();
  }
}
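A minimal usage sketch (assumed file names, not from the diff). One caveat worth noting: `_transform` hands each chunk to `fflate.gunzip` on its own, so this transform suits sources that deliver complete gzip members per chunk.

```typescript
import { createReadStream, createWriteStream } from 'fs';
import { DecompressGunzipTransform } from './classes.gziptools.js';

// inflate data.json.gz to data.json via the Transform defined above
createReadStream('data.json.gz')
  .pipe(new DecompressGunzipTransform())
  .pipe(createWriteStream('data.json'));
```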
ts/classes.smartarchive.ts (new file, 176 lines)

import * as plugins from './plugins.js';
import * as paths from './paths.js';

import { Bzip2Tools } from './classes.bzip2tools.js';
import { GzipTools } from './classes.gziptools.js';
import { TarTools } from './classes.tartools.js';
import { ZipTools } from './classes.ziptools.js';

import { ArchiveAnalyzer, type IAnalyzedResult } from './classes.archiveanalyzer.js';

import type { from } from '@push.rocks/smartrx/dist_ts/smartrx.plugins.rxjs.js';

export class SmartArchive {
  // STATIC
  public static async fromArchiveUrl(urlArg: string): Promise<SmartArchive> {
    const smartArchiveInstance = new SmartArchive();
    smartArchiveInstance.sourceUrl = urlArg;
    return smartArchiveInstance;
  }

  public static async fromArchiveFile(filePathArg: string): Promise<SmartArchive> {
    const smartArchiveInstance = new SmartArchive();
    smartArchiveInstance.sourceFilePath = filePathArg;
    return smartArchiveInstance;
  }

  public static async fromArchiveStream(
    streamArg: plugins.stream.Readable | plugins.stream.Duplex | plugins.stream.Transform
  ): Promise<SmartArchive> {
    const smartArchiveInstance = new SmartArchive();
    smartArchiveInstance.sourceStream = streamArg;
    return smartArchiveInstance;
  }

  // INSTANCE
  public gzipTools = new GzipTools(this);
  public bzip2Tools = new Bzip2Tools(this);
  public tarTools = new TarTools(this);
  public zipTools = new ZipTools(this);
  public archiveAnalyzer = new ArchiveAnalyzer(this);

  public sourceUrl: string;
  public sourceFilePath: string;
  public sourceStream: plugins.stream.Readable | plugins.stream.Duplex | plugins.stream.Transform;

  public archiveName: string;
  public singleFileMode: boolean = false;

  public addedDirectories: string[] = [];
  public addedFiles: (plugins.smartfile.SmartFile | plugins.smartfile.StreamFile)[] = [];
  public addedUrls: string[] = [];

  constructor() {}

  /**
   * gets the original archive stream
   */
  public async getArchiveStream() {
    if (this.sourceStream) {
      return this.sourceStream;
    }
    if (this.sourceUrl) {
      const urlStream = await plugins.smartrequest.getStream(this.sourceUrl);
      return urlStream;
    }
    if (this.sourceFilePath) {
      const fileStream = plugins.smartfile.fs.toReadStream(this.sourceFilePath);
      return fileStream;
    }
  }

  public async exportToTarGzStream() {
    const tarPackStream = await this.tarTools.getPackStream();
    const gzipStream = await this.gzipTools.getCompressionStream();
    // const archiveStream = tarPackStream.pipe(gzipStream);
    // return archiveStream;
  }

  public async exportToFs(targetDir: string, fileNameArg?: string): Promise<void> {
    const done = plugins.smartpromise.defer<void>();
    const streamFileStream = await this.exportToStreamOfStreamFiles();
    streamFileStream.pipe(
      new plugins.smartstream.SmartDuplex({
        objectMode: true,
        writeFunction: async (streamFileArg: plugins.smartfile.StreamFile, streamtools) => {
          const done = plugins.smartpromise.defer<void>();
          console.log(streamFileArg.relativeFilePath ? streamFileArg.relativeFilePath : 'no relative path');
          const streamFile = streamFileArg;
          const readStream = await streamFile.createReadStream();
          await plugins.smartfile.fs.ensureDir(targetDir);
          const writePath = plugins.path.join(
            targetDir,
            streamFile.relativeFilePath || fileNameArg
          );
          await plugins.smartfile.fs.ensureDir(plugins.path.dirname(writePath));
          const writeStream = plugins.smartfile.fsStream.createWriteStream(writePath);
          readStream.pipe(writeStream);
          writeStream.on('finish', () => {
            done.resolve();
          });
          await done.promise;
        },
        finalFunction: async () => {
          done.resolve();
        },
      })
    );
    return done.promise;
  }

  public async exportToStreamOfStreamFiles() {
    const streamFileIntake = new plugins.smartstream.StreamIntake<plugins.smartfile.StreamFile>({
      objectMode: true,
    });
    const archiveStream = await this.getArchiveStream();
    const createAnalyzedStream = () => this.archiveAnalyzer.getAnalyzedStream();

    // lets create a function that can be called multiple times to unpack layers of archives
    const createUnpackStream = () =>
      plugins.smartstream.createTransformFunction<IAnalyzedResult, any>(
        async (analyzedResultChunk) => {
          if (analyzedResultChunk.fileType?.mime === 'application/x-tar') {
            const tarStream = analyzedResultChunk.decompressionStream as plugins.tarStream.Extract;
            tarStream.on('entry', async (header, stream, next) => {
              if (header.type === 'directory') {
                console.log(`tar stream directory: ${header.name} ... skipping!`);
                next();
                return;
              }
              console.log(`tar stream file: ${header.name}`);
              const streamfile = plugins.smartfile.StreamFile.fromStream(stream, header.name);
              streamFileIntake.push(streamfile);
              stream.on('end', function () {
                next(); // ready for next entry
              });
            });
            tarStream.on('finish', function () {
              console.log('finished');
              streamFileIntake.signalEnd();
            });
            analyzedResultChunk.resultStream.pipe(analyzedResultChunk.decompressionStream);
          } else if (analyzedResultChunk.fileType?.mime === 'application/zip') {
            analyzedResultChunk.resultStream
              .pipe(analyzedResultChunk.decompressionStream)
              .pipe(new plugins.smartstream.SmartDuplex({
                objectMode: true,
                writeFunction: async (streamFileArg: plugins.smartfile.StreamFile, streamtools) => {
                  streamFileIntake.push(streamFileArg);
                },
                finalFunction: async () => {
                  streamFileIntake.signalEnd();
                }
              }));
          } else if (analyzedResultChunk.isArchive && analyzedResultChunk.decompressionStream) {
            analyzedResultChunk.resultStream
              .pipe(analyzedResultChunk.decompressionStream)
              .pipe(createAnalyzedStream())
              .pipe(createUnpackStream());
          } else {
            const streamFile = plugins.smartfile.StreamFile.fromStream(
              analyzedResultChunk.resultStream,
              analyzedResultChunk.fileType?.ext
            );
            streamFileIntake.push(streamFile);
            streamFileIntake.signalEnd();
          }
        },
        {
          objectMode: true,
        }
      );

    archiveStream.pipe(createAnalyzedStream()).pipe(createUnpackStream());
    return streamFileIntake;
  }
}
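Tying the class together, a short sketch mirroring the readme examples above (paths are placeholders): pick a source with one of the static factories, then export.

```typescript
import { SmartArchive } from './classes.smartarchive.js';

// analyze -> decompress -> write StreamFiles to disk
const archive = await SmartArchive.fromArchiveFile('/tmp/example.tgz');
await archive.exportToFs('/tmp/extracted');
```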
ts/classes.tartools.ts (new file, 36 lines)

import type { SmartArchive } from './classes.smartarchive.js';
import * as plugins from './plugins.js';

export class TarTools {
  smartArchiveRef: SmartArchive;

  constructor(smartArchiveRefArg: SmartArchive) {
    this.smartArchiveRef = smartArchiveRefArg;
  }

  // packing
  public addFileToPack(pack: plugins.tarStream.Pack, fileName: string, content: string | Buffer) {
    return new Promise<void>((resolve, reject) => {
      const entry = pack.entry({ name: fileName, size: content.length }, (err: Error) => {
        if (err) {
          reject(err);
        } else {
          resolve();
        }
      });

      entry.write(content);
      entry.end();
    });
  }

  public async getPackStream() {
    const pack = plugins.tarStream.pack();
    return pack;
  }

  // extracting
  getDecompressionStream() {
    return plugins.tarStream.extract();
  }
}
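Since exportToTarGzStream above is still stubbed out, packing currently means driving TarTools directly. A hedged sketch (the file name and content are made-up):

```typescript
import { SmartArchive } from './classes.smartarchive.js';

const smartArchive = new SmartArchive();
const pack = await smartArchive.tarTools.getPackStream(); // tar-stream Pack
await smartArchive.tarTools.addFileToPack(pack, 'hello.txt', 'hello tar\n');
pack.finalize(); // no more entries; pack is now a readable tar stream
```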
ts/classes.ziptools.ts (new file, 77 lines)

import type { SmartArchive } from './classes.smartarchive.js';
import * as plugins from './plugins.js';

class DecompressZipTransform extends plugins.smartstream.SmartDuplex<ArrayBufferLike> {
  private streamtools: plugins.smartstream.IStreamTools;
  private unzipper = new plugins.fflate.Unzip(async (fileArg) => {
    let resultBuffer: Buffer;
    fileArg.ondata = async (dataArg, dat, final) => {
      resultBuffer
        ? resultBuffer = Buffer.concat([resultBuffer, Buffer.from(dat)])
        : resultBuffer = Buffer.from(dat);
      if (final) {
        const streamFile = plugins.smartfile.StreamFile.fromBuffer(resultBuffer);
        streamFile.relativeFilePath = fileArg.name;
        this.streamtools.push(streamFile);
      }
    }
    fileArg.start();
  });
  constructor() {
    super({
      objectMode: true,
      writeFunction: async (chunkArg: Buffer, streamtoolsArg) => {
        this.streamtools ? null : this.streamtools = streamtoolsArg;
        this.unzipper.push(chunkArg, false);
      },
      finalFunction: async () => {
        this.unzipper.push(Buffer.from(''), true);
        await plugins.smartdelay.delayFor(0);
        await this.streamtools.push(null);
      }
    });
    this.unzipper.register(plugins.fflate.UnzipInflate);
  }
}

// This class wraps fflate's zip in a Node.js Transform stream for compression
export class CompressZipTransform extends plugins.stream.Transform {
  files: { [fileName: string]: Uint8Array };

  constructor() {
    super();
    this.files = {};
  }

  _transform(chunk: Buffer, encoding: BufferEncoding, callback: plugins.stream.TransformCallback) {
    // Simple example: storing chunks in memory before finalizing ZIP in _flush
    this.files['file.txt'] = new Uint8Array(chunk);
    callback();
  }

  _flush(callback: plugins.stream.TransformCallback) {
    plugins.fflate.zip(this.files, (err, zipped) => {
      if (err) {
        callback(err);
      } else {
        this.push(Buffer.from(zipped));
        callback();
      }
    });
  }
}

export class ZipTools {
  smartArchiveRef: SmartArchive;

  constructor(smartArchiveRefArg: SmartArchive) {
    this.smartArchiveRef = smartArchiveRefArg;
  }

  public getCompressionStream() {
    return new CompressZipTransform();
  }

  public getDecompressionStream() {
    return new DecompressZipTransform();
  }
}
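A usage sketch for the decompression side (the zip path is a placeholder): getDecompressionStream() returns the object-mode duplex above, which emits one smartfile StreamFile per zip entry.

```typescript
import { createReadStream } from 'fs';
import { SmartArchive } from './classes.smartarchive.js';

const smartArchive = new SmartArchive();
createReadStream('bundle.zip')
  .pipe(smartArchive.zipTools.getDecompressionStream())
  .on('data', (streamFile) => {
    console.log(streamFile.relativeFilePath); // path of the unzipped entry
  });
```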
ts/index.ts (2 lines changed)

@@ -1 +1 @@
-export * from './smartarchive.classes.smartarchive.js';
+export * from './classes.smartarchive.js';
ts/paths.ts

@@ -1,4 +1,4 @@
-import * as plugins from './smartarchive.plugins.js';
+import * as plugins from './plugins.js';

 export const packageDir = plugins.path.join(
   plugins.smartpath.get.dirnameFromImportMetaUrl(import.meta.url),
ts/plugins.ts

@@ -1,24 +1,25 @@
 // node native scope
 import * as path from 'path';
+import * as stream from 'stream';

-export { path };
+export { path, stream };

 // @pushrocks scope
 import * as smartfile from '@push.rocks/smartfile';
+import * as smartdelay from '@push.rocks/smartdelay';
 import * as smartpath from '@push.rocks/smartpath';
 import * as smartpromise from '@push.rocks/smartpromise';
 import * as smartrequest from '@push.rocks/smartrequest';
 import * as smartunique from '@push.rocks/smartunique';
 import * as smartstream from '@push.rocks/smartstream';
 import * as smartrx from '@push.rocks/smartrx';
+import * as smarturl from '@push.rocks/smarturl';

-export { smartfile, smartpath, smartpromise, smartrequest, smartunique, smartstream, smartrx };
+export { smartfile, smartdelay, smartpath, smartpromise, smartrequest, smartunique, smartstream, smartrx, smarturl };

 // third party scope
-import gunzipMaybe from 'gunzip-maybe';
-
-// @ts-ignore
-import tar from 'tar';
+import * as fileType from 'file-type';
+import * as fflate from 'fflate';
 import tarStream from 'tar-stream';

-export { gunzipMaybe, tar, tarStream };
+export { fileType, fflate, tarStream };
ts/smartarchive.classes.smartarchive.ts (deleted, 133 lines)

import * as plugins from './smartarchive.plugins.js';
import * as paths from './smartarchive.paths.js';

export class SmartArchive {
  constructor() {}

  /**
   * extracts an archive from a given url
   */
  public async extractArchiveFromUrlToFs(urlArg: string, targetDir: string) {
    const parsedPath = plugins.path.parse(urlArg);
    const uniqueFileName = plugins.smartunique.uni() + parsedPath.ext;
    plugins.smartfile.fs.ensureDir(paths.nogitDir); // TODO: totally remove caching needs
    const downloadPath = plugins.path.join(paths.nogitDir, uniqueFileName);
    const downloadedArchive = (await plugins.smartrequest.getBinary(urlArg)).body;
    await plugins.smartfile.memory.toFs(downloadedArchive, downloadPath);
    await this.extractArchiveFromFilePathToFs(downloadPath, targetDir);
    await plugins.smartfile.fs.remove(downloadPath);
  }

  /**
   * extracts an archive from a given filePath on disk
   * @param filePathArg
   * @param targetDirArg
   */
  public async extractArchiveFromFilePathToFs(filePathArg: string, targetDirArg: string) {
    console.log(`extracting ${filePathArg}`);
    const done = plugins.smartpromise.defer();
    filePathArg = plugins.smartpath.transform.makeAbsolute(filePathArg);
    targetDirArg = plugins.smartpath.transform.makeAbsolute(targetDirArg);
    const readableStream = plugins.smartfile.fsStream.createReadStream(filePathArg);
    const extractPipeStop = plugins.tarStream.extract();
    extractPipeStop.on('entry', async (header, stream, next) => {
      const targetFilePath = plugins.path.join(targetDirArg, header.name);
      const parsedPath = plugins.path.parse(targetFilePath);
      await plugins.smartfile.fs.ensureDir(parsedPath.dir);
      const writeStream = plugins.smartfile.fsStream.createWriteStream(targetFilePath);
      stream.pipe(writeStream);
      stream.on('end', () => {
        console.log(`extracted ${header.name}`);
        next();
      });
      stream.resume();
    });
    extractPipeStop.on('finish', () => {
      console.log(`Sucessfully extracted ${filePathArg}!`);
      done.resolve();
    });

    // lets run the stream
    readableStream.pipe(plugins.gunzipMaybe()).pipe(extractPipeStop);
    await done.promise;
  }

  /**
   * extracts to Observable
   * where the Observable is emitting smartfiles
   */
  public async extractArchiveFromBufferToObservable(
    bufferArg: Buffer
  ): Promise<plugins.smartrx.rxjs.ReplaySubject<plugins.smartfile.Smartfile>> {
    const { intake, replaySubject } = this.extractArchiveWithIntakeAndReplaySubject();
    intake.pushData(bufferArg);
    intake.signalEnd();
    return replaySubject;
  }

  extractArchiveWithIntakeAndReplaySubject() {
    const intake = new plugins.smartstream.StreamIntake<Buffer>();
    const replaySubject = new plugins.smartrx.rxjs.ReplaySubject<plugins.smartfile.Smartfile>();
    const readableStream = intake.getReadableStream();
    const extractPipeStop = plugins.tarStream.extract();
    extractPipeStop.on('entry', (header, stream, next) => {
      let fileBuffer: Buffer;
      stream.on('data', (chunkArg) => {
        if (!fileBuffer) {
          fileBuffer = chunkArg;
        } else {
          fileBuffer = Buffer.concat([fileBuffer, chunkArg]);
        }
      });
      stream.on('end', () => {
        replaySubject.next(
          new plugins.smartfile.Smartfile({
            base: null, // no working directory for this one
            contentBuffer: fileBuffer,
            path: `${header.name}`,
          })
        );
        next();
      });
      stream.resume();
    });
    extractPipeStop.on('finish', () => {
      replaySubject.complete();
    });
    // lets run the stream
    readableStream.pipe(plugins.gunzipMaybe()).pipe(extractPipeStop);
    return {
      intake,
      replaySubject,
    };
  }

  /**
   * extracts to Observable
   */
  public async extractArchiveFromUrlToObservable(
    urlArg: string
  ): Promise<plugins.smartrx.rxjs.ReplaySubject<plugins.smartfile.Smartfile>> {
    const response = await plugins.smartrequest.getBinary(urlArg);
    const replaySubject = this.extractArchiveFromBufferToObservable(response.body);
    return replaySubject;
  }

  // TODO
  public async extractArchiveFromUrlToStream() {}

  // TODO
  public async extractArchiveFromFilePathToStream() {}

  // TODO
  public async extractArchiveFromStreamToStream() {}

  // TODO
  public async packFromStreamToStream() {}

  // TODO
  public async packFromDirPathToStream() {}

  // TODO
  public async packFromDirPathToFs() {}
}
tsconfig.json

@@ -3,7 +3,12 @@
     "experimentalDecorators": true,
     "useDefineForClassFields": false,
     "target": "ES2022",
-    "module": "ES2022",
-    "moduleResolution": "nodenext"
-  }
+    "module": "NodeNext",
+    "moduleResolution": "NodeNext",
+    "esModuleInterop": true,
+    "verbatimModuleSyntax": true
+  },
+  "exclude": [
+    "dist_*/**/*.d.ts"
+  ]
 }