Compare commits
No commits in common. "master" and "v1.0.12" have entirely different histories.
@@ -1,66 +0,0 @@
-name: Default (not tags)
-
-on:
-  push:
-    tags-ignore:
-      - '**'
-
-env:
-  IMAGE: registry.gitlab.com/hosttoday/ht-docker-node:npmci
-  NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@gitea.lossless.digital/${{gitea.repository}}.git
-  NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
-  NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
-  NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
-  NPMCI_URL_CLOUDLY: ${{secrets.NPMCI_URL_CLOUDLY}}
-
-jobs:
-  security:
-    runs-on: ubuntu-latest
-    continue-on-error: true
-    container:
-      image: ${{ env.IMAGE }}
-
-    steps:
-      - uses: actions/checkout@v3
-
-      - name: Install pnpm and npmci
-        run: |
-          pnpm install -g pnpm
-          pnpm install -g @shipzone/npmci
-
-      - name: Run npm prepare
-        run: npmci npm prepare
-
-      - name: Audit production dependencies
-        run: |
-          npmci command npm config set registry https://registry.npmjs.org
-          npmci command pnpm audit --audit-level=high --prod
-        continue-on-error: true
-
-      - name: Audit development dependencies
-        run: |
-          npmci command npm config set registry https://registry.npmjs.org
-          npmci command pnpm audit --audit-level=high --dev
-        continue-on-error: true
-
-  test:
-    if: ${{ always() }}
-    needs: security
-    runs-on: ubuntu-latest
-    container:
-      image: ${{ env.IMAGE }}
-
-    steps:
-      - uses: actions/checkout@v3
-
-      - name: Test stable
-        run: |
-          npmci node install stable
-          npmci npm install
-          npmci npm test
-
-      - name: Test build
-        run: |
-          npmci node install stable
-          npmci npm install
-          npmci npm build
@@ -1,124 +0,0 @@
-name: Default (tags)
-
-on:
-  push:
-    tags:
-      - '*'
-
-env:
-  IMAGE: registry.gitlab.com/hosttoday/ht-docker-node:npmci
-  NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@gitea.lossless.digital/${{gitea.repository}}.git
-  NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
-  NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
-  NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
-  NPMCI_URL_CLOUDLY: ${{secrets.NPMCI_URL_CLOUDLY}}
-
-jobs:
-  security:
-    runs-on: ubuntu-latest
-    continue-on-error: true
-    container:
-      image: ${{ env.IMAGE }}
-
-    steps:
-      - uses: actions/checkout@v3
-
-      - name: Prepare
-        run: |
-          pnpm install -g pnpm
-          pnpm install -g @shipzone/npmci
-          npmci npm prepare
-
-      - name: Audit production dependencies
-        run: |
-          npmci command npm config set registry https://registry.npmjs.org
-          npmci command pnpm audit --audit-level=high --prod
-        continue-on-error: true
-
-      - name: Audit development dependencies
-        run: |
-          npmci command npm config set registry https://registry.npmjs.org
-          npmci command pnpm audit --audit-level=high --dev
-        continue-on-error: true
-
-  test:
-    if: ${{ always() }}
-    needs: security
-    runs-on: ubuntu-latest
-    container:
-      image: ${{ env.IMAGE }}
-
-    steps:
-      - uses: actions/checkout@v3
-
-      - name: Prepare
-        run: |
-          pnpm install -g pnpm
-          pnpm install -g @shipzone/npmci
-          npmci npm prepare
-
-      - name: Test stable
-        run: |
-          npmci node install stable
-          npmci npm install
-          npmci npm test
-
-      - name: Test build
-        run: |
-          npmci node install stable
-          npmci npm install
-          npmci npm build
-
-  release:
-    needs: test
-    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
-    runs-on: ubuntu-latest
-    container:
-      image: ${{ env.IMAGE }}
-
-    steps:
-      - uses: actions/checkout@v3
-
-      - name: Prepare
-        run: |
-          pnpm install -g pnpm
-          pnpm install -g @shipzone/npmci
-          npmci npm prepare
-
-      - name: Release
-        run: |
-          npmci node install stable
-          npmci npm publish
-
-  metadata:
-    needs: test
-    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
-    runs-on: ubuntu-latest
-    container:
-      image: ${{ env.IMAGE }}
-    continue-on-error: true
-
-    steps:
-      - uses: actions/checkout@v3
-
-      - name: Prepare
-        run: |
-          pnpm install -g pnpm
-          pnpm install -g @shipzone/npmci
-          npmci npm prepare
-
-      - name: Code quality
-        run: |
-          npmci command npm install -g typescript
-          npmci npm install
-
-      - name: Trigger
-        run: npmci trigger
-
-      - name: Build docs and upload artifacts
-        run: |
-          npmci node install stable
-          npmci npm install
-          pnpm install -g @git.zone/tsdoc
-          npmci command tsdoc
-        continue-on-error: true
127  .gitlab-ci.yml  Normal file
@@ -0,0 +1,127 @@
+# gitzone ci_default
+image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
+
+cache:
+  paths:
+    - .npmci_cache/
+  key: '$CI_BUILD_STAGE'
+
+stages:
+  - security
+  - test
+  - release
+  - metadata
+
+# ====================
+# security stage
+# ====================
+mirror:
+  stage: security
+  script:
+    - npmci git mirror
+  tags:
+    - lossless
+    - docker
+    - notpriv
+
+audit:
+  image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
+  stage: security
+  script:
+    - npmci npm prepare
+    - npmci command npm install --ignore-scripts
+    - npmci command npm config set registry https://registry.npmjs.org
+    - npmci command npm audit --audit-level=moderate
+  tags:
+    - lossless
+    - docker
+    - notpriv
+
+# ====================
+# test stage
+# ====================
+
+testStable:
+  stage: test
+  script:
+    - npmci npm prepare
+    - npmci node install stable
+    - npmci npm install
+    - npmci npm test
+  coverage: /\d+.?\d+?\%\s*coverage/
+  tags:
+    - lossless
+    - docker
+    - priv
+
+testBuild:
+  stage: test
+  script:
+    - npmci npm prepare
+    - npmci node install stable
+    - npmci npm install
+    - npmci command npm run build
+  coverage: /\d+.?\d+?\%\s*coverage/
+  tags:
+    - lossless
+    - docker
+    - notpriv
+
+release:
+  stage: release
+  script:
+    - npmci node install stable
+    - npmci npm publish
+  only:
+    - tags
+  tags:
+    - lossless
+    - docker
+    - notpriv
+
+# ====================
+# metadata stage
+# ====================
+codequality:
+  stage: metadata
+  allow_failure: true
+  script:
+    - npmci command npm install -g tslint typescript
+    - npmci npm prepare
+    - npmci npm install
+    - npmci command "tslint -c tslint.json ./ts/**/*.ts"
+  tags:
+    - lossless
+    - docker
+    - priv
+
+trigger:
+  stage: metadata
+  script:
+    - npmci trigger
+  only:
+    - tags
+  tags:
+    - lossless
+    - docker
+    - notpriv
+
+pages:
+  stage: metadata
+  script:
+    - npmci node install lts
+    - npmci command npm install -g @gitzone/tsdoc
+    - npmci npm prepare
+    - npmci npm install
+    - npmci command tsdoc
+  tags:
+    - lossless
+    - docker
+    - notpriv
+  only:
+    - tags
+  artifacts:
+    expire_in: 1 week
+    paths:
+      - public
+  allow_failure: true
24  .vscode/launch.json  vendored
@@ -2,10 +2,28 @@
   "version": "0.2.0",
   "configurations": [
     {
-      "command": "npm test",
-      "name": "Run npm test",
+      "name": "current file",
+      "type": "node",
       "request": "launch",
-      "type": "node-terminal"
+      "args": [
+        "${relativeFile}"
+      ],
+      "runtimeArgs": ["-r", "@gitzone/tsrun"],
+      "cwd": "${workspaceRoot}",
+      "protocol": "inspector",
+      "internalConsoleOptions": "openOnSessionStart"
+    },
+    {
+      "name": "test.ts",
+      "type": "node",
+      "request": "launch",
+      "args": [
+        "test/test.ts"
+      ],
+      "runtimeArgs": ["-r", "@gitzone/tsrun"],
+      "cwd": "${workspaceRoot}",
+      "protocol": "inspector",
+      "internalConsoleOptions": "openOnSessionStart"
     }
   ]
 }
2  .vscode/settings.json  vendored
@@ -15,7 +15,7 @@
       "properties": {
         "projectType": {
           "type": "string",
-          "enum": ["website", "element", "service", "npm", "wcc"]
+          "enum": ["website", "element", "service", "npm"]
         }
       }
     }
55  changelog.md
@@ -1,55 +0,0 @@
-# Changelog
-
-## 2024-10-13 - 4.0.39 - fix(core)
-Fix dependencies and update documentation.
-
-- Ensure package uses the latest dependencies
-- Reviewed and grouped imports in TypeScript files
-- Updated readme with advanced usage examples
-
-## 2024-10-13 - 4.0.38 - fix(dependencies)
-Update dependencies to latest versions
-
-- Updated @push.rocks/smartfile to version 11.0.21
-- Updated @push.rocks/smartpromise to version 4.0.4
-- Updated @push.rocks/smartstream to version 3.0.46
-- Updated @push.rocks/smarturl to version 3.1.0
-- Updated file-type to version 19.5.0
-- Updated @git.zone/tsbuild to version 2.1.84
-- Updated @git.zone/tsrun to version 1.2.49
-- Updated @push.rocks/tapbundle to version 5.3.0
-
-## 2024-06-08 - 4.0.24 to 4.0.37 - Fixes and Updates
-Core updates and bug fixes were implemented in versions 4.0.24 through 4.0.37.
-
-- Repeated core updates and fixes applied consistently across multiple versions.
-
-## 2024-06-06 - 4.0.22 to 4.0.23 - Descriptions and Fixes Updates
-Efforts to update documentation and core features.
-
-- "update description" in 4.0.22
-- Updates to `tsconfig` and `npmextra.json` were performed.
-- Ongoing core fixes.
-
-## 2023-11-06 - 4.0.0 - Major Update with Breaking Changes
-Introduction of significant updates and breaking changes.
-
-- Transition to new version 4.0.0 with core updates.
-- Break in compatibility due to major structural changes with core functionalities.
-
-## 2023-07-11 - 3.0.6 - Organizational Changes
-Structural reorganization and updates to the organization schema.
-
-- Switch to new organizational schema implemented.
-
-## 2022-04-04 - 3.0.0 - Build Updates and Breaking Changes
-Major build update introducing breaking changes.
-
-- Introduction of ESM structure with breaking changes.
-
-## 2016-01-18 - 0.0.0 to 1.0.0 - Initial Development and Launch
-Initial software development and establishment of core features.
-
-- Project set-up including Travis CI integration.
-- Launch of the first full version with code restructuring.
-- Added callback support.
5  dist_ts/index.d.ts  vendored
@@ -1,4 +1 @@
-export * from './classes.smartarchive.js';
-export * from './classes.tartools.js';
-export * from './classes.ziptools.js';
-export * from './classes.gziptools.js';
+export * from './smartarchive.classes.smartarchive';
@@ -1,5 +1,7 @@
-export * from './classes.smartarchive.js';
-export * from './classes.tartools.js';
-export * from './classes.ziptools.js';
-export * from './classes.gziptools.js';
-//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi90cy9pbmRleC50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxjQUFjLDJCQUEyQixDQUFDO0FBQzFDLGNBQWMsdUJBQXVCLENBQUM7QUFDdEMsY0FBYyx1QkFBdUIsQ0FBQztBQUN0QyxjQUFjLHdCQUF3QixDQUFDIn0=
+"use strict";
+function __export(m) {
+    for (var p in m) if (!exports.hasOwnProperty(p)) exports[p] = m[p];
+}
+Object.defineProperty(exports, "__esModule", { value: true });
+__export(require("./smartarchive.classes.smartarchive"));
+//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi90cy9pbmRleC50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiOzs7OztBQUFBLHlEQUFvRCJ9
33  dist_ts/smartarchive.classes.smartarchive.d.ts  vendored
@@ -1,34 +1,5 @@
-/// <reference types="node" resolution-mode="require"/>
-import * as plugins from './smartarchive.plugins.js';
-export declare class SmartArchive {
-    archiveDirectory: string;
-    constructor();
-    /**
-     * extracts an archive from a given url
-     */
-    extractArchiveFromUrlToFs(urlArg: string, targetDir: string): Promise<void>;
-    /**
-     * extracts an archive from a given filePath on disk
-     * @param filePathArg
-     * @param targetDirArg
-     */
-    extractArchiveFromFilePathToFs(filePathArg: string, targetDirArg: string): Promise<void>;
-    /**
-     * extracts to Observable
-     * where the Observable is emitting smartfiles
-     */
-    extractArchiveFromBufferToObservable(bufferArg: Buffer): Promise<plugins.smartrx.rxjs.ReplaySubject<plugins.smartfile.Smartfile>>;
-    extractArchiveWithIntakeAndReplaySubject(): {
-        intake: plugins.smartstream.StreamIntake<Buffer>;
-        replaySubject: plugins.smartrx.rxjs.ReplaySubject<plugins.smartfile.Smartfile>;
-    };
-    /**
-     * extracts to Observable
-     */
-    extractArchiveFromUrlToObservable(urlArg: string): Promise<plugins.smartrx.rxjs.ReplaySubject<plugins.smartfile.Smartfile>>;
-    extractArchiveFromUrlToStream(): Promise<void>;
-    extractArchiveFromFilePathToStream(): Promise<void>;
-    extractArchiveFromStreamToStream(): Promise<void>;
-    packFromStreamToStream(): Promise<void>;
-    packFromDirPathToStream(): Promise<void>;
-    packFromDirPathToFs(): Promise<void>;
-    extractArchiveFromFilePath(filePathArg: string, targetDir: string): void;
 }
File diff suppressed because one or more lines are too long
@@ -1,4 +1,13 @@
-import * as plugins from './smartarchive.plugins.js';
-export const packageDir = plugins.path.join(plugins.smartpath.get.dirnameFromImportMetaUrl(import.meta.url), '../');
-export const nogitDir = plugins.path.join(packageDir, './.nogit');
-//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoic21hcnRhcmNoaXZlLnBhdGhzLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vdHMvc21hcnRhcmNoaXZlLnBhdGhzLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBLE9BQU8sS0FBSyxPQUFPLE1BQU0sMkJBQTJCLENBQUM7QUFFckQsTUFBTSxDQUFDLE1BQU0sVUFBVSxHQUFHLE9BQU8sQ0FBQyxJQUFJLENBQUMsSUFBSSxDQUN6QyxPQUFPLENBQUMsU0FBUyxDQUFDLEdBQUcsQ0FBQyx3QkFBd0IsQ0FBQyxNQUFNLENBQUMsSUFBSSxDQUFDLEdBQUcsQ0FBQyxFQUMvRCxLQUFLLENBQ04sQ0FBQztBQUNGLE1BQU0sQ0FBQyxNQUFNLFFBQVEsR0FBRyxPQUFPLENBQUMsSUFBSSxDQUFDLElBQUksQ0FBQyxVQUFVLEVBQUUsVUFBVSxDQUFDLENBQUMifQ==
+"use strict";
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+    result["default"] = mod;
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const plugins = __importStar(require("./smartarchive.plugins"));
+exports.packageDir = plugins.path.join(__dirname, '../');
+exports.nogitDir = plugins.path.join(__dirname, './.nogit');
+//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoic21hcnRhcmNoaXZlLnBhdGhzLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vdHMvc21hcnRhcmNoaXZlLnBhdGhzLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiI7Ozs7Ozs7OztBQUFBLGdFQUFrRDtBQUVyQyxRQUFBLFVBQVUsR0FBRyxPQUFPLENBQUMsSUFBSSxDQUFDLElBQUksQ0FBQyxTQUFTLEVBQUUsS0FBSyxDQUFDLENBQUM7QUFDakQsUUFBQSxRQUFRLEdBQUcsT0FBTyxDQUFDLElBQUksQ0FBQyxJQUFJLENBQUMsU0FBUyxFQUFFLFVBQVUsQ0FBQyxDQUFDIn0=
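The removed ESM build above resolves its package directory from `import.meta.url`, where the older CommonJS build simply used `__dirname`. A minimal sketch of how such a helper can be built with Node's standard `url` and `path` modules — a hypothetical stand-in for `plugins.smartpath.get.dirnameFromImportMetaUrl`, whose real implementation lives in `@push.rocks/smartpath` and may differ:

```typescript
// Hypothetical stand-in for smartpath's dirnameFromImportMetaUrl helper;
// the actual library implementation may differ.
import { fileURLToPath } from 'url';
import { dirname, join } from 'path';

function dirnameFromImportMetaUrl(importMetaUrl: string): string {
  // Convert the module's file:// URL to a filesystem path, then take its directory.
  return dirname(fileURLToPath(importMetaUrl));
}

// Mirrors the removed paths.js: resolve the package root relative to this module.
const packageDir = join(dirnameFromImportMetaUrl(import.meta.url), '../');
```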
17  dist_ts/smartarchive.plugins.d.ts  vendored
@@ -1,14 +1,7 @@
-import * as path from 'path';
+import path from 'path';
 export { path };
-import * as smartfile from '@push.rocks/smartfile';
-import * as smartpath from '@push.rocks/smartpath';
-import * as smartpromise from '@push.rocks/smartpromise';
-import * as smartrequest from '@push.rocks/smartrequest';
-import * as smartunique from '@push.rocks/smartunique';
-import * as smartstream from '@push.rocks/smartstream';
-import * as smartrx from '@push.rocks/smartrx';
-export { smartfile, smartpath, smartpromise, smartrequest, smartunique, smartstream, smartrx };
-import gunzipMaybe from 'gunzip-maybe';
+import * as smartfile from '@pushrocks/smartfile';
+import * as smartpath from '@pushrocks/smartpath';
+export { smartfile, smartpath };
 import tar from 'tar';
-import tarStream from 'tar-stream';
-export { gunzipMaybe, tar, tarStream };
+export { tar };
@@ -1,19 +1,24 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+    result["default"] = mod;
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
 // node native scope
-import * as path from 'path';
-export { path };
+const path_1 = __importDefault(require("path"));
+exports.path = path_1.default;
 // @pushrocks scope
-import * as smartfile from '@push.rocks/smartfile';
-import * as smartpath from '@push.rocks/smartpath';
-import * as smartpromise from '@push.rocks/smartpromise';
-import * as smartrequest from '@push.rocks/smartrequest';
-import * as smartunique from '@push.rocks/smartunique';
-import * as smartstream from '@push.rocks/smartstream';
-import * as smartrx from '@push.rocks/smartrx';
-export { smartfile, smartpath, smartpromise, smartrequest, smartunique, smartstream, smartrx };
+const smartfile = __importStar(require("@pushrocks/smartfile"));
+exports.smartfile = smartfile;
+const smartpath = __importStar(require("@pushrocks/smartpath"));
+exports.smartpath = smartpath;
 // third party scope
-import gunzipMaybe from 'gunzip-maybe';
-// @ts-ignore
-import tar from 'tar';
-import tarStream from 'tar-stream';
-export { gunzipMaybe, tar, tarStream };
-//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoic21hcnRhcmNoaXZlLnBsdWdpbnMuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi90cy9zbWFydGFyY2hpdmUucGx1Z2lucy50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxvQkFBb0I7QUFDcEIsT0FBTyxLQUFLLElBQUksTUFBTSxNQUFNLENBQUM7QUFFN0IsT0FBTyxFQUFFLElBQUksRUFBRSxDQUFDO0FBRWhCLG1CQUFtQjtBQUNuQixPQUFPLEtBQUssU0FBUyxNQUFNLHVCQUF1QixDQUFDO0FBQ25ELE9BQU8sS0FBSyxTQUFTLE1BQU0sdUJBQXVCLENBQUM7QUFDbkQsT0FBTyxLQUFLLFlBQVksTUFBTSwwQkFBMEIsQ0FBQztBQUN6RCxPQUFPLEtBQUssWUFBWSxNQUFNLDBCQUEwQixDQUFDO0FBQ3pELE9BQU8sS0FBSyxXQUFXLE1BQU0seUJBQXlCLENBQUM7QUFDdkQsT0FBTyxLQUFLLFdBQVcsTUFBTSx5QkFBeUIsQ0FBQztBQUN2RCxPQUFPLEtBQUssT0FBTyxNQUFNLHFCQUFxQixDQUFDO0FBRS9DLE9BQU8sRUFBRSxTQUFTLEVBQUUsU0FBUyxFQUFFLFlBQVksRUFBRSxZQUFZLEVBQUUsV0FBVyxFQUFFLFdBQVcsRUFBRSxPQUFPLEVBQUUsQ0FBQztBQUUvRixvQkFBb0I7QUFDcEIsT0FBTyxXQUFXLE1BQU0sY0FBYyxDQUFDO0FBRXZDLGFBQWE7QUFDYixPQUFPLEdBQUcsTUFBTSxLQUFLLENBQUM7QUFDdEIsT0FBTyxTQUFTLE1BQU0sWUFBWSxDQUFDO0FBRW5DLE9BQU8sRUFBRSxXQUFXLEVBQUUsR0FBRyxFQUFFLFNBQVMsRUFBRSxDQUFDIn0=
+const tar_1 = __importDefault(require("tar"));
+exports.tar = tar_1.default;
+//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoic21hcnRhcmNoaXZlLnBsdWdpbnMuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi90cy9zbWFydGFyY2hpdmUucGx1Z2lucy50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiOzs7Ozs7Ozs7Ozs7QUFBQSxvQkFBb0I7QUFDcEIsZ0RBQXdCO0FBRWYsZUFGRixjQUFJLENBRUU7QUFFYixtQkFBbUI7QUFDbkIsZ0VBQWtEO0FBR3pDLDhCQUFTO0FBRmxCLGdFQUFrRDtBQUU5Qiw4QkFBUztBQUU3QixvQkFBb0I7QUFDcEIsOENBQXNCO0FBRWIsY0FGRixhQUFHLENBRUUifQ==
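Both sides of this file implement the same "plugins" convention used throughout these modules: every dependency is re-exported from one file and consumed through a single namespace. A small sketch of the consuming side, assuming only the re-exports shown above (the import path is illustrative):

```typescript
// Sketch of how the plugins module is consumed elsewhere in this package
// (compare test.ts and smartarchive.paths.ts in this same diff).
import * as plugins from './smartarchive.plugins.js';

// Every dependency is reached through the one namespace, so swapping or
// mocking a dependency only ever touches the plugins file itself.
const archivePath = plugins.path.join('/tmp', 'archive.tar');
```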
6  license
@@ -1,10 +1,6 @@
 The MIT License (MIT)
 
-Copyright (c) 2016 Task Venture Capital GmbH
-
-Includes work under MIT license with copyrights:
-* Copyright (c) 2017 by Jan Boelsche (jan@lagomorph.de)
-* Copyright 2011 by antimatter15 (antimatter15@gmail.com)
+Copyright (c) 2016 Lossless GmbH
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
@@ -6,28 +6,12 @@
   "gitzone": {
     "projectType": "npm",
     "module": {
-      "githost": "code.foss.global",
-      "gitscope": "push.rocks",
+      "githost": "gitlab.com",
+      "gitscope": "pushrocks",
       "gitrepo": "smartarchive",
-      "description": "A library for working with archive files, providing utilities for compressing and decompressing data.",
-      "npmPackagename": "@push.rocks/smartarchive",
-      "license": "MIT",
-      "keywords": [
-        "archive",
-        "compression",
-        "decompression",
-        "zip",
-        "tar",
-        "gzip",
-        "bzip2",
-        "file extraction",
-        "file creation",
-        "data analysis",
-        "file stream"
-      ]
+      "shortDescription": "work with archives",
+      "npmPackagename": "@pushrocks/smartarchive",
+      "license": "MIT"
     }
-  },
-  "tsdoc": {
-    "legal": "\n## License and Legal Information\n\nThis repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. \n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or if you require further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n"
-  }
+  }
 }
1516  package-lock.json  generated  Normal file
File diff suppressed because it is too large
61  package.json
@@ -1,46 +1,37 @@
 {
-  "name": "@push.rocks/smartarchive",
-  "version": "4.0.39",
-  "description": "A library for working with archive files, providing utilities for compressing and decompressing data.",
+  "name": "@pushrocks/smartarchive",
+  "version": "1.0.12",
+  "description": "work with archives",
   "main": "dist_ts/index.js",
   "typings": "dist_ts/index.d.ts",
-  "type": "module",
   "scripts": {
-    "test": "(tstest test/ --web)",
-    "build": "tsbuild --web --allowimplicitany",
-    "buildDocs": "tsdoc"
+    "test": "tstest test/",
+    "build": "tsbuild"
   },
   "repository": {
     "type": "git",
-    "url": "https://code.foss.global/push.rocks/smartarchive.git"
+    "url": "git+https://github.com/pushrocks/smartarchive.git"
   },
   "author": "Lossless GmbH",
   "license": "MIT",
   "bugs": {
     "url": "https://github.com/pushrocks/smartarchive/issues"
   },
-  "homepage": "https://code.foss.global/push.rocks/smartarchive",
+  "homepage": "https://github.com/pushrocks/smartarchive#readme",
   "dependencies": {
-    "@push.rocks/smartdelay": "^3.0.5",
-    "@push.rocks/smartfile": "^11.0.21",
-    "@push.rocks/smartpath": "^5.0.18",
-    "@push.rocks/smartpromise": "^4.0.4",
-    "@push.rocks/smartrequest": "^2.0.22",
-    "@push.rocks/smartrx": "^3.0.7",
-    "@push.rocks/smartstream": "^3.0.46",
-    "@push.rocks/smartunique": "^3.0.9",
-    "@push.rocks/smarturl": "^3.1.0",
-    "@types/tar-stream": "^3.1.3",
-    "fflate": "^0.8.2",
-    "file-type": "^19.5.0",
-    "tar-stream": "^3.1.7",
-    "through": "^2.3.8"
+    "@pushrocks/smartfile": "^7.0.11",
+    "@pushrocks/smartpath": "^4.0.1",
+    "@pushrocks/smartrequest": "^1.1.47",
+    "@pushrocks/smartunique": "^3.0.3",
+    "@types/tar": "^4.0.3",
+    "tar": "^6.0.1"
   },
   "devDependencies": {
-    "@git.zone/tsbuild": "^2.1.84",
-    "@git.zone/tsrun": "^1.2.49",
-    "@git.zone/tstest": "^1.0.90",
-    "@push.rocks/tapbundle": "^5.3.0"
+    "@gitzone/tsbuild": "^2.1.22",
+    "@gitzone/tstest": "^1.0.28",
+    "@pushrocks/tapbundle": "^3.2.1",
+    "tslint": "^6.1.0",
+    "tslint-config-prettier": "^1.18.0"
   },
   "private": false,
   "files": [
@@ -54,21 +45,5 @@
     "cli.js",
     "npmextra.json",
     "readme.md"
-  ],
-  "browserslist": [
-    "last 1 chrome versions"
-  ],
-  "keywords": [
-    "archive",
-    "compression",
-    "decompression",
-    "zip",
-    "tar",
-    "gzip",
-    "bzip2",
-    "file extraction",
-    "file creation",
-    "data analysis",
-    "file stream"
-  ]
+  ]
 }
6598  pnpm-lock.yaml  generated
File diff suppressed because it is too large
@@ -1 +0,0 @@
-
301  readme.md
@@ -1,282 +1,49 @@
-# @push.rocks/smartarchive
+# @pushrocks/smartarchive
+work with archives
+
-`@push.rocks/smartarchive` is a powerful library designed for managing archive files. It provides utilities for compressing and decompressing data in various formats such as zip, tar, gzip, and bzip2. This library aims to simplify the process of handling archive files, making it an ideal choice for projects that require manipulation of archived data.
+## Availability and Links
+* [npmjs.org (npm package)](https://www.npmjs.com/package/@pushrocks/smartarchive)
+* [gitlab.com (source)](https://gitlab.com/pushrocks/smartarchive)
+* [github.com (source mirror)](https://github.com/pushrocks/smartarchive)
+* [docs (typedoc)](https://pushrocks.gitlab.io/smartarchive/)
+
-## Install
-
-To install `@push.rocks/smartarchive`, you can either use npm or yarn. Run one of the following commands in your project directory:
-
-```shell
-npm install @push.rocks/smartarchive --save
-```
-
-or if you prefer yarn:
-
-```shell
-yarn add @push.rocks/smartarchive
-```
-
-This will add `@push.rocks/smartarchive` to your project's dependencies.
+## Status for master
+[](https://gitlab.com/pushrocks/smartarchive/commits/master)
+[](https://gitlab.com/pushrocks/smartarchive/commits/master)
+[](https://www.npmjs.com/package/@pushrocks/smartarchive)
+[](https://snyk.io/test/npm/@pushrocks/smartarchive)
+[](https://nodejs.org/dist/latest-v10.x/docs/api/)
+[](https://nodejs.org/dist/latest-v10.x/docs/api/)
+[](https://prettier.io/)
+
 ## Usage
-`@push.rocks/smartarchive` provides an easy-to-use API for extracting, creating, and analyzing archive files. Below, we'll cover how to get started and explore various features of the module.
-
-### Importing SmartArchive
 Use TypeScript for best in class intellisense.
-
-First, import `SmartArchive` from `@push.rocks/smartarchive` using ESM syntax:
-
-```typescript
-import { SmartArchive } from '@push.rocks/smartarchive';
+```javascript
+import * as smartarchive from 'smartarchive';
+smartarchive
+  .get({
+    from: 'https://example.com/example.zip',
+    toPath: '/some/local/absolute/path'
+  })
+  .then(/*...*/);
 ```
-
-### Extracting Archive Files
+For further information read the linked docs at the top of this README.
-
-You can extract archive files from different sources using `SmartArchive.fromArchiveUrl`, `SmartArchive.fromArchiveFile`, and `SmartArchive.fromArchiveStream`. Here's an example of extracting an archive from a URL:
+> MIT licensed | **©** [Lossless GmbH](https://lossless.gmbh)
+> | By using this npm module you agree to our [privacy policy](https://lossless.gmbH/privacy.html)
-
-```typescript
-import { SmartArchive } from '@push.rocks/smartarchive';
+[](https://push.rocks)
-
-async function extractArchiveFromURL() {
-  const url = 'https://example.com/archive.zip';
-  const targetDir = '/path/to/extract';
+## Contribution
-
-  const archive = await SmartArchive.fromArchiveUrl(url);
-  await archive.exportToFs(targetDir);
+We are always happy for code contributions. If you are not the code contributing type that is ok. Still, maintaining Open Source repositories takes considerable time and thought. If you like the quality of what we do and our modules are useful to you we would appreciate a little monthly contribution: You can [contribute one time](https://lossless.link/contribute-onetime) or [contribute monthly](https://lossless.link/contribute). :)
-
-  console.log('Archive extracted successfully.');
-}
+For further information read the linked docs at the top of this readme.
-
-extractArchiveFromURL();
-```
+> MIT licensed | **©** [Lossless GmbH](https://lossless.gmbh)
+| By using this npm module you agree to our [privacy policy](https://lossless.gmbH/privacy)
-
-### Extracting an Archive from a File
-
-Similarly, you can extract an archive from a local file:
-
-```typescript
-import { SmartArchive } from '@push.rocks/smartarchive';
-
-async function extractArchiveFromFile() {
-  const filePath = '/path/to/archive.zip';
-  const targetDir = '/path/to/extract';
-
-  const archive = await SmartArchive.fromArchiveFile(filePath);
-  await archive.exportToFs(targetDir);
-
-  console.log('Archive extracted successfully.');
-}
-
-extractArchiveFromFile();
-```
-
-### Stream-Based Extraction
-
-For larger files, you might prefer a streaming approach to prevent high memory consumption. Here’s an example:
-
-```typescript
-import { SmartArchive } from '@push.rocks/smartarchive';
-import { createReadStream, createWriteStream } from 'fs';
-
-async function extractArchiveUsingStream() {
-  const archiveStream = createReadStream('/path/to/archive.zip');
-  const archive = await SmartArchive.fromArchiveStream(archiveStream);
-  const extractionStream = await archive.exportToStreamOfStreamFiles();
-
-  extractionStream.pipe(createWriteStream('/path/to/destination'));
-}
-
-extractArchiveUsingStream();
-```
-
-### Analyzing Archive Files
-
-Sometimes, you may need to inspect the contents of an archive before extracting it. The following example shows how to analyze an archive:
-
-```typescript
-import { SmartArchive } from '@push.rocks/smartarchive';
-
-async function analyzeArchive() {
-  const filePath = '/path/to/archive.zip';
-
-  const archive = await SmartArchive.fromArchiveFile(filePath);
-  const analysisResult = await archive.analyzeContent();
-
-  console.log(analysisResult); // Outputs details about the archive content
-}
-
-analyzeArchive();
-```
-
-### Creating Archive Files
-
-Creating an archive file is straightforward. Here we demonstrate creating a tar.gz archive:
-
-```typescript
-import { SmartArchive } from '@push.rocks/smartarchive';
-import { createWriteStream } from 'fs';
-
-async function createTarGzArchive() {
-  const archive = new SmartArchive();
-
-  // Add directories and files
-  archive.addedDirectories.push('/path/to/directory1');
-  archive.addedFiles.push('/path/to/file1.txt');
-
-  // Export as tar.gz
-  const tarGzStream = await archive.exportToTarGzStream();
-
-  // Save to filesystem or handle as needed
-  tarGzStream.pipe(createWriteStream('/path/to/destination.tar.gz'));
-}
-
-createTarGzArchive();
-```
-
-### Stream Operations
-
-Here's an example of using `smartarchive`'s streaming capabilities:
-
-```typescript
-import { createReadStream, createWriteStream } from 'fs';
-import { SmartArchive } from '@push.rocks/smartarchive';
-
-async function extractArchiveUsingStreams() {
-  const archiveStream = createReadStream('/path/to/archive.zip');
-  const archive = await SmartArchive.fromArchiveStream(archiveStream);
-  const extractionStream = await archive.exportToStreamOfStreamFiles();
-
-  extractionStream.pipe(createWriteStream('/path/to/extracted'));
-}
-
-extractArchiveUsingStreams();
-```
-
-### Advanced Decompression Usage
-
-`smartarchive` supports multiple compression formats. It also provides detailed control over the decompression processes:
-
-- For ZIP files, `ZipTools` handles decompression using the `fflate` library.
-- For TAR files, `TarTools` uses `tar-stream`.
-- For GZIP files, `GzipTools` provides a `CompressGunzipTransform` and `DecompressGunzipTransform`.
-- For BZIP2 files, `Bzip2Tools` utilizes custom streaming decompression.
-
-Example: Working with a GZIP-compressed archive:
-
-```typescript
-import { createReadStream, createWriteStream } from 'fs';
-import { SmartArchive } from '@push.rocks/smartarchive';
-
-async function decompressGzipArchive() {
-  const filePath = '/path/to/archive.gz';
-  const targetDir = '/path/to/extract';
-
-  const archive = await SmartArchive.fromArchiveFile(filePath);
-  await archive.exportToFs(targetDir);
-
-  console.log('GZIP archive decompressed successfully.');
-}
-
-decompressGzipArchive();
-```
-
-### Advancing with Custom Decompression Streams
-
-You can inject custom decompression streams where needed:
-
-```typescript
-import { createReadStream, createWriteStream } from 'fs';
-import { SmartArchive, GzipTools } from '@push.rocks/smartarchive';
-
-async function customDecompression() {
-  const filePath = '/path/to/archive.gz';
-  const targetDir = '/path/to/extract';
-
-  const archive = await SmartArchive.fromArchiveFile(filePath);
-  const gzipTools = new GzipTools();
-  const decompressionStream = gzipTools.getDecompressionStream();
-
-  const archiveStream = await archive.getArchiveStream();
-  archiveStream.pipe(decompressionStream).pipe(createWriteStream(targetDir));
-
-  console.log('Custom GZIP decompression successful.');
-}
-
-customDecompression();
-```
-
-### Custom Pack and Unpack Tar
-
-When dealing with tar archives, you may need to perform custom packing and unpacking:
-
-```typescript
-import { SmartArchive, TarTools } from '@push.rocks/smartarchive';
-import { createReadStream, createWriteStream } from 'fs';
-
-async function customTarOperations() {
-  const tarTools = new TarTools();
-
-  // Packing a directory into a tar stream
-  const packStream = await tarTools.packDirectory('/path/to/directory');
-  packStream.pipe(createWriteStream('/path/to/archive.tar'));
-
-  // Extracting files from a tar stream
-  const extractStream = tarTools.getDecompressionStream();
-  createReadStream('/path/to/archive.tar').pipe(extractStream).on('entry', (header, stream, next) => {
-    const writeStream = createWriteStream(`/path/to/extract/${header.name}`);
-    stream.pipe(writeStream);
-    stream.on('end', next);
-  });
-}
-
-customTarOperations();
-```
-
-### Extract and Analyze All-in-One
-
-To extract and simultaneously analyze archive content:
-
-```typescript
-import { createReadStream, createWriteStream } from 'fs';
-import { SmartArchive } from '@push.rocks/smartarchive';
-
-async function extractAndAnalyze() {
-  const filePath = '/path/to/archive.zip';
-  const targetDir = '/path/to/extract';
-
-  const archive = await SmartArchive.fromArchiveFile(filePath);
-  const analyzedStream = archive.archiveAnalyzer.getAnalyzedStream();
-  const extractionStream = await archive.exportToStreamOfStreamFiles();
-
-  analyzedStream.pipe(extractionStream).pipe(createWriteStream(targetDir));
-
-  analyzedStream.on('data', (chunk) => {
-    console.log(JSON.stringify(chunk, null, 2));
-  });
-}
-
-extractAndAnalyze();
-```
-
-### Final Words
-
-These examples demonstrate various use cases for `@push.rocks/smartarchive`. Depending on your specific project requirements, you can adapt these examples to suit your needs. Always refer to the latest documentation for the most current information and methods available in `@push.rocks/smartarchive`.
-
-For more information and API references, check the official [`@push.rocks/smartarchive` GitHub repository](https://code.foss.global/push.rocks/smartarchive).
-
-## License and Legal Information
-
-This repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository.
-
-**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.
-
-### Trademarks
-
-This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.
-
-### Company Information
-
-Task Venture Capital GmbH
-Registered at District court Bremen HRB 35230 HB, Germany
-
-For any legal inquiries or if you require further information, please contact us via email at hello@task.vc.
-
-By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.
+[](https://maintainedby.lossless.com)
@@ -1,13 +0,0 @@
-import * as path from 'path';
-import * as smartpath from '@push.rocks/smartpath';
-import * as smartfile from '@push.rocks/smartfile';
-import * as smartrequest from '@push.rocks/smartrequest';
-import * as smartstream from '@push.rocks/smartstream';
-
-export {
-  path,
-  smartpath,
-  smartfile,
-  smartrequest,
-  smartstream,
-}
60  test/test.ts
@@ -1,50 +1,50 @@
-import { tap, expect } from '@push.rocks/tapbundle';
+import { tap, expect } from '@pushrocks/tapbundle';
 
-import * as plugins from './plugins.js';
+import * as path from 'path';
+import * as smartfile from '@pushrocks/smartfile';
+import * as smartrequest from '@pushrocks/smartrequest';
 
-const testPaths = {
-  nogitDir: plugins.path.join(
-    plugins.smartpath.get.dirnameFromImportMetaUrl(import.meta.url),
-    '../.nogit/'
-  ),
-  remoteDir: plugins.path.join(
-    plugins.smartpath.get.dirnameFromImportMetaUrl(import.meta.url),
-    '../.nogit/remote'
-  ),
+const testPlugins = {
+  path,
+  smartfile,
+  smartrequest
 };
 
-import * as smartarchive from '../ts/index.js';
+const testPaths = {
+  nogitDir: testPlugins.path.join(__dirname, '../.nogit/'),
+  remoteDir: testPlugins.path.join(__dirname, '../.nogit/remote')
+};
 
+import * as smartarchive from '../ts/index';
 
 tap.preTask('should prepare .nogit dir', async () => {
-  await plugins.smartfile.fs.ensureDir(testPaths.remoteDir);
+  await testPlugins.smartfile.fs.ensureDir(testPaths.remoteDir);
 });
 
-tap.preTask('should prepare downloads', async (tools) => {
+tap.preTask('should prepare downloads', async tools => {
   const downloadedFile: Buffer = (
-    await plugins.smartrequest.getBinary(
-      'https://verdaccio.lossless.digital/@pushrocks%2fwebsetup/-/websetup-2.0.14.tgz'
+    await testPlugins.smartrequest.getBinary(
+      'https://verdaccio.lossless.one/@pushrocks%2fwebsetup/-/websetup-2.0.14.tgz'
     )
  ).body;
-  await plugins.smartfile.memory.toFs(
+  await testPlugins.smartfile.memory.toFs(
    downloadedFile,
-    plugins.path.join(testPaths.nogitDir, 'test.tgz')
+    testPlugins.path.join(testPaths.nogitDir, 'test.tgz')
  );
});
 
 tap.test('should extract existing files on disk', async () => {
-  const testSmartarchive = await smartarchive.SmartArchive.fromArchiveUrl(
-    'https://verdaccio.lossless.digital/@pushrocks%2fwebsetup/-/websetup-2.0.14.tgz'
+  const testSmartarchive = new smartarchive.SmartArchive();
+  await testSmartarchive.extractArchiveFromFilePath(
+    testPlugins.path.join(testPaths.nogitDir, 'test.tgz'),
+    testPlugins.path.join(testPaths.nogitDir)
  );
-  await testSmartarchive.exportToFs(testPaths.nogitDir);
 });
 
-tap.skip.test('should extract a b2zip', async () => {
-  const dataUrl = 'https://daten.offeneregister.de/de_companies_ocdata.jsonl.bz2';
-  const testArchive = await smartarchive.SmartArchive.fromArchiveUrl(dataUrl);
-  await testArchive.exportToFs(
-    plugins.path.join(testPaths.nogitDir, 'de_companies_ocdata.jsonl'),
-    'data.jsonl',
-  );
-})
+tap.test('should download a package from the registry', async () => {
+  const testSmartarchive = new smartarchive.SmartArchive();
+  await testSmartarchive.extractArchiveFromUrl('https://verdaccio.lossless.one/@pushrocks%2fsmartfile/-/smartfile-7.0.11.tgz', testPaths.remoteDir);
 
-await tap.start();
+});
 
+tap.start();
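The test file above captures the API shift this compare reverts. A hedged sketch of the two call shapes side by side, with the URL and paths as placeholders:

```typescript
import * as smartarchive from '../ts/index.js';

// 4.x style (removed side of this diff): static factory plus explicit export step.
const newStyle = await smartarchive.SmartArchive.fromArchiveUrl('https://example.com/pkg.tgz');
await newStyle.exportToFs('/tmp/extract');

// 1.x style (added side of this diff): an instance method downloads and extracts in one call.
const oldStyle = new smartarchive.SmartArchive();
await oldStyle.extractArchiveFromUrl('https://example.com/pkg.tgz', '/tmp/extract');
```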
@@ -1,8 +0,0 @@
-/**
- * autocreated commitinfo by @push.rocks/commitinfo
- */
-export const commitinfo = {
-  name: '@push.rocks/smartarchive',
-  version: '4.0.39',
-  description: 'A library for working with archive files, providing utilities for compressing and decompressing data.'
-}
@@ -1,41 +0,0 @@
-var BITMASK = [0, 0x01, 0x03, 0x07, 0x0F, 0x1F, 0x3F, 0x7F, 0xFF];
-
-// returns a function that reads bits.
-// takes a buffer iterator as input
-export function bitIterator(nextBuffer: () => Buffer) {
-  var bit = 0, byte = 0;
-  var bytes = nextBuffer();
-  var f = function(n) {
-    if (n === null && bit != 0) { // align to byte boundary
-      bit = 0
-      byte++;
-      return;
-    }
-    var result = 0;
-    while(n > 0) {
-      if (byte >= bytes.length) {
-        byte = 0;
-        bytes = nextBuffer();
-      }
-      var left = 8 - bit;
-      if (bit === 0 && n > 0)
-        // @ts-ignore
-        f.bytesRead++;
-      if (n >= left) {
-        result <<= left;
-        result |= (BITMASK[left] & bytes[byte++]);
-        bit = 0;
-        n -= left;
-      } else {
-        result <<= n;
-        result |= ((bytes[byte] & (BITMASK[n] << (8 - n - bit))) >> (8 - n - bit));
-        bit += n;
-        n = 0;
-      }
-    }
-    return result;
-  };
-  // @ts-ignore
-  f.bytesRead = 0;
-  return f;
-};
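A short usage sketch of the removed helper, following the semantics visible above: the returned function yields `n` bits at a time, most significant bit first, pulling fresh buffers on demand, while `bits(null)` realigns the cursor to a byte boundary:

```typescript
// Feed the iterator from a fixed queue of buffers (illustrative only).
const queue = [Buffer.from([0b10110100, 0b01100001])];
const bits = bitIterator(() => queue.shift()!);

const first = bits(3); // 0b101 = 5: the top three bits of the first byte
const next = bits(5);  // 0b10100 = 20: the remaining five bits of that byte
bits(null);            // realign to the next byte boundary (a no-op here)
```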
@ -1,335 +0,0 @@
|
||||
export class Bzip2Error extends Error {
|
||||
public name: string = 'Bzip2Error';
|
||||
public message: string;
|
||||
public stack = (new Error()).stack;
|
||||
|
||||
constructor(messageArg: string) {
|
||||
super();
|
||||
this.message = messageArg;
|
||||
}
|
||||
}
|
||||
|
||||
var messageArg = {
|
||||
Error: function(message) {throw new Bzip2Error(message);}
|
||||
};
|
||||
|
||||
export class Bzip2 {
|
||||
public Bzip2Error = Bzip2Error;
|
||||
public crcTable =
|
||||
[
|
||||
0x00000000, 0x04c11db7, 0x09823b6e, 0x0d4326d9,
|
||||
0x130476dc, 0x17c56b6b, 0x1a864db2, 0x1e475005,
|
||||
0x2608edb8, 0x22c9f00f, 0x2f8ad6d6, 0x2b4bcb61,
|
||||
0x350c9b64, 0x31cd86d3, 0x3c8ea00a, 0x384fbdbd,
|
||||
0x4c11db70, 0x48d0c6c7, 0x4593e01e, 0x4152fda9,
|
||||
0x5f15adac, 0x5bd4b01b, 0x569796c2, 0x52568b75,
|
||||
0x6a1936c8, 0x6ed82b7f, 0x639b0da6, 0x675a1011,
|
||||
0x791d4014, 0x7ddc5da3, 0x709f7b7a, 0x745e66cd,
|
||||
0x9823b6e0, 0x9ce2ab57, 0x91a18d8e, 0x95609039,
|
||||
0x8b27c03c, 0x8fe6dd8b, 0x82a5fb52, 0x8664e6e5,
|
||||
0xbe2b5b58, 0xbaea46ef, 0xb7a96036, 0xb3687d81,
|
||||
0xad2f2d84, 0xa9ee3033, 0xa4ad16ea, 0xa06c0b5d,
|
||||
0xd4326d90, 0xd0f37027, 0xddb056fe, 0xd9714b49,
|
||||
0xc7361b4c, 0xc3f706fb, 0xceb42022, 0xca753d95,
|
||||
0xf23a8028, 0xf6fb9d9f, 0xfbb8bb46, 0xff79a6f1,
|
||||
0xe13ef6f4, 0xe5ffeb43, 0xe8bccd9a, 0xec7dd02d,
|
||||
0x34867077, 0x30476dc0, 0x3d044b19, 0x39c556ae,
|
||||
0x278206ab, 0x23431b1c, 0x2e003dc5, 0x2ac12072,
|
||||
0x128e9dcf, 0x164f8078, 0x1b0ca6a1, 0x1fcdbb16,
|
||||
0x018aeb13, 0x054bf6a4, 0x0808d07d, 0x0cc9cdca,
|
||||
0x7897ab07, 0x7c56b6b0, 0x71159069, 0x75d48dde,
|
||||
0x6b93dddb, 0x6f52c06c, 0x6211e6b5, 0x66d0fb02,
|
||||
0x5e9f46bf, 0x5a5e5b08, 0x571d7dd1, 0x53dc6066,
|
||||
0x4d9b3063, 0x495a2dd4, 0x44190b0d, 0x40d816ba,
|
||||
0xaca5c697, 0xa864db20, 0xa527fdf9, 0xa1e6e04e,
|
||||
0xbfa1b04b, 0xbb60adfc, 0xb6238b25, 0xb2e29692,
|
||||
0x8aad2b2f, 0x8e6c3698, 0x832f1041, 0x87ee0df6,
|
||||
0x99a95df3, 0x9d684044, 0x902b669d, 0x94ea7b2a,
|
||||
0xe0b41de7, 0xe4750050, 0xe9362689, 0xedf73b3e,
|
||||
0xf3b06b3b, 0xf771768c, 0xfa325055, 0xfef34de2,
|
||||
0xc6bcf05f, 0xc27dede8, 0xcf3ecb31, 0xcbffd686,
|
||||
0xd5b88683, 0xd1799b34, 0xdc3abded, 0xd8fba05a,
|
||||
0x690ce0ee, 0x6dcdfd59, 0x608edb80, 0x644fc637,
|
||||
0x7a089632, 0x7ec98b85, 0x738aad5c, 0x774bb0eb,
|
||||
0x4f040d56, 0x4bc510e1, 0x46863638, 0x42472b8f,
|
||||
0x5c007b8a, 0x58c1663d, 0x558240e4, 0x51435d53,
|
||||
0x251d3b9e, 0x21dc2629, 0x2c9f00f0, 0x285e1d47,
|
||||
0x36194d42, 0x32d850f5, 0x3f9b762c, 0x3b5a6b9b,
|
||||
0x0315d626, 0x07d4cb91, 0x0a97ed48, 0x0e56f0ff,
|
||||
0x1011a0fa, 0x14d0bd4d, 0x19939b94, 0x1d528623,
|
||||
0xf12f560e, 0xf5ee4bb9, 0xf8ad6d60, 0xfc6c70d7,
|
||||
0xe22b20d2, 0xe6ea3d65, 0xeba91bbc, 0xef68060b,
|
||||
0xd727bbb6, 0xd3e6a601, 0xdea580d8, 0xda649d6f,
|
||||
0xc423cd6a, 0xc0e2d0dd, 0xcda1f604, 0xc960ebb3,
|
||||
0xbd3e8d7e, 0xb9ff90c9, 0xb4bcb610, 0xb07daba7,
|
||||
0xae3afba2, 0xaafbe615, 0xa7b8c0cc, 0xa379dd7b,
|
||||
0x9b3660c6, 0x9ff77d71, 0x92b45ba8, 0x9675461f,
|
||||
0x8832161a, 0x8cf30bad, 0x81b02d74, 0x857130c3,
|
||||
0x5d8a9099, 0x594b8d2e, 0x5408abf7, 0x50c9b640,
|
||||
0x4e8ee645, 0x4a4ffbf2, 0x470cdd2b, 0x43cdc09c,
|
||||
0x7b827d21, 0x7f436096, 0x7200464f, 0x76c15bf8,
|
||||
0x68860bfd, 0x6c47164a, 0x61043093, 0x65c52d24,
|
||||
0x119b4be9, 0x155a565e, 0x18197087, 0x1cd86d30,
|
||||
0x029f3d35, 0x065e2082, 0x0b1d065b, 0x0fdc1bec,
|
||||
0x3793a651, 0x3352bbe6, 0x3e119d3f, 0x3ad08088,
|
||||
0x2497d08d, 0x2056cd3a, 0x2d15ebe3, 0x29d4f654,
|
||||
0xc5a92679, 0xc1683bce, 0xcc2b1d17, 0xc8ea00a0,
|
||||
0xd6ad50a5, 0xd26c4d12, 0xdf2f6bcb, 0xdbee767c,
|
||||
0xe3a1cbc1, 0xe760d676, 0xea23f0af, 0xeee2ed18,
|
||||
0xf0a5bd1d, 0xf464a0aa, 0xf9278673, 0xfde69bc4,
|
||||
0x89b8fd09, 0x8d79e0be, 0x803ac667, 0x84fbdbd0,
|
||||
0x9abc8bd5, 0x9e7d9662, 0x933eb0bb, 0x97ffad0c,
|
||||
0xafb010b1, 0xab710d06, 0xa6322bdf, 0xa2f33668,
|
||||
0xbcb4666d, 0xb8757bda, 0xb5365d03, 0xb1f740b4
|
||||
];
|
||||
|
||||
array = function(bytes) {
|
||||
var bit = 0, byte = 0;
|
||||
var BITMASK = [0, 0x01, 0x03, 0x07, 0x0F, 0x1F, 0x3F, 0x7F, 0xFF ];
|
||||
return function(n) {
|
||||
var result = 0;
|
||||
while(n > 0) {
|
||||
var left = 8 - bit;
|
||||
if (n >= left) {
|
||||
result <<= left;
|
||||
result |= (BITMASK[left] & bytes[byte++]);
|
||||
bit = 0;
|
||||
n -= left;
|
||||
} else {
|
||||
result <<= n;
|
||||
result |= ((bytes[byte] & (BITMASK[n] << (8 - n - bit))) >> (8 - n - bit));
|
||||
bit += n;
|
||||
n = 0;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
simple = function(srcbuffer, stream) {
|
||||
var bits = this.array(srcbuffer);
|
||||
var size = this.header(bits);
|
||||
var ret = false;
|
||||
var bufsize = 100000 * size;
|
||||
var buf = new Int32Array(bufsize);
|
||||
|
||||
do {
|
||||
ret = this.decompress(bits, stream, buf, bufsize);
|
||||
} while(!ret);
|
||||
}
|
||||
|
||||
header = function(bits) {
|
||||
this.byteCount = new Int32Array(256);
|
||||
this.symToByte = new Uint8Array(256);
|
||||
this.mtfSymbol = new Int32Array(256);
|
||||
this.selectors = new Uint8Array(0x8000);
|
||||
|
||||
if (bits(8*3) != 4348520) messageArg.Error("No magic number found");
|
||||
|
||||
var i = bits(8) - 48;
|
||||
if (i < 1 || i > 9) messageArg.Error("Not a BZIP archive");
|
||||
return i;
|
||||
};
|
||||
|
||||
decompress = function(bits, stream, buf, bufsize, streamCRC) {
|
||||
var MAX_HUFCODE_BITS = 20;
|
||||
var MAX_SYMBOLS = 258;
|
||||
var SYMBOL_RUNA = 0;
|
||||
var SYMBOL_RUNB = 1;
|
||||
var GROUP_SIZE = 50;
|
||||
var crc = 0 ^ (-1);
|
||||
|
||||
for(var h = '', i = 0; i < 6; i++) h += bits(8).toString(16);
|
||||
if (h == "177245385090") {
|
||||
var finalCRC = bits(32)|0;
|
||||
if (finalCRC !== streamCRC) messageArg.Error("Error in bzip2: crc32 do not match");
|
||||
// align stream to byte
|
||||
bits(null);
|
||||
return null; // reset streamCRC for next call
|
||||
}
|
||||
if (h != "314159265359") messageArg.Error("eek not valid bzip data");
|
||||
var crcblock = bits(32)|0; // CRC code
|
||||
if (bits(1)) messageArg.Error("unsupported obsolete version");
|
||||
var origPtr = bits(24);
|
||||
if (origPtr > bufsize) messageArg.Error("Initial position larger than buffer size");
|
||||
var t = bits(16);
|
||||
var symTotal = 0;
|
||||
for (i = 0; i < 16; i++) {
|
||||
if (t & (1 << (15 - i))) {
|
||||
var k = bits(16);
|
||||
for(j = 0; j < 16; j++) {
|
||||
if (k & (1 << (15 - j))) {
|
||||
this.symToByte[symTotal++] = (16 * i) + j;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var groupCount = bits(3);
|
||||
if (groupCount < 2 || groupCount > 6) messageArg.Error("another error");
|
||||
var nSelectors = bits(15);
|
||||
if (nSelectors == 0) messageArg.Error("meh");
|
||||
for(var i = 0; i < groupCount; i++) this.mtfSymbol[i] = i;
|
||||
|
||||
for(var i = 0; i < nSelectors; i++) {
|
||||
for(var j = 0; bits(1); j++) if (j >= groupCount) messageArg.Error("whoops another error");
|
||||
var uc = this.mtfSymbol[j];
|
||||
      for(var k: any = j-1; k>=0; k--) {
        this.mtfSymbol[k+1] = this.mtfSymbol[k];
      }
      this.mtfSymbol[0] = uc;
      this.selectors[i] = uc;
    }

    // build the canonical Huffman decoding tables (permute/limit/base) for each group
    var symCount = symTotal + 2;
    var groups = [];
    var length = new Uint8Array(MAX_SYMBOLS),
      temp = new Uint16Array(MAX_HUFCODE_BITS + 1);

    var hufGroup;

    for(var j = 0; j < groupCount; j++) {
      t = bits(5); // starting code length for this group
      for(var i = 0; i < symCount; i++) {
        // delta-decode the code length of each symbol
        while(true) {
          if (t < 1 || t > MAX_HUFCODE_BITS) messageArg.Error("I gave up a while ago on writing error messages");
          if (!bits(1)) break;
          if (!bits(1)) t++;
          else t--;
        }
        length[i] = t;
      }
      var minLen, maxLen;
      minLen = maxLen = length[0];
      for(var i = 1; i < symCount; i++) {
        if (length[i] > maxLen) maxLen = length[i];
        else if (length[i] < minLen) minLen = length[i];
      }
      hufGroup = groups[j] = {};
      hufGroup.permute = new Int32Array(MAX_SYMBOLS);
      hufGroup.limit = new Int32Array(MAX_HUFCODE_BITS + 1);
      hufGroup.base = new Int32Array(MAX_HUFCODE_BITS + 1);

      hufGroup.minLen = minLen;
      hufGroup.maxLen = maxLen;
      var base = hufGroup.base;
      var limit = hufGroup.limit;
      var pp = 0;
      // permute maps code index -> symbol, ordered by ascending code length
      for(var i: number = minLen; i <= maxLen; i++)
        for(var t: any = 0; t < symCount; t++)
          if (length[t] == i) hufGroup.permute[pp++] = t;
      for(i = minLen; i <= maxLen; i++) temp[i] = limit[i] = 0;
      for(i = 0; i < symCount; i++) temp[length[i]]++; // count codes of each length
      pp = t = 0;
      for(i = minLen; i < maxLen; i++) {
        pp += temp[i];
        limit[i] = pp - 1; // largest code value of this length
        pp <<= 1;
        base[i+1] = pp - (t += temp[i]);
      }
      limit[maxLen] = pp + temp[maxLen] - 1;
      base[minLen] = 0;
    }

    // decode the MTF/RLE-encoded symbol stream into buf
    for(var i = 0; i < 256; i++) {
      this.mtfSymbol[i] = i;
      this.byteCount[i] = 0;
    }
    var runPos, count, symCount: number, selector;
    runPos = count = symCount = selector = 0;
    while(true) {
      if (!(symCount--)) {
        // switch Huffman groups every GROUP_SIZE symbols, per the selector list
        symCount = GROUP_SIZE - 1;
        if (selector >= nSelectors) messageArg.Error("meow i'm a kitty, that's an error");
        hufGroup = groups[this.selectors[selector++]];
        base = hufGroup.base;
        limit = hufGroup.limit;
      }
      // read bits until the accumulated code value fits within this length's limit
      i = hufGroup.minLen;
      j = bits(i);
      while(true) {
        if (i > hufGroup.maxLen) messageArg.Error("rawr i'm a dinosaur");
        if (j <= limit[i]) break;
        i++;
        j = (j << 1) | bits(1);
      }
      j -= base[i];
      if (j < 0 || j >= MAX_SYMBOLS) messageArg.Error("moo i'm a cow");
      var nextSym = hufGroup.permute[j];
      // RUNA/RUNB encode run lengths of the MTF front byte in bijective base 2
      if (nextSym == SYMBOL_RUNA || nextSym == SYMBOL_RUNB) {
        if (!runPos) {
          runPos = 1;
          t = 0;
        }
        if (nextSym == SYMBOL_RUNA) t += runPos;
        else t += 2 * runPos;
        runPos <<= 1;
        continue;
      }
      if (runPos) {
        // flush a pending run of the current MTF front byte
        runPos = 0;
        if (count + t > bufsize) messageArg.Error("Boom.");
        uc = this.symToByte[this.mtfSymbol[0]];
        this.byteCount[uc] += t;
        while(t--) buf[count++] = uc;
      }
      if (nextSym > symTotal) break; // end-of-block symbol
      if (count >= bufsize) messageArg.Error("I can't think of anything. Error");
      // move-to-front: emit the byte and move it to the head of the list
      i = nextSym - 1;
      uc = this.mtfSymbol[i];
      for(var k: any = i-1; k>=0; k--) {
        this.mtfSymbol[k+1] = this.mtfSymbol[k];
      }
      this.mtfSymbol[0] = uc;
      uc = this.symToByte[uc];
      this.byteCount[uc]++;
      buf[count++] = uc;
    }
    // inverse Burrows-Wheeler transform: turn per-byte counts into cumulative
    // offsets, then thread a linked list through the upper 24 bits of buf
    if (origPtr < 0 || origPtr >= count) messageArg.Error("I'm a monkey and I'm throwing something at someone, namely you");
    var j = 0;
    for(var i = 0; i < 256; i++) {
      k = j + this.byteCount[i];
      this.byteCount[i] = j;
      j = k;
    }
    for(var i = 0; i < count; i++) {
      uc = buf[i] & 0xff;
      buf[this.byteCount[uc]] |= (i << 8);
      this.byteCount[uc]++;
    }
    var pos = 0, current = 0, run = 0;
    if (count) {
      pos = buf[origPtr];
      current = (pos & 0xff);
      pos >>= 8;
      run = -1;
    }
    var copies, previous, outbyte;
    // walk the linked list, undoing the final RLE pass (a 4-byte run is
    // followed by one length byte) and updating the block CRC as we go
    while(count) {
      count--;
      previous = current;
      pos = buf[pos];
      current = pos & 0xff;
      pos >>= 8;
      if (run++ == 3) {
        copies = current;
        outbyte = previous;
        current = -1;
      } else {
        copies = 1;
        outbyte = current;
      }
      while(copies--) {
        crc = ((crc << 8) ^ this.crcTable[((crc >> 24) ^ outbyte) & 0xFF]) & 0xFFFFFFFF; // crc32
        stream(outbyte);
      }
      if (current != previous) run = 0;
    }

    crc = (crc ^ (-1)) >>> 0;
    if ((crc|0) != (crcblock|0)) messageArg.Error("Error in bzip2: block crc32 does not match");
    streamCRC = (crc ^ ((streamCRC << 1) | (streamCRC >>> 31))) & 0xFFFFFFFF;
    return streamCRC;
  };
};
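The limit/base/permute tables built above implement canonical Huffman decoding: codes of the same bit length are consecutive integers, so a symbol can be decoded by reading bits until the accumulated value falls within the current length's limit. A minimal standalone sketch of that lookup, mirroring the inner decode loop of the code above (decodeSymbol and readBit are illustrative names, not part of the original source):

// canonical Huffman decode against tables shaped like hufGroup above;
// readBit() is a hypothetical single-bit source
function decodeSymbol(
  group: { minLen: number; maxLen: number; limit: Int32Array; base: Int32Array; permute: Int32Array },
  readBit: () => number
): number {
  let len = group.minLen;
  let code = 0;
  for (let i = 0; i < len; i++) code = (code << 1) | readBit(); // prime with minLen bits
  while (true) {
    if (len > group.maxLen) throw new Error('corrupt huffman data');
    if (code <= group.limit[len]) break; // code fits at this length
    len++;
    code = (code << 1) | readBit();
  }
  return group.permute[code - group.base[len]]; // map code value to symbol index
}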
@ -1,93 +0,0 @@
import * as plugins from '../plugins.js';

import { Bzip2 } from './bzip2.js';
import { bitIterator } from './bititerator.js';

export function unbzip2Stream() {
  const bzip2Instance = new Bzip2();
  var bufferQueue = [];
  var hasBytes = 0;
  var blockSize = 0;
  var broken = false;
  var done = false;
  var bitReader = null;
  var streamCRC = null;

  function decompressBlock() {
    if (!blockSize) {
      // first call: read the stream header to learn the block size
      blockSize = bzip2Instance.header(bitReader);
      streamCRC = 0;
    } else {
      var bufsize = 100000 * blockSize;
      var buf = new Int32Array(bufsize);

      var chunk = [];
      var f = function (b) {
        chunk.push(b);
      };

      streamCRC = bzip2Instance.decompress(bitReader, f, buf, bufsize, streamCRC);
      if (streamCRC === null) {
        // reset for next bzip2 header
        blockSize = 0;
        return;
      } else {
        return Buffer.from(chunk);
      }
    }
  }

  var outlength = 0;
  const decompressAndPush = async () => {
    if (broken) return;
    try {
      const resultChunk = decompressBlock();
      if (resultChunk) {
        outlength += resultChunk.length;
      }
      return resultChunk;
    } catch (e) {
      console.error(e);
      broken = true;
    }
  };

  let counter = 0;
  return new plugins.smartstream.SmartDuplex({
    objectMode: true,
    name: 'bzip2',
    debug: false,
    highWaterMark: 1,
    writeFunction: async function (data, streamTools) {
      // console.log(`got chunk ${counter++}`)
      bufferQueue.push(data);
      hasBytes += data.length;
      if (bitReader === null) {
        bitReader = bitIterator(function () {
          return bufferQueue.shift();
        });
      }
      // decompress as long as a whole block's worth of bytes is buffered
      while (!broken && hasBytes - bitReader.bytesRead + 1 >= (25000 + 100000 * blockSize || 4)) {
        //console.error('decompressing with', hasBytes - bitReader.bytesRead + 1, 'bytes in buffer');
        const result = await decompressAndPush();
        if (!result) {
          continue;
        }
        // console.log(result.toString());
        await streamTools.push(result);
      }
    },
    finalFunction: async function (streamTools) {
      //console.error(x,'last compressing with', hasBytes, 'bytes in buffer');
      // drain whatever remains once the input ends
      while (!broken && bitReader && hasBytes > bitReader.bytesRead) {
        const result = await decompressAndPush();
        if (!result) {
          continue;
        }
        await streamTools.push(result);
      }
      if (!broken) {
        if (streamCRC !== null) this.emit('error', new Error('input stream ended prematurely'));
      }
    },
  });
}
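A usage sketch for unbzip2Stream(): it returns a duplex that accepts raw .bz2 bytes and emits decompressed Buffers, so it can sit in an ordinary Node.js pipe chain (the file names are hypothetical):

import * as fs from 'fs';
import { unbzip2Stream } from './bzip2/index.js';

// decompress example.bz2 to example.txt
fs.createReadStream('example.bz2')
  .pipe(unbzip2Stream())
  .pipe(fs.createWriteStream('example.txt'));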
@ -1,82 +0,0 @@
import type { SmartArchive } from './classes.smartarchive.js';
import * as plugins from './plugins.js';

export interface IAnalyzedResult {
  fileType: plugins.fileType.FileTypeResult;
  isArchive: boolean;
  resultStream: plugins.smartstream.SmartDuplex;
  decompressionStream: plugins.stream.Transform | plugins.stream.Duplex | plugins.tarStream.Extract;
}

export class ArchiveAnalyzer {
  smartArchiveRef: SmartArchive;

  constructor(smartArchiveRefArg: SmartArchive) {
    this.smartArchiveRef = smartArchiveRefArg;
  }

  private async mimeTypeIsArchive(mimeType: string): Promise<boolean> {
    const archiveMimeTypes: Set<string> = new Set([
      'application/zip',
      'application/x-rar-compressed',
      'application/x-tar',
      'application/gzip',
      'application/x-7z-compressed',
      'application/x-bzip2',
      // Add other archive mime types here
    ]);

    return archiveMimeTypes.has(mimeType);
  }

  private async getDecompressionStream(
    mimeTypeArg: plugins.fileType.FileTypeResult['mime']
  ): Promise<plugins.stream.Transform | plugins.stream.Duplex | plugins.tarStream.Extract> {
    switch (mimeTypeArg) {
      case 'application/gzip':
        return this.smartArchiveRef.gzipTools.getDecompressionStream();
      case 'application/zip':
        return this.smartArchiveRef.zipTools.getDecompressionStream();
      case 'application/x-bzip2':
        return this.smartArchiveRef.bzip2Tools.getDecompressionStream();
      case 'application/x-tar':
        return this.smartArchiveRef.tarTools.getDecompressionStream();
      default:
        // Handle unsupported formats or no decompression needed
        return plugins.smartstream.createPassThrough();
    }
  }

  public getAnalyzedStream() {
    let firstRun = true;
    const resultStream = plugins.smartstream.createPassThrough();
    const analyzerstream = new plugins.smartstream.SmartDuplex<Buffer, IAnalyzedResult>({
      readableObjectMode: true,
      writeFunction: async (chunkArg: Buffer, streamtools) => {
        if (firstRun) {
          firstRun = false;
          // sniff the file type from the first chunk only
          const fileType = await plugins.fileType.fileTypeFromBuffer(chunkArg);
          const decompressionStream = await this.getDecompressionStream(fileType?.mime as any);
          /**
           * the analyzed stream emits once with this object
           */
          const result: IAnalyzedResult = {
            fileType,
            isArchive: await this.mimeTypeIsArchive(fileType?.mime),
            resultStream,
            decompressionStream,
          };
          await streamtools.push(result);
        }
        // forward every chunk (including the first) to the result stream
        await resultStream.backpressuredPush(chunkArg);
        return null;
      },
      finalFunction: async (tools) => {
        resultStream.push(null);
        return null;
      }
    });
    return analyzerstream;
  }
}
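A consumption sketch for getAnalyzedStream(): pipe raw archive bytes in and the stream emits a single IAnalyzedResult describing what was detected (the input file name is hypothetical):

import * as fs from 'fs';
import { SmartArchive } from './classes.smartarchive.js';

const archive = new SmartArchive();
const analyzed = archive.archiveAnalyzer.getAnalyzedStream();
fs.createReadStream('some.tar.gz')
  .pipe(analyzed)
  .on('data', (result) => {
    // result.resultStream re-emits the original bytes;
    // pipe it through result.decompressionStream to unpack
    console.log(result.fileType?.mime, 'isArchive:', result.isArchive);
  });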
@ -1,16 +0,0 @@
import type { SmartArchive } from './classes.smartarchive.js';
import * as plugins from './plugins.js';

import { unbzip2Stream } from './bzip2/index.js';

export class Bzip2Tools {
  smartArchiveRef: SmartArchive;

  constructor(smartArchiveRefArg: SmartArchive) {
    this.smartArchiveRef = smartArchiveRefArg;
  }

  getDecompressionStream() {
    return unbzip2Stream();
  }
}
@ -1,56 +0,0 @@
import type { SmartArchive } from './classes.smartarchive.js';
import * as plugins from './plugins.js';

// This class wraps fflate's gzip in a Node.js Transform stream for compression
export class CompressGunzipTransform extends plugins.stream.Transform {
  constructor() {
    super();
  }

  _transform(chunk: Buffer, encoding: BufferEncoding, callback: plugins.stream.TransformCallback) {
    // compress the chunk; the original called gunzip here, which would fail on plain input
    plugins.fflate.gzip(chunk, (err, compressed) => {
      if (err) {
        callback(err);
      } else {
        this.push(compressed);
        callback();
      }
    });
  }
}

// DecompressGunzipTransform class that extends the Node.js Transform stream to
// create a stream that decompresses GZip-compressed data using fflate's gunzip function
export class DecompressGunzipTransform extends plugins.stream.Transform {
  constructor() {
    super();
  }

  _transform(chunk: Buffer, encoding: BufferEncoding, callback: plugins.stream.TransformCallback) {
    // Use fflate's gunzip function to decompress the chunk
    plugins.fflate.gunzip(chunk, (err, decompressed) => {
      if (err) {
        // If an error occurs during decompression, pass the error to the callback
        callback(err);
      } else {
        // If decompression is successful, push the decompressed data into the stream
        this.push(decompressed);
        callback();
      }
    });
  }
}

export class GzipTools {
  constructor() {}

  public getCompressionStream() {
    return new CompressGunzipTransform();
  }

  public getDecompressionStream() {
    return new DecompressGunzipTransform();
  }
}
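A compression sketch for GzipTools (file names hypothetical). Note that both transforms above operate per written chunk, so decompression in this form assumes each written chunk is a complete gzip member:

import * as fs from 'fs';
import { GzipTools } from './classes.gziptools.js';

const gzipTools = new GzipTools();
// compress a file to disk
fs.createReadStream('notes.txt')
  .pipe(gzipTools.getCompressionStream())
  .pipe(fs.createWriteStream('notes.txt.gz'));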
@ -1,176 +0,0 @@
import * as plugins from './plugins.js';
import * as paths from './paths.js';

import { Bzip2Tools } from './classes.bzip2tools.js';
import { GzipTools } from './classes.gziptools.js';
import { TarTools } from './classes.tartools.js';
import { ZipTools } from './classes.ziptools.js';

import { ArchiveAnalyzer, type IAnalyzedResult } from './classes.archiveanalyzer.js';

import type { from } from '@push.rocks/smartrx/dist_ts/smartrx.plugins.rxjs.js';

export class SmartArchive {
  // STATIC
  public static async fromArchiveUrl(urlArg: string): Promise<SmartArchive> {
    const smartArchiveInstance = new SmartArchive();
    smartArchiveInstance.sourceUrl = urlArg;
    return smartArchiveInstance;
  }

  public static async fromArchiveFile(filePathArg: string): Promise<SmartArchive> {
    const smartArchiveInstance = new SmartArchive();
    smartArchiveInstance.sourceFilePath = filePathArg;
    return smartArchiveInstance;
  }

  public static async fromArchiveStream(
    streamArg: plugins.stream.Readable | plugins.stream.Duplex | plugins.stream.Transform
  ): Promise<SmartArchive> {
    const smartArchiveInstance = new SmartArchive();
    smartArchiveInstance.sourceStream = streamArg;
    return smartArchiveInstance;
  }

  // INSTANCE
  public tarTools = new TarTools();
  public zipTools = new ZipTools();
  public gzipTools = new GzipTools();
  public bzip2Tools = new Bzip2Tools(this);
  public archiveAnalyzer = new ArchiveAnalyzer(this);

  public sourceUrl: string;
  public sourceFilePath: string;
  public sourceStream: plugins.stream.Readable | plugins.stream.Duplex | plugins.stream.Transform;

  public archiveName: string;
  public singleFileMode: boolean = false;

  public addedDirectories: string[] = [];
  public addedFiles: (plugins.smartfile.SmartFile | plugins.smartfile.StreamFile)[] = [];
  public addedUrls: string[] = [];

  constructor() {}

  /**
   * gets the original archive stream
   */
  public async getArchiveStream() {
    if (this.sourceStream) {
      return this.sourceStream;
    }
    if (this.sourceUrl) {
      const urlStream = await plugins.smartrequest.getStream(this.sourceUrl);
      return urlStream;
    }
    if (this.sourceFilePath) {
      const fileStream = plugins.smartfile.fs.toReadStream(this.sourceFilePath);
      return fileStream;
    }
  }

  public async exportToTarGzStream() {
    const tarPackStream = await this.tarTools.getPackStream();
    const gzipStream = await this.gzipTools.getCompressionStream();
    const archiveStream = tarPackStream.pipe(gzipStream);
    return archiveStream;
  }

  public async exportToFs(targetDir: string, fileNameArg?: string): Promise<void> {
    const done = plugins.smartpromise.defer<void>();
    const streamFileStream = await this.exportToStreamOfStreamFiles();
    streamFileStream.pipe(
      new plugins.smartstream.SmartDuplex({
        objectMode: true,
        writeFunction: async (streamFileArg: plugins.smartfile.StreamFile, streamtools) => {
          // write each emitted StreamFile to its relative path below targetDir
          const writeDone = plugins.smartpromise.defer<void>();
          console.log(streamFileArg.relativeFilePath ? streamFileArg.relativeFilePath : 'no relative path');
          const streamFile = streamFileArg;
          const readStream = await streamFile.createReadStream();
          await plugins.smartfile.fs.ensureDir(targetDir);
          const writePath = plugins.path.join(
            targetDir,
            streamFile.relativeFilePath || fileNameArg
          );
          await plugins.smartfile.fs.ensureDir(plugins.path.dirname(writePath));
          const writeStream = plugins.smartfile.fsStream.createWriteStream(writePath);
          readStream.pipe(writeStream);
          writeStream.on('finish', () => {
            writeDone.resolve();
          });
          await writeDone.promise;
        },
        finalFunction: async () => {
          done.resolve();
        },
      })
    );
    return done.promise;
  }

  public async exportToStreamOfStreamFiles() {
    const streamFileIntake = new plugins.smartstream.StreamIntake<plugins.smartfile.StreamFile>({
      objectMode: true,
    });
    const archiveStream = await this.getArchiveStream();
    const createAnalyzedStream = () => this.archiveAnalyzer.getAnalyzedStream();

    // lets create a function that can be called multiple times to unpack layers of archives
    const createUnpackStream = () =>
      plugins.smartstream.createTransformFunction<IAnalyzedResult, any>(
        async (analyzedResultChunk) => {
          if (analyzedResultChunk.fileType?.mime === 'application/x-tar') {
            const tarStream = analyzedResultChunk.decompressionStream as plugins.tarStream.Extract;
            tarStream.on('entry', async (header, stream, next) => {
              if (header.type === 'directory') {
                console.log(`tar stream directory: ${header.name} ... skipping!`);
                next();
                return;
              }
              console.log(`tar stream file: ${header.name}`);
              const streamfile = plugins.smartfile.StreamFile.fromStream(stream, header.name);
              streamFileIntake.push(streamfile);
              stream.on('end', function () {
                next(); // ready for next entry
              });
            });
            tarStream.on('finish', function () {
              console.log('finished');
              streamFileIntake.signalEnd();
            });
            analyzedResultChunk.resultStream.pipe(analyzedResultChunk.decompressionStream);
          } else if (analyzedResultChunk.fileType?.mime === 'application/zip') {
            analyzedResultChunk.resultStream
              .pipe(analyzedResultChunk.decompressionStream)
              .pipe(new plugins.smartstream.SmartDuplex({
                objectMode: true,
                writeFunction: async (streamFileArg: plugins.smartfile.StreamFile, streamtools) => {
                  streamFileIntake.push(streamFileArg);
                },
                finalFunction: async () => {
                  streamFileIntake.signalEnd();
                }
              }));
          } else if (analyzedResultChunk.isArchive && analyzedResultChunk.decompressionStream) {
            // nested archive (e.g. .tar.gz): decompress, then analyze and unpack again
            analyzedResultChunk.resultStream
              .pipe(analyzedResultChunk.decompressionStream)
              .pipe(createAnalyzedStream())
              .pipe(createUnpackStream());
          } else {
            // plain file: pass it through as a single StreamFile
            const streamFile = plugins.smartfile.StreamFile.fromStream(
              analyzedResultChunk.resultStream,
              analyzedResultChunk.fileType?.ext
            );
            streamFileIntake.push(streamFile);
            streamFileIntake.signalEnd();
          }
        },
        {
          objectMode: true,
        }
      );

    archiveStream.pipe(createAnalyzedStream()).pipe(createUnpackStream());
    return streamFileIntake;
  }
}
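An end-to-end usage sketch for the SmartArchive class above, assuming an ESM context with top-level await (the URL and target directory are hypothetical):

import { SmartArchive } from './classes.smartarchive.js';

// download an archive and unpack all contained files to disk
const archive = await SmartArchive.fromArchiveUrl('https://example.com/release.tar.gz');
await archive.exportToFs('./.nogit/unpacked');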
@ -1,127 +0,0 @@
import type { SmartArchive } from './classes.smartarchive.js';
import * as plugins from './plugins.js';

export class TarTools {
  // INSTANCE
  constructor() {}

  // packing
  public async addFileToPack(
    pack: plugins.tarStream.Pack,
    optionsArg: {
      fileName?: string;
      content?:
        | string
        | Buffer
        | plugins.smartstream.stream.Readable
        | plugins.smartfile.SmartFile
        | plugins.smartfile.StreamFile;
      byteLength?: number;
      filePath?: string;
    }
  ): Promise<void> {
    return new Promise<void>(async (resolve, reject) => {
      let fileName: string | null = null;

      if (optionsArg.fileName) {
        fileName = optionsArg.fileName;
      } else if (optionsArg.content instanceof plugins.smartfile.SmartFile) {
        fileName = (optionsArg.content as plugins.smartfile.SmartFile).relative;
      } else if (optionsArg.content instanceof plugins.smartfile.StreamFile) {
        fileName = (optionsArg.content as plugins.smartfile.StreamFile).relativeFilePath;
      } else if (optionsArg.filePath) {
        fileName = optionsArg.filePath;
      }

      /**
       * contentByteLength is used to set the size of the entry in the tar file
       */
      let contentByteLength: number;
      if (optionsArg.byteLength) {
        contentByteLength = optionsArg.byteLength;
      } else if (typeof optionsArg.content === 'string') {
        contentByteLength = Buffer.byteLength(optionsArg.content, 'utf8');
      } else if (Buffer.isBuffer(optionsArg.content)) {
        contentByteLength = optionsArg.content.length;
      } else if (optionsArg.content instanceof plugins.smartfile.SmartFile) {
        contentByteLength = await optionsArg.content.getSize(); // assuming SmartFile has a getSize method
      } else if (optionsArg.content instanceof plugins.smartfile.StreamFile) {
        contentByteLength = await optionsArg.content.getSize(); // assuming StreamFile has a getSize method
      } else if (optionsArg.content instanceof plugins.smartstream.stream.Readable) {
        console.warn(
          '@push.rocks/smartarchive: When streaming, it is recommended to provide byteLength, if known.'
        );
      } else if (optionsArg.filePath) {
        const fileStat = await plugins.smartfile.fs.stat(optionsArg.filePath);
        contentByteLength = fileStat.size;
      }

      /**
       * here we try to harmonize all kinds of entries towards a readable stream
       */
      let content: plugins.smartstream.stream.Readable;
      if (Buffer.isBuffer(optionsArg.content)) {
        content = plugins.smartstream.stream.Readable.from(optionsArg.content);
      } else if (typeof optionsArg.content === 'string') {
        content = plugins.smartstream.stream.Readable.from(Buffer.from(optionsArg.content));
      } else if (optionsArg.content instanceof plugins.smartfile.SmartFile) {
        content = plugins.smartstream.stream.Readable.from(optionsArg.content.contents);
      } else if (optionsArg.content instanceof plugins.smartfile.StreamFile) {
        content = await optionsArg.content.createReadStream();
      } else if (optionsArg.content instanceof plugins.smartstream.stream.Readable) {
        content = optionsArg.content;
      }

      const entry = pack.entry(
        {
          name: fileName,
          ...(contentByteLength
            ? {
                size: contentByteLength,
              }
            : null),
        },
        (err: Error) => {
          // this callback fires once the entry has been fully consumed by the pack stream
          if (err) {
            reject(err);
          } else {
            resolve();
          }
        }
      );

      content.pipe(entry);
    });
  }

  /**
   * packs a directory from disk into a tar stream
   * @param directoryPath
   */
  public async packDirectory(directoryPath: string) {
    const fileTree = await plugins.smartfile.fs.listFileTree(directoryPath, '**/*');
    const pack = await this.getPackStream();
    for (const filePath of fileTree) {
      const absolutePath = plugins.path.join(directoryPath, filePath);
      const fileStat = await plugins.smartfile.fs.stat(absolutePath);
      await this.addFileToPack(pack, {
        byteLength: fileStat.size,
        filePath: absolutePath,
        fileName: filePath,
        content: plugins.smartfile.fsStream.createReadStream(absolutePath),
      });
    }
    return pack;
  }

  public async getPackStream() {
    const pack = plugins.tarStream.pack();
    return pack;
  }

  // extracting
  getDecompressionStream() {
    return plugins.tarStream.extract();
  }
}
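A packing sketch for TarTools, assuming top-level await (the paths are hypothetical). tar-stream emits nothing final until finalize() is called on the pack:

import * as fs from 'fs';
import { TarTools } from './classes.tartools.js';

const tarTools = new TarTools();
// pack a directory and write the resulting tarball to disk
const pack = await tarTools.packDirectory('./my-folder');
pack.finalize();
pack.pipe(fs.createWriteStream('my-folder.tar'));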
@ -1,74 +0,0 @@
import type { SmartArchive } from './classes.smartarchive.js';
import * as plugins from './plugins.js';

class DecompressZipTransform extends plugins.smartstream.SmartDuplex<ArrayBufferLike> {
  private streamtools: plugins.smartstream.IStreamTools;
  private unzipper = new plugins.fflate.Unzip(async (fileArg) => {
    let resultBuffer: Buffer;
    fileArg.ondata = async (flateError, dat, final) => {
      // accumulate decompressed chunks until the zip entry is complete
      resultBuffer = resultBuffer
        ? Buffer.concat([resultBuffer, Buffer.from(dat)])
        : Buffer.from(dat);
      if (final) {
        const streamFile = plugins.smartfile.StreamFile.fromBuffer(resultBuffer);
        streamFile.relativeFilePath = fileArg.name;
        this.streamtools.push(streamFile);
      }
    };
    fileArg.start();
  });
  constructor() {
    super({
      objectMode: true,
      writeFunction: async (chunkArg: Buffer, streamtoolsArg) => {
        if (!this.streamtools) {
          this.streamtools = streamtoolsArg;
        }
        this.unzipper.push(chunkArg, false);
      },
      finalFunction: async () => {
        this.unzipper.push(Buffer.from(''), true);
        await plugins.smartdelay.delayFor(0);
        await this.streamtools.push(null);
      }
    });
    this.unzipper.register(plugins.fflate.UnzipInflate);
  }
}

// This class wraps fflate's zip in a Node.js Transform stream for compression
export class CompressZipTransform extends plugins.stream.Transform {
  files: { [fileName: string]: Uint8Array };

  constructor() {
    super();
    this.files = {};
  }

  _transform(chunk: Buffer, encoding: BufferEncoding, callback: plugins.stream.TransformCallback) {
    // Simple example: storing chunks in memory before finalizing the ZIP in _flush
    this.files['file.txt'] = new Uint8Array(chunk);
    callback();
  }

  _flush(callback: plugins.stream.TransformCallback) {
    plugins.fflate.zip(this.files, (err, zipped) => {
      if (err) {
        callback(err);
      } else {
        this.push(Buffer.from(zipped));
        callback();
      }
    });
  }
}

export class ZipTools {
  constructor() {}

  public getCompressionStream() {
    return new CompressZipTransform();
  }

  public getDecompressionStream() {
    return new DecompressZipTransform();
  }
}
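An extraction sketch for ZipTools: the decompression stream is object-mode and emits one StreamFile per zip entry (the input name is hypothetical):

import * as fs from 'fs';
import { ZipTools } from './classes.ziptools.js';

const zipTools = new ZipTools();
fs.createReadStream('bundle.zip')
  .pipe(zipTools.getDecompressionStream())
  .on('data', (streamFile) => {
    console.log('extracted:', streamFile.relativeFilePath);
  });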
@ -1,4 +1 @@
export * from './classes.smartarchive.js';
export * from './classes.tartools.js';
export * from './classes.ziptools.js';
export * from './classes.gziptools.js';
export * from './smartarchive.classes.smartarchive';
@ -1,7 +0,0 @@
import * as plugins from './plugins.js';

export const packageDir = plugins.path.join(
  plugins.smartpath.get.dirnameFromImportMetaUrl(import.meta.url),
  '../'
);
export const nogitDir = plugins.path.join(packageDir, './.nogit');
@ -1,25 +0,0 @@
// node native scope
import * as path from 'path';
import * as stream from 'stream';

export { path, stream };

// @push.rocks scope
import * as smartfile from '@push.rocks/smartfile';
import * as smartdelay from '@push.rocks/smartdelay';
import * as smartpath from '@push.rocks/smartpath';
import * as smartpromise from '@push.rocks/smartpromise';
import * as smartrequest from '@push.rocks/smartrequest';
import * as smartunique from '@push.rocks/smartunique';
import * as smartstream from '@push.rocks/smartstream';
import * as smartrx from '@push.rocks/smartrx';
import * as smarturl from '@push.rocks/smarturl';

export { smartfile, smartdelay, smartpath, smartpromise, smartrequest, smartunique, smartstream, smartrx, smarturl };

// third party scope
import * as fileType from 'file-type';
import * as fflate from 'fflate';
import tarStream from 'tar-stream';

export { fileType, fflate, tarStream };
38
ts/smartarchive.classes.smartarchive.ts
Normal file
@ -0,0 +1,38 @@
import * as plugins from './smartarchive.plugins';
import * as paths from './smartarchive.paths';

export class SmartArchive {
  public archiveDirectory: string;
  constructor() {}

  /**
   * extracts an archive from a given url
   */
  public async extractArchiveFromUrl(urlArg: string, targetDir: string) {
    const parsedPath = plugins.path.parse(urlArg);
    const uniqueFileName = plugins.smartunique.uni() + parsedPath.ext;
    const downloadPath = plugins.path.join(paths.nogitDir, uniqueFileName);
    const downloadedArchive = (await plugins.smartrequest.getBinary(urlArg)).body;
    await plugins.smartfile.memory.toFs(downloadedArchive, downloadPath);
    await this.extractArchiveFromFilePath(downloadPath, targetDir);
    await plugins.smartfile.fs.remove(downloadPath);
  }

  /**
   * extracts an archive from a given filePath on disk
   * @param filePathArg
   * @param targetDir
   */
  public async extractArchiveFromFilePath(filePathArg: string, targetDir: string) {
    const parsedPath = plugins.path.parse(filePathArg);
    switch (parsedPath.ext) {
      case '.tgz':
        console.log(`detected a .tgz archive`);
        await plugins.tar.extract({
          file: filePathArg,
          cwd: targetDir
        });
        break;
    }
  }
}
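A usage sketch for this legacy (pre-2.x) SmartArchive API, assuming top-level await (the URL and target directory are hypothetical):

import { SmartArchive } from './smartarchive.classes.smartarchive';

const smartArchive = new SmartArchive();
await smartArchive.extractArchiveFromUrl('https://example.com/package.tgz', './extracted');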
5
ts/smartarchive.paths.ts
Normal file
@ -0,0 +1,5 @@
import * as plugins from './smartarchive.plugins';

export const packageDir = plugins.path.join(__dirname, '../');
export const nogitDir = plugins.path.join(__dirname, './.nogit');
plugins.smartfile.fs.ensureDir(nogitDir);
17
ts/smartarchive.plugins.ts
Normal file
@ -0,0 +1,17 @@
// node native scope
import path from 'path';

export { path };

// @pushrocks scope
import * as smartfile from '@pushrocks/smartfile';
import * as smartpath from '@pushrocks/smartpath';
import * as smartrequest from '@pushrocks/smartrequest';
import * as smartunique from '@pushrocks/smartunique';

export { smartfile, smartpath, smartrequest, smartunique };

// third party scope
import tar from 'tar';

export { tar };
@ -1,14 +0,0 @@
{
  "compilerOptions": {
    "experimentalDecorators": true,
    "useDefineForClassFields": false,
    "target": "ES2022",
    "module": "NodeNext",
    "moduleResolution": "NodeNext",
    "esModuleInterop": true,
    "verbatimModuleSyntax": true
  },
  "exclude": [
    "dist_*/**/*.d.ts"
  ]
}
17
tslint.json
Normal file
@ -0,0 +1,17 @@
{
  "extends": ["tslint:latest", "tslint-config-prettier"],
  "rules": {
    "semicolon": [true, "always"],
    "no-console": false,
    "ordered-imports": false,
    "object-literal-sort-keys": false,
    "member-ordering": {
      "options": {
        "order": [
          "static-method"
        ]
      }
    }
  },
  "defaultSeverity": "warning"
}