feat(archive): introduce ts_shared browser-compatible layer, refactor Node-specific tools to wrap shared implementations, and modernize archive handling
@@ -3,6 +3,6 @@
 */
export const commitinfo = {
  name: '@push.rocks/smartarchive',
  version: '5.0.1',
  version: '5.1.0',
  description: 'A library for working with archive files, providing utilities for compressing and decompressing data.'
}
@@ -1,60 +0,0 @@
import type { IBitReader } from '../interfaces.js';

const BITMASK = [0, 0x01, 0x03, 0x07, 0x0f, 0x1f, 0x3f, 0x7f, 0xff] as const;

/**
 * Creates a bit reader function for BZIP2 decompression.
 * Takes a buffer iterator as input and returns a function that reads bits.
 */
export function bitIterator(nextBuffer: () => Buffer): IBitReader {
  let bit = 0;
  let byte = 0;
  let bytes = nextBuffer();
  let _bytesRead = 0;

  const reader = function (n: number | null): number | void {
    if (n === null && bit !== 0) {
      // align to byte boundary
      bit = 0;
      byte++;
      return;
    }

    let result = 0;
    let remaining = n as number;

    while (remaining > 0) {
      if (byte >= bytes.length) {
        byte = 0;
        bytes = nextBuffer();
      }

      const left = 8 - bit;

      if (bit === 0 && remaining > 0) {
        _bytesRead++;
      }

      if (remaining >= left) {
        result <<= left;
        result |= BITMASK[left] & bytes[byte++];
        bit = 0;
        remaining -= left;
      } else {
        result <<= remaining;
        result |= (bytes[byte] & (BITMASK[remaining] << (8 - remaining - bit))) >> (8 - remaining - bit);
        bit += remaining;
        remaining = 0;
      }
    }

    return result;
  } as IBitReader;

  Object.defineProperty(reader, 'bytesRead', {
    get: () => _bytesRead,
    enumerable: true,
  });

  return reader;
}
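For orientation, a minimal sketch of how a bit reader like the one above is driven when checking a BZIP2 stream header (hypothetical usage, not part of this commit; the input bytes are made up):

import { bitIterator } from './bititerator.js';

// Hypothetical driver: feed the reader from an in-memory queue of buffers.
const queue: Buffer[] = [Buffer.from('BZh9')];
const bits = bitIterator(() => queue.shift()!);

const magic = bits(8 * 3) as number;             // 0x425A68 === 4348520 ('B', 'Z', 'h')
const blockSizeDigit = (bits(8) as number) - 48; // ASCII '1'..'9' -> 1..9
console.log(magic === 4348520, blockSizeDigit, bits.bytesRead);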
@@ -1,449 +0,0 @@
|
||||
import { Bzip2Error, BZIP2_ERROR_CODES } from '../errors.js';
|
||||
import type { IBitReader, IHuffmanGroup } from '../interfaces.js';
|
||||
|
||||
// Re-export Bzip2Error for backward compatibility
|
||||
export { Bzip2Error };
|
||||
|
||||
/**
|
||||
* Throw a BZIP2 error with proper error code
|
||||
*/
|
||||
function throwError(message: string, code: string = BZIP2_ERROR_CODES.INVALID_BLOCK_DATA): never {
|
||||
throw new Bzip2Error(message, code);
|
||||
}
|
||||
|
||||
/**
|
||||
* BZIP2 decompression implementation
|
||||
*/
|
||||
export class Bzip2 {
|
||||
// CRC32 lookup table for BZIP2
|
||||
public readonly crcTable: readonly number[] = [
|
||||
0x00000000, 0x04c11db7, 0x09823b6e, 0x0d4326d9, 0x130476dc, 0x17c56b6b,
|
||||
0x1a864db2, 0x1e475005, 0x2608edb8, 0x22c9f00f, 0x2f8ad6d6, 0x2b4bcb61,
|
||||
0x350c9b64, 0x31cd86d3, 0x3c8ea00a, 0x384fbdbd, 0x4c11db70, 0x48d0c6c7,
|
||||
0x4593e01e, 0x4152fda9, 0x5f15adac, 0x5bd4b01b, 0x569796c2, 0x52568b75,
|
||||
0x6a1936c8, 0x6ed82b7f, 0x639b0da6, 0x675a1011, 0x791d4014, 0x7ddc5da3,
|
||||
0x709f7b7a, 0x745e66cd, 0x9823b6e0, 0x9ce2ab57, 0x91a18d8e, 0x95609039,
|
||||
0x8b27c03c, 0x8fe6dd8b, 0x82a5fb52, 0x8664e6e5, 0xbe2b5b58, 0xbaea46ef,
|
||||
0xb7a96036, 0xb3687d81, 0xad2f2d84, 0xa9ee3033, 0xa4ad16ea, 0xa06c0b5d,
|
||||
0xd4326d90, 0xd0f37027, 0xddb056fe, 0xd9714b49, 0xc7361b4c, 0xc3f706fb,
|
||||
0xceb42022, 0xca753d95, 0xf23a8028, 0xf6fb9d9f, 0xfbb8bb46, 0xff79a6f1,
|
||||
0xe13ef6f4, 0xe5ffeb43, 0xe8bccd9a, 0xec7dd02d, 0x34867077, 0x30476dc0,
|
||||
0x3d044b19, 0x39c556ae, 0x278206ab, 0x23431b1c, 0x2e003dc5, 0x2ac12072,
|
||||
0x128e9dcf, 0x164f8078, 0x1b0ca6a1, 0x1fcdbb16, 0x018aeb13, 0x054bf6a4,
|
||||
0x0808d07d, 0x0cc9cdca, 0x7897ab07, 0x7c56b6b0, 0x71159069, 0x75d48dde,
|
||||
0x6b93dddb, 0x6f52c06c, 0x6211e6b5, 0x66d0fb02, 0x5e9f46bf, 0x5a5e5b08,
|
||||
0x571d7dd1, 0x53dc6066, 0x4d9b3063, 0x495a2dd4, 0x44190b0d, 0x40d816ba,
|
||||
0xaca5c697, 0xa864db20, 0xa527fdf9, 0xa1e6e04e, 0xbfa1b04b, 0xbb60adfc,
|
||||
0xb6238b25, 0xb2e29692, 0x8aad2b2f, 0x8e6c3698, 0x832f1041, 0x87ee0df6,
|
||||
0x99a95df3, 0x9d684044, 0x902b669d, 0x94ea7b2a, 0xe0b41de7, 0xe4750050,
|
||||
0xe9362689, 0xedf73b3e, 0xf3b06b3b, 0xf771768c, 0xfa325055, 0xfef34de2,
|
||||
0xc6bcf05f, 0xc27dede8, 0xcf3ecb31, 0xcbffd686, 0xd5b88683, 0xd1799b34,
|
||||
0xdc3abded, 0xd8fba05a, 0x690ce0ee, 0x6dcdfd59, 0x608edb80, 0x644fc637,
|
||||
0x7a089632, 0x7ec98b85, 0x738aad5c, 0x774bb0eb, 0x4f040d56, 0x4bc510e1,
|
||||
0x46863638, 0x42472b8f, 0x5c007b8a, 0x58c1663d, 0x558240e4, 0x51435d53,
|
||||
0x251d3b9e, 0x21dc2629, 0x2c9f00f0, 0x285e1d47, 0x36194d42, 0x32d850f5,
|
||||
0x3f9b762c, 0x3b5a6b9b, 0x0315d626, 0x07d4cb91, 0x0a97ed48, 0x0e56f0ff,
|
||||
0x1011a0fa, 0x14d0bd4d, 0x19939b94, 0x1d528623, 0xf12f560e, 0xf5ee4bb9,
|
||||
0xf8ad6d60, 0xfc6c70d7, 0xe22b20d2, 0xe6ea3d65, 0xeba91bbc, 0xef68060b,
|
||||
0xd727bbb6, 0xd3e6a601, 0xdea580d8, 0xda649d6f, 0xc423cd6a, 0xc0e2d0dd,
|
||||
0xcda1f604, 0xc960ebb3, 0xbd3e8d7e, 0xb9ff90c9, 0xb4bcb610, 0xb07daba7,
|
||||
0xae3afba2, 0xaafbe615, 0xa7b8c0cc, 0xa379dd7b, 0x9b3660c6, 0x9ff77d71,
|
||||
0x92b45ba8, 0x9675461f, 0x8832161a, 0x8cf30bad, 0x81b02d74, 0x857130c3,
|
||||
0x5d8a9099, 0x594b8d2e, 0x5408abf7, 0x50c9b640, 0x4e8ee645, 0x4a4ffbf2,
|
||||
0x470cdd2b, 0x43cdc09c, 0x7b827d21, 0x7f436096, 0x7200464f, 0x76c15bf8,
|
||||
0x68860bfd, 0x6c47164a, 0x61043093, 0x65c52d24, 0x119b4be9, 0x155a565e,
|
||||
0x18197087, 0x1cd86d30, 0x029f3d35, 0x065e2082, 0x0b1d065b, 0x0fdc1bec,
|
||||
0x3793a651, 0x3352bbe6, 0x3e119d3f, 0x3ad08088, 0x2497d08d, 0x2056cd3a,
|
||||
0x2d15ebe3, 0x29d4f654, 0xc5a92679, 0xc1683bce, 0xcc2b1d17, 0xc8ea00a0,
|
||||
0xd6ad50a5, 0xd26c4d12, 0xdf2f6bcb, 0xdbee767c, 0xe3a1cbc1, 0xe760d676,
|
||||
0xea23f0af, 0xeee2ed18, 0xf0a5bd1d, 0xf464a0aa, 0xf9278673, 0xfde69bc4,
|
||||
0x89b8fd09, 0x8d79e0be, 0x803ac667, 0x84fbdbd0, 0x9abc8bd5, 0x9e7d9662,
|
||||
0x933eb0bb, 0x97ffad0c, 0xafb010b1, 0xab710d06, 0xa6322bdf, 0xa2f33668,
|
||||
0xbcb4666d, 0xb8757bda, 0xb5365d03, 0xb1f740b4,
|
||||
];
|
||||
|
||||
// State arrays initialized in header()
|
||||
private byteCount!: Int32Array;
|
||||
private symToByte!: Uint8Array;
|
||||
private mtfSymbol!: Int32Array;
|
||||
private selectors!: Uint8Array;
|
||||
|
||||
/**
|
||||
* Create a bit reader from a byte array
|
||||
*/
|
||||
array(bytes: Uint8Array | Buffer): (n: number) => number {
|
||||
let bit = 0;
|
||||
let byte = 0;
|
||||
const BITMASK = [0, 0x01, 0x03, 0x07, 0x0f, 0x1f, 0x3f, 0x7f, 0xff];
|
||||
|
||||
return function (n: number): number {
|
||||
let result = 0;
|
||||
while (n > 0) {
|
||||
const left = 8 - bit;
|
||||
if (n >= left) {
|
||||
result <<= left;
|
||||
result |= BITMASK[left] & bytes[byte++];
|
||||
bit = 0;
|
||||
n -= left;
|
||||
} else {
|
||||
result <<= n;
|
||||
result |= (bytes[byte] & (BITMASK[n] << (8 - n - bit))) >> (8 - n - bit);
|
||||
bit += n;
|
||||
n = 0;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Simple decompression from a buffer
|
||||
*/
|
||||
simple(srcbuffer: Uint8Array | Buffer, stream: (byte: number) => void): void {
|
||||
const bits = this.array(srcbuffer);
|
||||
const size = this.header(bits as IBitReader);
|
||||
let ret: number | null = 0;
|
||||
const bufsize = 100000 * size;
|
||||
const buf = new Int32Array(bufsize);
|
||||
|
||||
do {
|
||||
ret = this.decompress(bits as IBitReader, stream, buf, bufsize, ret);
|
||||
} while (ret !== null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse BZIP2 header and return block size
|
||||
*/
|
||||
header(bits: IBitReader): number {
|
||||
this.byteCount = new Int32Array(256);
|
||||
this.symToByte = new Uint8Array(256);
|
||||
this.mtfSymbol = new Int32Array(256);
|
||||
this.selectors = new Uint8Array(0x8000);
|
||||
|
||||
if (bits(8 * 3) !== 4348520) {
|
||||
throwError('No BZIP2 magic number found at start of stream', BZIP2_ERROR_CODES.NO_MAGIC_NUMBER);
|
||||
}
|
||||
|
||||
const blockSize = (bits(8) as number) - 48;
|
||||
if (blockSize < 1 || blockSize > 9) {
|
||||
throwError('Invalid BZIP2 archive: block size must be 1-9', BZIP2_ERROR_CODES.INVALID_ARCHIVE);
|
||||
}
|
||||
return blockSize;
|
||||
}
|
||||
|
||||
/**
|
||||
* Decompress a BZIP2 block
|
||||
*/
|
||||
decompress(
|
||||
bits: IBitReader,
|
||||
stream: (byte: number) => void,
|
||||
buf: Int32Array,
|
||||
bufsize: number,
|
||||
streamCRC?: number | null
|
||||
): number | null {
|
||||
const MAX_HUFCODE_BITS = 20;
|
||||
const MAX_SYMBOLS = 258;
|
||||
const SYMBOL_RUNA = 0;
|
||||
const SYMBOL_RUNB = 1;
|
||||
const GROUP_SIZE = 50;
|
||||
let crc = 0 ^ -1;
|
||||
|
||||
// Read block header
|
||||
let headerHex = '';
|
||||
for (let i = 0; i < 6; i++) {
|
||||
headerHex += (bits(8) as number).toString(16);
|
||||
}
|
||||
|
||||
// Check for end-of-stream marker
|
||||
if (headerHex === '177245385090') {
|
||||
const finalCRC = bits(32) as number | 0;
|
||||
if (finalCRC !== streamCRC) {
|
||||
throwError('CRC32 mismatch: stream checksum verification failed', BZIP2_ERROR_CODES.CRC_MISMATCH);
|
||||
}
|
||||
// Align stream to byte boundary
|
||||
bits(null);
|
||||
return null;
|
||||
}
|
||||
|
||||
// Verify block signature (pi digits)
|
||||
if (headerHex !== '314159265359') {
|
||||
throwError('Invalid block header: expected pi signature (0x314159265359)', BZIP2_ERROR_CODES.INVALID_BLOCK_DATA);
|
||||
}
|
||||
|
||||
const crcblock = bits(32) as number | 0;
|
||||
|
||||
if (bits(1)) {
|
||||
throwError('Unsupported obsolete BZIP2 format version', BZIP2_ERROR_CODES.INVALID_ARCHIVE);
|
||||
}
|
||||
|
||||
const origPtr = bits(24) as number;
|
||||
if (origPtr > bufsize) {
|
||||
throwError('Initial position larger than buffer size', BZIP2_ERROR_CODES.BUFFER_OVERFLOW);
|
||||
}
|
||||
|
||||
// Read symbol map
|
||||
let symbolMapBits = bits(16) as number;
|
||||
let symTotal = 0;
|
||||
for (let i = 0; i < 16; i++) {
|
||||
if (symbolMapBits & (1 << (15 - i))) {
|
||||
const subMap = bits(16) as number;
|
||||
for (let j = 0; j < 16; j++) {
|
||||
if (subMap & (1 << (15 - j))) {
|
||||
this.symToByte[symTotal++] = 16 * i + j;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Read Huffman groups
|
||||
const groupCount = bits(3) as number;
|
||||
if (groupCount < 2 || groupCount > 6) {
|
||||
throwError('Invalid group count: must be between 2 and 6', BZIP2_ERROR_CODES.INVALID_HUFFMAN);
|
||||
}
|
||||
|
||||
const nSelectors = bits(15) as number;
|
||||
if (nSelectors === 0) {
|
||||
throwError('Invalid selector count: cannot be zero', BZIP2_ERROR_CODES.INVALID_SELECTOR);
|
||||
}
|
||||
|
||||
// Initialize MTF symbol array
|
||||
for (let i = 0; i < groupCount; i++) {
|
||||
this.mtfSymbol[i] = i;
|
||||
}
|
||||
|
||||
// Read selectors using MTF decoding
|
||||
for (let i = 0; i < nSelectors; i++) {
|
||||
let j = 0;
|
||||
while (bits(1)) {
|
||||
j++;
|
||||
if (j >= groupCount) {
|
||||
throwError('Invalid MTF index: exceeds group count', BZIP2_ERROR_CODES.INVALID_HUFFMAN);
|
||||
}
|
||||
}
|
||||
const uc = this.mtfSymbol[j];
|
||||
for (let k = j - 1; k >= 0; k--) {
|
||||
this.mtfSymbol[k + 1] = this.mtfSymbol[k];
|
||||
}
|
||||
this.mtfSymbol[0] = uc;
|
||||
this.selectors[i] = uc;
|
||||
}
|
||||
|
||||
// Build Huffman tables
|
||||
const symCount = symTotal + 2;
|
||||
const groups: IHuffmanGroup[] = [];
|
||||
const length = new Uint8Array(MAX_SYMBOLS);
|
||||
const temp = new Uint16Array(MAX_HUFCODE_BITS + 1);
|
||||
|
||||
for (let j = 0; j < groupCount; j++) {
|
||||
let t = bits(5) as number;
|
||||
for (let i = 0; i < symCount; i++) {
|
||||
while (true) {
|
||||
if (t < 1 || t > MAX_HUFCODE_BITS) {
|
||||
throwError('Invalid Huffman code length: must be between 1 and 20', BZIP2_ERROR_CODES.INVALID_HUFFMAN);
|
||||
}
|
||||
if (!bits(1)) break;
|
||||
if (!bits(1)) t++;
|
||||
else t--;
|
||||
}
|
||||
length[i] = t;
|
||||
}
|
||||
|
||||
let minLen = length[0];
|
||||
let maxLen = length[0];
|
||||
for (let i = 1; i < symCount; i++) {
|
||||
if (length[i] > maxLen) maxLen = length[i];
|
||||
else if (length[i] < minLen) minLen = length[i];
|
||||
}
|
||||
|
||||
const hufGroup: IHuffmanGroup = {
|
||||
permute: new Int32Array(MAX_SYMBOLS),
|
||||
limit: new Int32Array(MAX_HUFCODE_BITS + 1),
|
||||
base: new Int32Array(MAX_HUFCODE_BITS + 1),
|
||||
minLen,
|
||||
maxLen,
|
||||
};
|
||||
groups[j] = hufGroup;
|
||||
|
||||
const base = hufGroup.base;
|
||||
const limit = hufGroup.limit;
|
||||
|
||||
let pp = 0;
|
||||
for (let i = minLen; i <= maxLen; i++) {
|
||||
for (let t = 0; t < symCount; t++) {
|
||||
if (length[t] === i) hufGroup.permute[pp++] = t;
|
||||
}
|
||||
}
|
||||
|
||||
for (let i = minLen; i <= maxLen; i++) {
|
||||
temp[i] = 0;
|
||||
limit[i] = 0;
|
||||
}
|
||||
for (let i = 0; i < symCount; i++) {
|
||||
temp[length[i]]++;
|
||||
}
|
||||
|
||||
pp = 0;
|
||||
let tt = 0;
|
||||
for (let i = minLen; i < maxLen; i++) {
|
||||
pp += temp[i];
|
||||
limit[i] = pp - 1;
|
||||
pp <<= 1;
|
||||
base[i + 1] = pp - (tt += temp[i]);
|
||||
}
|
||||
limit[maxLen] = pp + temp[maxLen] - 1;
|
||||
base[minLen] = 0;
|
||||
}
|
||||
|
||||
// Initialize for decoding
|
||||
for (let i = 0; i < 256; i++) {
|
||||
this.mtfSymbol[i] = i;
|
||||
this.byteCount[i] = 0;
|
||||
}
|
||||
|
||||
let runPos = 0;
|
||||
let count = 0;
|
||||
let symCountRemaining = 0;
|
||||
let selector = 0;
|
||||
let hufGroup = groups[0];
|
||||
let base = hufGroup.base;
|
||||
let limit = hufGroup.limit;
|
||||
|
||||
// Main decoding loop
|
||||
while (true) {
|
||||
if (!symCountRemaining--) {
|
||||
symCountRemaining = GROUP_SIZE - 1;
|
||||
if (selector >= nSelectors) {
|
||||
throwError('Invalid selector index: exceeds available groups', BZIP2_ERROR_CODES.INVALID_SELECTOR);
|
||||
}
|
||||
hufGroup = groups[this.selectors[selector++]];
|
||||
base = hufGroup.base;
|
||||
limit = hufGroup.limit;
|
||||
}
|
||||
|
||||
let i = hufGroup.minLen;
|
||||
let j = bits(i) as number;
|
||||
|
||||
while (true) {
|
||||
if (i > hufGroup.maxLen) {
|
||||
throwError('Huffman decoding error: bit length exceeds maximum allowed', BZIP2_ERROR_CODES.INVALID_HUFFMAN);
|
||||
}
|
||||
if (j <= limit[i]) break;
|
||||
i++;
|
||||
j = (j << 1) | (bits(1) as number);
|
||||
}
|
||||
|
||||
j -= base[i];
|
||||
if (j < 0 || j >= MAX_SYMBOLS) {
|
||||
throwError('Symbol index out of bounds during Huffman decoding', BZIP2_ERROR_CODES.INVALID_HUFFMAN);
|
||||
}
|
||||
|
||||
const nextSym = hufGroup.permute[j];
|
||||
|
||||
if (nextSym === SYMBOL_RUNA || nextSym === SYMBOL_RUNB) {
|
||||
if (!runPos) {
|
||||
runPos = 1;
|
||||
j = 0;
|
||||
}
|
||||
if (nextSym === SYMBOL_RUNA) j += runPos;
|
||||
else j += 2 * runPos;
|
||||
runPos <<= 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (runPos) {
|
||||
runPos = 0;
|
||||
const runLength = j;
|
||||
if (count + runLength > bufsize) {
|
||||
throwError('Run-length overflow: decoded run exceeds buffer capacity', BZIP2_ERROR_CODES.BUFFER_OVERFLOW);
|
||||
}
|
||||
const uc = this.symToByte[this.mtfSymbol[0]];
|
||||
this.byteCount[uc] += runLength;
|
||||
for (let t = 0; t < runLength; t++) {
|
||||
buf[count++] = uc;
|
||||
}
|
||||
}
|
||||
|
||||
if (nextSym > symTotal) break;
|
||||
|
||||
if (count >= bufsize) {
|
||||
throwError('Buffer overflow: decoded data exceeds buffer capacity', BZIP2_ERROR_CODES.BUFFER_OVERFLOW);
|
||||
}
|
||||
|
||||
const mtfIndex = nextSym - 1;
|
||||
const uc = this.mtfSymbol[mtfIndex];
|
||||
for (let k = mtfIndex - 1; k >= 0; k--) {
|
||||
this.mtfSymbol[k + 1] = this.mtfSymbol[k];
|
||||
}
|
||||
this.mtfSymbol[0] = uc;
|
||||
const decodedByte = this.symToByte[uc];
|
||||
this.byteCount[decodedByte]++;
|
||||
buf[count++] = decodedByte;
|
||||
}
|
||||
|
||||
if (origPtr < 0 || origPtr >= count) {
|
||||
throwError('Invalid original pointer: position outside decoded block', BZIP2_ERROR_CODES.INVALID_POSITION);
|
||||
}
|
||||
|
||||
// Inverse BWT transform
|
||||
let j = 0;
|
||||
for (let i = 0; i < 256; i++) {
|
||||
const k = j + this.byteCount[i];
|
||||
this.byteCount[i] = j;
|
||||
j = k;
|
||||
}
|
||||
|
||||
for (let i = 0; i < count; i++) {
|
||||
const uc = buf[i] & 0xff;
|
||||
buf[this.byteCount[uc]] |= i << 8;
|
||||
this.byteCount[uc]++;
|
||||
}
|
||||
|
||||
// Output decoded data
|
||||
let pos = 0;
|
||||
let current = 0;
|
||||
let run = 0;
|
||||
|
||||
if (count) {
|
||||
pos = buf[origPtr];
|
||||
current = pos & 0xff;
|
||||
pos >>= 8;
|
||||
run = -1;
|
||||
}
|
||||
|
||||
let remaining = count;
|
||||
while (remaining) {
|
||||
remaining--;
|
||||
const previous = current;
|
||||
pos = buf[pos];
|
||||
current = pos & 0xff;
|
||||
pos >>= 8;
|
||||
|
||||
let copies: number;
|
||||
let outbyte: number;
|
||||
|
||||
if (run++ === 3) {
|
||||
copies = current;
|
||||
outbyte = previous;
|
||||
current = -1;
|
||||
} else {
|
||||
copies = 1;
|
||||
outbyte = current;
|
||||
}
|
||||
|
||||
while (copies--) {
|
||||
crc = ((crc << 8) ^ this.crcTable[((crc >> 24) ^ outbyte) & 0xff]) & 0xffffffff;
|
||||
stream(outbyte);
|
||||
}
|
||||
|
||||
if (current !== previous) run = 0;
|
||||
}
|
||||
|
||||
crc = (crc ^ -1) >>> 0;
|
||||
if ((crc | 0) !== (crcblock | 0)) {
|
||||
throwError('CRC32 mismatch: block checksum verification failed', BZIP2_ERROR_CODES.CRC_MISMATCH);
|
||||
}
|
||||
|
||||
const newStreamCRC = (crc ^ (((streamCRC || 0) << 1) | ((streamCRC || 0) >>> 31))) & 0xffffffff;
|
||||
return newStreamCRC;
|
||||
}
|
||||
}
|
||||
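A short usage sketch for the one-shot simple() entry point above (the file path is a placeholder):

import * as fs from 'node:fs';
import { Bzip2 } from './bzip2.js';

// One-shot decode: simple() parses the header, then decompresses block by block,
// handing each output byte to the callback.
const compressed = fs.readFileSync('./fixture.bz2'); // placeholder path
const out: number[] = [];
new Bzip2().simple(compressed, (byte) => out.push(byte));
const decompressed = Buffer.from(out);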
@@ -1,105 +0,0 @@
|
||||
import * as plugins from '../plugins.js';
|
||||
import { Bzip2Error, BZIP2_ERROR_CODES } from '../errors.js';
|
||||
import type { IBitReader } from '../interfaces.js';
|
||||
|
||||
import { Bzip2 } from './bzip2.js';
|
||||
import { bitIterator } from './bititerator.js';
|
||||
|
||||
/**
|
||||
* Creates a streaming BZIP2 decompression transform
|
||||
*/
|
||||
export function unbzip2Stream(): plugins.smartstream.SmartDuplex<Buffer, Buffer> {
|
||||
const bzip2Instance = new Bzip2();
|
||||
const bufferQueue: Buffer[] = [];
|
||||
let hasBytes = 0;
|
||||
let blockSize = 0;
|
||||
let broken = false;
|
||||
let bitReader: IBitReader | null = null;
|
||||
let streamCRC: number | null = null;
|
||||
|
||||
function decompressBlock(): Buffer | undefined {
|
||||
if (!blockSize) {
|
||||
blockSize = bzip2Instance.header(bitReader!);
|
||||
streamCRC = 0;
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const bufsize = 100000 * blockSize;
|
||||
const buf = new Int32Array(bufsize);
|
||||
const chunk: number[] = [];
|
||||
|
||||
const outputFunc = (b: number): void => {
|
||||
chunk.push(b);
|
||||
};
|
||||
|
||||
streamCRC = bzip2Instance.decompress(bitReader!, outputFunc, buf, bufsize, streamCRC);
|
||||
|
||||
if (streamCRC === null) {
|
||||
// Reset for next bzip2 header
|
||||
blockSize = 0;
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return Buffer.from(chunk);
|
||||
}
|
||||
|
||||
let outlength = 0;
|
||||
|
||||
const decompressAndPush = async (): Promise<Buffer | undefined> => {
|
||||
if (broken) return undefined;
|
||||
|
||||
try {
|
||||
const resultChunk = decompressBlock();
|
||||
if (resultChunk) {
|
||||
outlength += resultChunk.length;
|
||||
}
|
||||
return resultChunk;
|
||||
} catch (e) {
|
||||
broken = true;
|
||||
if (e instanceof Error) {
|
||||
throw new Bzip2Error(`Decompression failed: ${e.message}`, BZIP2_ERROR_CODES.INVALID_BLOCK_DATA);
|
||||
}
|
||||
throw e;
|
||||
}
|
||||
};
|
||||
|
||||
return new plugins.smartstream.SmartDuplex<Buffer, Buffer>({
|
||||
objectMode: true,
|
||||
name: 'bzip2',
|
||||
highWaterMark: 1,
|
||||
writeFunction: async function (data, streamTools) {
|
||||
bufferQueue.push(data);
|
||||
hasBytes += data.length;
|
||||
|
||||
if (bitReader === null) {
|
||||
bitReader = bitIterator(function () {
|
||||
return bufferQueue.shift()!;
|
||||
});
|
||||
}
|
||||
|
||||
const threshold = 25000 + 100000 * blockSize || 4;
|
||||
while (!broken && hasBytes - bitReader.bytesRead + 1 >= threshold) {
|
||||
const result = await decompressAndPush();
|
||||
if (!result) {
|
||||
continue;
|
||||
}
|
||||
await streamTools.push(result);
|
||||
}
|
||||
return null;
|
||||
},
|
||||
finalFunction: async function (streamTools) {
|
||||
while (!broken && bitReader && hasBytes > bitReader.bytesRead) {
|
||||
const result = await decompressAndPush();
|
||||
if (!result) {
|
||||
continue;
|
||||
}
|
||||
await streamTools.push(result);
|
||||
}
|
||||
|
||||
if (!broken && streamCRC !== null) {
|
||||
this.emit('error', new Bzip2Error('Input stream ended prematurely', BZIP2_ERROR_CODES.PREMATURE_END));
|
||||
}
|
||||
return null;
|
||||
},
|
||||
});
|
||||
}
|
||||
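Typical wiring for the streaming decompressor above, as a sketch (paths are placeholders; SmartDuplex is assumed to be pipe-compatible, which is how it is used elsewhere in this codebase):

import * as fs from 'node:fs';
import { unbzip2Stream } from './bzip2/index.js';

fs.createReadStream('./archive.tar.bz2')   // placeholder path
  .pipe(unbzip2Stream())                   // behaves like a regular Duplex here
  .pipe(fs.createWriteStream('./archive.tar'))
  .on('finish', () => console.log('bzip2 layer removed'));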
@@ -1,5 +1,5 @@
import type { SmartArchive } from './classes.smartarchive.js';
import type { TSupportedMime } from './interfaces.js';
import type { TSupportedMime } from '../ts_shared/interfaces.js';
import * as plugins from './plugins.js';

/**
@@ -8,7 +8,7 @@ import * as plugins from './plugins.js';
export type TDecompressionStream =
  | plugins.stream.Transform
  | plugins.stream.Duplex
  | plugins.tarStream.Extract;
  | plugins.smartstream.SmartDuplex<any, any>;

/**
 * Result of archive analysis
@@ -53,14 +53,42 @@ export class ArchiveAnalyzer {
   */
  private async getDecompressionStream(mimeTypeArg: TSupportedMime): Promise<TDecompressionStream> {
    switch (mimeTypeArg) {
      case 'application/gzip':
        return this.smartArchiveRef.gzipTools.getDecompressionStream();
      case 'application/gzip': {
        // Use fflate streaming Gunzip - instance must be created once and reused
        let gunzip: plugins.fflate.Gunzip;
        return new plugins.stream.Transform({
          construct(callback) {
            gunzip = new plugins.fflate.Gunzip((data, final) => {
              this.push(Buffer.from(data));
            });
            callback();
          },
          transform(chunk, encoding, callback) {
            try {
              gunzip.push(chunk, false);
              callback();
            } catch (err) {
              callback(err as Error);
            }
          },
          flush(callback) {
            try {
              // Signal end of stream with empty final chunk
              gunzip.push(new Uint8Array(0), true);
              callback();
            } catch (err) {
              callback(err as Error);
            }
          }
        });
      }
      case 'application/zip':
        return this.smartArchiveRef.zipTools.getDecompressionStream();
      case 'application/x-bzip2':
        return this.smartArchiveRef.bzip2Tools.getDecompressionStream();
      case 'application/x-tar':
        return this.smartArchiveRef.tarTools.getDecompressionStream();
        // TAR doesn't need decompression, just pass through
        return plugins.smartstream.createPassThrough();
      default:
        // Handle unsupported formats or no decompression needed
        return plugins.smartstream.createPassThrough();
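The inline gzip case above relies on fflate's streaming classes; for reference, the same calls in a self-contained round-trip sketch (illustrative only):

import * as fflate from 'fflate';

// Round-trip sketch: one Gzip/Gunzip instance per stream, chunks pushed in order,
// and an empty final push to flush - the same calls the Transform above relies on.
const input = new TextEncoder().encode('hello '.repeat(1000));

const gzChunks: Uint8Array[] = [];
const gzip = new fflate.Gzip({ level: 6 }, (chunk) => gzChunks.push(chunk));
gzip.push(input, true); // single chunk, marked final

const outChunks: Uint8Array[] = [];
const gunzip = new fflate.Gunzip((chunk) => outChunks.push(chunk));
for (const chunk of gzChunks) gunzip.push(chunk, false);
gunzip.push(new Uint8Array(0), true);

const roundTripped = Buffer.concat(outChunks);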
@@ -1,16 +0,0 @@
import type { SmartArchive } from './classes.smartarchive.js';
import * as plugins from './plugins.js';

import { unbzip2Stream } from './bzip2/index.js';

export class Bzip2Tools {
  smartArchiveRef: SmartArchive;

  constructor(smartArchiveRefArg: SmartArchive) {
    this.smartArchiveRef = smartArchiveRefArg;
  }

  getDecompressionStream() {
    return unbzip2Stream();
  }
}
@@ -1,138 +0,0 @@
|
||||
import * as plugins from './plugins.js';
|
||||
import type { TCompressionLevel } from './interfaces.js';
|
||||
|
||||
/**
|
||||
* Transform stream for GZIP compression using fflate
|
||||
*/
|
||||
export class GzipCompressionTransform extends plugins.stream.Transform {
|
||||
private gzip: plugins.fflate.Gzip;
|
||||
|
||||
constructor(level: TCompressionLevel = 6) {
|
||||
super();
|
||||
|
||||
// Create a streaming Gzip compressor
|
||||
this.gzip = new plugins.fflate.Gzip({ level }, (chunk, final) => {
|
||||
this.push(Buffer.from(chunk));
|
||||
if (final) {
|
||||
this.push(null);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
_transform(
|
||||
chunk: Buffer,
|
||||
encoding: BufferEncoding,
|
||||
callback: plugins.stream.TransformCallback
|
||||
): void {
|
||||
try {
|
||||
this.gzip.push(chunk, false);
|
||||
callback();
|
||||
} catch (err) {
|
||||
callback(err as Error);
|
||||
}
|
||||
}
|
||||
|
||||
_flush(callback: plugins.stream.TransformCallback): void {
|
||||
try {
|
||||
this.gzip.push(new Uint8Array(0), true);
|
||||
callback();
|
||||
} catch (err) {
|
||||
callback(err as Error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Transform stream for GZIP decompression using fflate
|
||||
*/
|
||||
export class GzipDecompressionTransform extends plugins.stream.Transform {
|
||||
private gunzip: plugins.fflate.Gunzip;
|
||||
|
||||
constructor() {
|
||||
super();
|
||||
|
||||
// Create a streaming Gunzip decompressor
|
||||
this.gunzip = new plugins.fflate.Gunzip((chunk, final) => {
|
||||
this.push(Buffer.from(chunk));
|
||||
if (final) {
|
||||
this.push(null);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
_transform(
|
||||
chunk: Buffer,
|
||||
encoding: BufferEncoding,
|
||||
callback: plugins.stream.TransformCallback
|
||||
): void {
|
||||
try {
|
||||
this.gunzip.push(chunk, false);
|
||||
callback();
|
||||
} catch (err) {
|
||||
callback(err as Error);
|
||||
}
|
||||
}
|
||||
|
||||
_flush(callback: plugins.stream.TransformCallback): void {
|
||||
try {
|
||||
this.gunzip.push(new Uint8Array(0), true);
|
||||
callback();
|
||||
} catch (err) {
|
||||
callback(err as Error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* GZIP compression and decompression utilities
|
||||
*/
|
||||
export class GzipTools {
|
||||
/**
|
||||
* Get a streaming compression transform
|
||||
*/
|
||||
public getCompressionStream(level?: TCompressionLevel): plugins.stream.Transform {
|
||||
return new GzipCompressionTransform(level);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a streaming decompression transform
|
||||
*/
|
||||
public getDecompressionStream(): plugins.stream.Transform {
|
||||
return new GzipDecompressionTransform();
|
||||
}
|
||||
|
||||
/**
|
||||
* Compress data synchronously
|
||||
*/
|
||||
public compressSync(data: Buffer, level?: TCompressionLevel): Buffer {
|
||||
const options = level !== undefined ? { level } : undefined;
|
||||
return Buffer.from(plugins.fflate.gzipSync(data, options));
|
||||
}
|
||||
|
||||
/**
|
||||
* Decompress data synchronously
|
||||
*/
|
||||
public decompressSync(data: Buffer): Buffer {
|
||||
return Buffer.from(plugins.fflate.gunzipSync(data));
|
||||
}
|
||||
|
||||
/**
|
||||
* Compress data asynchronously
|
||||
* Note: Uses sync version for Deno compatibility (fflate async uses Web Workers
|
||||
* which have issues in Deno)
|
||||
*/
|
||||
public async compress(data: Buffer, level?: TCompressionLevel): Promise<Buffer> {
|
||||
// Use sync version wrapped in Promise for cross-runtime compatibility
|
||||
return this.compressSync(data, level);
|
||||
}
|
||||
|
||||
/**
|
||||
* Decompress data asynchronously
|
||||
* Note: Uses sync version for Deno compatibility (fflate async uses Web Workers
|
||||
* which have issues in Deno)
|
||||
*/
|
||||
public async decompress(data: Buffer): Promise<Buffer> {
|
||||
// Use sync version wrapped in Promise for cross-runtime compatibility
|
||||
return this.decompressSync(data);
|
||||
}
|
||||
}
|
||||
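A usage sketch for the GzipTools API above (the import path assumes the ts_shared location this commit introduces; the shared variant may return Uint8Array rather than Buffer, hence the Buffer.from calls):

import { GzipTools } from '../ts_shared/classes.gziptools.js'; // assumed new location per this commit

const gzipTools = new GzipTools();
const original = Buffer.from('some text to compress');

const compressed = await gzipTools.compress(original, 9);
const restored = await gzipTools.decompress(Buffer.from(compressed));
// Buffer.from(restored).equals(original) === true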
@@ -6,12 +6,15 @@ import type {
  TArchiveFormat,
  TCompressionLevel,
  TEntryFilter,
} from './interfaces.js';
} from '../ts_shared/interfaces.js';

import { Bzip2Tools } from './classes.bzip2tools.js';
import { GzipTools } from './classes.gziptools.js';
// Import browser-compatible tools from ts_shared
import { Bzip2Tools } from '../ts_shared/classes.bzip2tools.js';
import { GzipTools } from '../ts_shared/classes.gziptools.js';
import { ZipTools } from '../ts_shared/classes.ziptools.js';

// Import Node.js-extended TarTools
import { TarTools } from './classes.tartools.js';
import { ZipTools } from './classes.ziptools.js';
import { ArchiveAnalyzer, type IAnalyzedResult } from './classes.archiveanalyzer.js';

/**
@@ -62,7 +65,7 @@ export class SmartArchive {
  public tarTools = new TarTools();
  public zipTools = new ZipTools();
  public gzipTools = new GzipTools();
  public bzip2Tools = new Bzip2Tools(this);
  public bzip2Tools = new Bzip2Tools();
  public archiveAnalyzer = new ArchiveAnalyzer(this);

  // ============================================
@@ -173,7 +176,7 @@ export class SmartArchive {
  public entry(archivePath: string, content: string | Buffer): this {
    this.ensureNotInExtractMode('entry');
    if (!this._mode) this._mode = 'create';
    this.pendingEntries.push({ archivePath, content });
    this.pendingEntries.push({ archivePath, content: content instanceof Buffer ? new Uint8Array(content) : content });
    return this;
  }

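The inline conversion above is what keeps queued entries runtime-agnostic; the same normalization as a small standalone helper (illustrative):

// Equivalent standalone helper (illustrative): Buffers are Uint8Arrays, but copying
// into a plain Uint8Array keeps Node-specific objects out of the shared code path.
function toPortableContent(content: string | Buffer | Uint8Array): string | Uint8Array {
  return content instanceof Buffer ? new Uint8Array(content) : content;
}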
@@ -184,7 +187,10 @@ export class SmartArchive {
    this.ensureNotInExtractMode('entries');
    if (!this._mode) this._mode = 'create';
    for (const e of entriesArg) {
      this.pendingEntries.push({ archivePath: e.archivePath, content: e.content });
      this.pendingEntries.push({
        archivePath: e.archivePath,
        content: e.content instanceof Buffer ? new Uint8Array(e.content) : e.content
      });
    }
    return this;
  }
@@ -374,30 +380,36 @@ export class SmartArchive {
      plugins.smartstream.createTransformFunction<IAnalyzedResult, void>(
        async (analyzedResultChunk) => {
          if (analyzedResultChunk.fileType?.mime === 'application/x-tar') {
            const tarStream = analyzedResultChunk.decompressionStream as plugins.tarStream.Extract;
            // Use modern-tar for TAR extraction
            const chunks: Buffer[] = [];

            tarStream.on('entry', async (header, stream, next) => {
              if (header.type === 'directory') {
                stream.resume();
                stream.on('end', () => next());
                return;
            analyzedResultChunk.resultStream.on('data', (chunk: Buffer) => {
              chunks.push(chunk);
            });

            analyzedResultChunk.resultStream.on('end', async () => {
              try {
                const tarBuffer = Buffer.concat(chunks);
                const entries = await this.tarTools.extractTar(new Uint8Array(tarBuffer));

                for (const entry of entries) {
                  if (entry.isDirectory) continue;

                  const streamFile = plugins.smartfile.StreamFile.fromBuffer(
                    Buffer.from(entry.content)
                  );
                  streamFile.relativeFilePath = entry.path;
                  streamFileIntake.push(streamFile);
                }
                safeSignalEnd();
              } catch (err) {
                streamFileIntake.emit('error', err);
              }

              const passThrough = new plugins.stream.PassThrough();
              const streamfile = plugins.smartfile.StreamFile.fromStream(passThrough, header.name);
              streamFileIntake.push(streamfile);
              stream.pipe(passThrough);
              stream.on('end', () => {
                passThrough.end();
                next();
              });
            });

            tarStream.on('finish', () => {
              safeSignalEnd();
            analyzedResultChunk.resultStream.on('error', (err: Error) => {
              streamFileIntake.emit('error', err);
            });

            analyzedResultChunk.resultStream.pipe(analyzedResultChunk.decompressionStream);
          } else if (analyzedResultChunk.fileType?.mime === 'application/zip') {
            analyzedResultChunk.resultStream
              .pipe(analyzedResultChunk.decompressionStream)
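Reduced to its essentials, the rewritten TAR branch buffers the stream and hands one Uint8Array to the shared extractor; a self-contained sketch of that flow (the extractTar signature is inferred from the call above, not guaranteed):

import type { Readable } from 'node:stream';

// Assumed shape of the shared extractor, inferred from the call above.
type TarExtractor = {
  extractTar(data: Uint8Array): Promise<Array<{ path: string; content: Uint8Array; isDirectory: boolean }>>;
};

async function extractTarFromStream(source: Readable, tarTools: TarExtractor) {
  const chunks: Buffer[] = [];
  for await (const chunk of source) {
    chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
  }
  const entries = await tarTools.extractTar(new Uint8Array(Buffer.concat(chunks)));
  return entries.filter((entry) => !entry.isDirectory);
}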
@@ -544,25 +556,29 @@ export class SmartArchive {

    if (this.creationFormat === 'tar' || this.creationFormat === 'tar.gz' || this.creationFormat === 'tgz') {
      if (this.creationFormat === 'tar') {
        this.archiveBuffer = await this.tarTools.packFiles(entries);
        const result = await this.tarTools.packFiles(entries);
        this.archiveBuffer = Buffer.from(result);
      } else {
        this.archiveBuffer = await this.tarTools.packFilesToTarGz(entries, this._compressionLevel);
        const result = await this.tarTools.packFilesToTarGz(entries, this._compressionLevel);
        this.archiveBuffer = Buffer.from(result);
      }
    } else if (this.creationFormat === 'zip') {
      this.archiveBuffer = await this.zipTools.createZip(entries, this._compressionLevel);
      const result = await this.zipTools.createZip(entries, this._compressionLevel);
      this.archiveBuffer = Buffer.from(result);
    } else if (this.creationFormat === 'gz') {
      if (entries.length !== 1) {
        throw new Error('GZIP format only supports a single file');
      }
      let content: Buffer;
      let content: Uint8Array;
      if (typeof entries[0].content === 'string') {
        content = Buffer.from(entries[0].content);
      } else if (Buffer.isBuffer(entries[0].content)) {
        content = new TextEncoder().encode(entries[0].content);
      } else if (entries[0].content instanceof Uint8Array) {
        content = entries[0].content;
      } else {
        throw new Error('GZIP format requires string or Buffer content');
        throw new Error('GZIP format requires string or Uint8Array content');
      }
      this.archiveBuffer = await this.gzipTools.compress(content, this._compressionLevel);
      const result = await this.gzipTools.compress(content, this._compressionLevel);
      this.archiveBuffer = Buffer.from(result);
    } else {
      throw new Error(`Unsupported format: ${this.creationFormat}`);
    }
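The 'gz' branch above normalizes the single entry to a Uint8Array before compressing; the same flow written directly against fflate, as a minimal sketch:

import * as fflate from 'fflate';

const payload: string | Uint8Array = 'single file contents';
const bytes = typeof payload === 'string' ? new TextEncoder().encode(payload) : payload;
const gzipped = Buffer.from(fflate.gzipSync(bytes, { level: 6 }));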
@@ -808,7 +824,7 @@ export class SmartArchive {
      const content = await plugins.fsPromises.readFile(absolutePath);
      this.pendingEntries.push({
        archivePath,
        content,
        content: new Uint8Array(content),
      });
    }
  }

@@ -1,208 +1,51 @@
import * as plugins from './plugins.js';
import type { IArchiveEntry, TCompressionLevel } from './interfaces.js';
import { GzipTools } from './classes.gziptools.js';
import type { IArchiveEntry, TCompressionLevel } from '../ts_shared/interfaces.js';
import { TarTools as SharedTarTools } from '../ts_shared/classes.tartools.js';
import { GzipTools } from '../ts_shared/classes.gziptools.js';

/**
 * TAR archive creation and extraction utilities
 * Extended TAR archive utilities with Node.js filesystem support
 */
export class TarTools {
export class TarTools extends SharedTarTools {
  /**
   * Add a file to a TAR pack stream
   * Pack a directory into a TAR buffer (Node.js only)
   */
  public async addFileToPack(
    pack: plugins.tarStream.Pack,
    optionsArg: {
      fileName?: string;
      content?:
        | string
        | Buffer
        | plugins.stream.Readable
        | plugins.smartfile.SmartFile
        | plugins.smartfile.StreamFile;
      byteLength?: number;
      filePath?: string;
    }
  ): Promise<void> {
    return new Promise<void>(async (resolve, reject) => {
      let fileName: string | null = null;

      if (optionsArg.fileName) {
        fileName = optionsArg.fileName;
      } else if (optionsArg.content instanceof plugins.smartfile.SmartFile) {
        fileName = optionsArg.content.relative;
      } else if (optionsArg.content instanceof plugins.smartfile.StreamFile) {
        fileName = optionsArg.content.relativeFilePath;
      } else if (optionsArg.filePath) {
        fileName = optionsArg.filePath;
      }

      if (!fileName) {
        reject(new Error('No filename specified for TAR entry'));
        return;
      }

      // Determine content byte length
      let contentByteLength: number | undefined;
      if (optionsArg.byteLength) {
        contentByteLength = optionsArg.byteLength;
      } else if (typeof optionsArg.content === 'string') {
        contentByteLength = Buffer.byteLength(optionsArg.content, 'utf8');
      } else if (Buffer.isBuffer(optionsArg.content)) {
        contentByteLength = optionsArg.content.length;
      } else if (optionsArg.content instanceof plugins.smartfile.SmartFile) {
        contentByteLength = await optionsArg.content.getSize();
      } else if (optionsArg.content instanceof plugins.smartfile.StreamFile) {
        contentByteLength = await optionsArg.content.getSize();
      } else if (optionsArg.filePath) {
        const fileStat = await plugins.fsPromises.stat(optionsArg.filePath);
        contentByteLength = fileStat.size;
      }

      // Convert all content types to Readable stream
      let content: plugins.stream.Readable;
      if (Buffer.isBuffer(optionsArg.content)) {
        content = plugins.stream.Readable.from(optionsArg.content);
      } else if (typeof optionsArg.content === 'string') {
        content = plugins.stream.Readable.from(Buffer.from(optionsArg.content));
      } else if (optionsArg.content instanceof plugins.smartfile.SmartFile) {
        content = plugins.stream.Readable.from(optionsArg.content.contents);
      } else if (optionsArg.content instanceof plugins.smartfile.StreamFile) {
        content = await optionsArg.content.createReadStream();
      } else if (optionsArg.content instanceof plugins.stream.Readable) {
        content = optionsArg.content;
      } else if (optionsArg.filePath) {
        content = plugins.fs.createReadStream(optionsArg.filePath);
      } else {
        reject(new Error('No content or filePath specified for TAR entry'));
        return;
      }

      const entry = pack.entry(
        {
          name: fileName,
          ...(contentByteLength !== undefined ? { size: contentByteLength } : {}),
        },
        (err: Error | null) => {
          if (err) {
            reject(err);
          } else {
            resolve();
          }
        }
      );

      content.pipe(entry);
      // Note: resolve() is called in the callback above when pipe completes
    });
  }

  /**
   * Pack a directory into a TAR stream
   */
  public async packDirectory(directoryPath: string): Promise<plugins.tarStream.Pack> {
  public async packDirectory(directoryPath: string): Promise<Uint8Array> {
    const fileTree = await plugins.listFileTree(directoryPath, '**/*');
    const pack = await this.getPackStream();
    const entries: IArchiveEntry[] = [];

    for (const filePath of fileTree) {
      const absolutePath = plugins.path.join(directoryPath, filePath);
      const fileStat = await plugins.fsPromises.stat(absolutePath);
      await this.addFileToPack(pack, {
        byteLength: fileStat.size,
        filePath: absolutePath,
        fileName: filePath,
        content: plugins.fs.createReadStream(absolutePath),
      const content = await plugins.fsPromises.readFile(absolutePath);
      entries.push({
        archivePath: filePath,
        content: new Uint8Array(content),
      });
    }

    return pack;
    return this.packFiles(entries);
  }

  /**
   * Get a new TAR pack stream
   */
  public async getPackStream(): Promise<plugins.tarStream.Pack> {
    return plugins.tarStream.pack();
  }

  /**
   * Get a TAR extraction stream
   */
  public getDecompressionStream(): plugins.tarStream.Extract {
    return plugins.tarStream.extract();
  }

  /**
   * Pack files into a TAR buffer
   */
  public async packFiles(files: IArchiveEntry[]): Promise<Buffer> {
    const pack = await this.getPackStream();

    for (const file of files) {
      await this.addFileToPack(pack, {
        fileName: file.archivePath,
        content: file.content as string | Buffer | plugins.stream.Readable | plugins.smartfile.SmartFile | plugins.smartfile.StreamFile,
        byteLength: file.size,
      });
    }

    pack.finalize();

    const chunks: Buffer[] = [];
    return new Promise((resolve, reject) => {
      pack.on('data', (chunk: Buffer) => chunks.push(chunk));
      pack.on('end', () => resolve(Buffer.concat(chunks)));
      pack.on('error', reject);
    });
  }

  /**
   * Pack a directory into a TAR.GZ buffer
   * Pack a directory into a TAR.GZ buffer (Node.js only)
   */
  public async packDirectoryToTarGz(
    directoryPath: string,
    compressionLevel?: TCompressionLevel
  ): Promise<Buffer> {
    const pack = await this.packDirectory(directoryPath);
    pack.finalize();

  ): Promise<Uint8Array> {
    const tarBuffer = await this.packDirectory(directoryPath);
    const gzipTools = new GzipTools();
    const gzipStream = gzipTools.getCompressionStream(compressionLevel);

    const chunks: Buffer[] = [];
    return new Promise((resolve, reject) => {
      pack
        .pipe(gzipStream)
        .on('data', (chunk: Buffer) => chunks.push(chunk))
        .on('end', () => resolve(Buffer.concat(chunks)))
        .on('error', reject);
    });
    return gzipTools.compress(tarBuffer, compressionLevel);
  }

  /**
   * Pack a directory into a TAR.GZ stream
   * Pack a directory into a TAR.GZ stream (Node.js only)
   */
  public async packDirectoryToTarGzStream(
    directoryPath: string,
    compressionLevel?: TCompressionLevel
  ): Promise<plugins.stream.Readable> {
    const pack = await this.packDirectory(directoryPath);
    pack.finalize();

    const gzipTools = new GzipTools();
    const gzipStream = gzipTools.getCompressionStream(compressionLevel);

    return pack.pipe(gzipStream);
  }

  /**
   * Pack files into a TAR.GZ buffer
   */
  public async packFilesToTarGz(
    files: IArchiveEntry[],
    compressionLevel?: TCompressionLevel
  ): Promise<Buffer> {
    const tarBuffer = await this.packFiles(files);
    const gzipTools = new GzipTools();
    return gzipTools.compress(tarBuffer, compressionLevel);
    const buffer = await this.packDirectoryToTarGz(directoryPath, compressionLevel);
    return plugins.stream.Readable.from(buffer);
  }
}
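Usage of the slimmed-down, Node-extended TarTools above, as a sketch (the directory path is a placeholder):

import { TarTools } from './classes.tartools.js';

const tarTools = new TarTools();
const tarBytes = await tarTools.packDirectory('./some-directory');              // Uint8Array
const tarGzBytes = await tarTools.packDirectoryToTarGz('./some-directory', 9);  // gzipped tar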
@@ -1,196 +0,0 @@
|
||||
import * as plugins from './plugins.js';
|
||||
import type { IArchiveEntry, TCompressionLevel } from './interfaces.js';
|
||||
|
||||
/**
|
||||
* Transform stream for ZIP decompression using fflate
|
||||
* Emits StreamFile objects for each file in the archive
|
||||
*/
|
||||
export class ZipDecompressionTransform extends plugins.smartstream.SmartDuplex<Buffer, plugins.smartfile.StreamFile> {
|
||||
private streamtools!: plugins.smartstream.IStreamTools;
|
||||
private unzipper = new plugins.fflate.Unzip(async (fileArg) => {
|
||||
let resultBuffer: Buffer;
|
||||
fileArg.ondata = async (_flateError, dat, final) => {
|
||||
resultBuffer
|
||||
? (resultBuffer = Buffer.concat([resultBuffer, Buffer.from(dat)]))
|
||||
: (resultBuffer = Buffer.from(dat));
|
||||
if (final) {
|
||||
const streamFile = plugins.smartfile.StreamFile.fromBuffer(resultBuffer);
|
||||
streamFile.relativeFilePath = fileArg.name;
|
||||
this.streamtools.push(streamFile);
|
||||
}
|
||||
};
|
||||
fileArg.start();
|
||||
});
|
||||
|
||||
constructor() {
|
||||
super({
|
||||
objectMode: true,
|
||||
writeFunction: async (chunkArg, streamtoolsArg) => {
|
||||
this.streamtools ? null : (this.streamtools = streamtoolsArg);
|
||||
this.unzipper.push(
|
||||
Buffer.isBuffer(chunkArg) ? chunkArg : Buffer.from(chunkArg as unknown as ArrayBuffer),
|
||||
false
|
||||
);
|
||||
return null;
|
||||
},
|
||||
finalFunction: async () => {
|
||||
this.unzipper.push(Buffer.from(''), true);
|
||||
await plugins.smartdelay.delayFor(0);
|
||||
await this.streamtools.push(null);
|
||||
return null;
|
||||
},
|
||||
});
|
||||
this.unzipper.register(plugins.fflate.UnzipInflate);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Streaming ZIP compression using fflate
|
||||
* Allows adding multiple entries before finalizing
|
||||
*/
|
||||
export class ZipCompressionStream extends plugins.stream.Duplex {
|
||||
private files: Map<string, { data: Uint8Array; options?: plugins.fflate.ZipOptions }> = new Map();
|
||||
private finalized = false;
|
||||
|
||||
constructor() {
|
||||
super();
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a file entry to the ZIP archive
|
||||
*/
|
||||
public async addEntry(
|
||||
fileName: string,
|
||||
content: Buffer | plugins.stream.Readable,
|
||||
options?: { compressionLevel?: TCompressionLevel }
|
||||
): Promise<void> {
|
||||
if (this.finalized) {
|
||||
throw new Error('Cannot add entries to a finalized ZIP archive');
|
||||
}
|
||||
|
||||
let data: Buffer;
|
||||
if (Buffer.isBuffer(content)) {
|
||||
data = content;
|
||||
} else {
|
||||
// Collect stream to buffer
|
||||
const chunks: Buffer[] = [];
|
||||
for await (const chunk of content) {
|
||||
chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
|
||||
}
|
||||
data = Buffer.concat(chunks);
|
||||
}
|
||||
|
||||
this.files.set(fileName, {
|
||||
data: new Uint8Array(data),
|
||||
options: options?.compressionLevel !== undefined ? { level: options.compressionLevel } : undefined,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Finalize the ZIP archive and emit the compressed data
|
||||
*/
|
||||
public async finalize(): Promise<void> {
|
||||
if (this.finalized) {
|
||||
return;
|
||||
}
|
||||
this.finalized = true;
|
||||
|
||||
const filesObj: plugins.fflate.Zippable = {};
|
||||
for (const [name, { data, options }] of this.files) {
|
||||
filesObj[name] = options ? [data, options] : data;
|
||||
}
|
||||
|
||||
// Use sync version for Deno compatibility (fflate async uses Web Workers)
|
||||
try {
|
||||
const result = plugins.fflate.zipSync(filesObj);
|
||||
this.push(Buffer.from(result));
|
||||
this.push(null);
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
_read(): void {
|
||||
// No-op: data is pushed when finalize() is called
|
||||
}
|
||||
|
||||
_write(
|
||||
_chunk: Buffer,
|
||||
_encoding: BufferEncoding,
|
||||
callback: (error?: Error | null) => void
|
||||
): void {
|
||||
// Not used for ZIP creation - use addEntry() instead
|
||||
callback(new Error('Use addEntry() to add files to the ZIP archive'));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* ZIP compression and decompression utilities
|
||||
*/
|
||||
export class ZipTools {
|
||||
/**
|
||||
* Get a streaming compression object for creating ZIP archives
|
||||
*/
|
||||
public getCompressionStream(): ZipCompressionStream {
|
||||
return new ZipCompressionStream();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a streaming decompression transform for extracting ZIP archives
|
||||
*/
|
||||
public getDecompressionStream(): ZipDecompressionTransform {
|
||||
return new ZipDecompressionTransform();
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a ZIP archive from an array of entries
|
||||
*/
|
||||
public async createZip(entries: IArchiveEntry[], compressionLevel?: TCompressionLevel): Promise<Buffer> {
|
||||
const filesObj: plugins.fflate.Zippable = {};
|
||||
|
||||
for (const entry of entries) {
|
||||
let data: Uint8Array;
|
||||
|
||||
if (typeof entry.content === 'string') {
|
||||
data = new TextEncoder().encode(entry.content);
|
||||
} else if (Buffer.isBuffer(entry.content)) {
|
||||
data = new Uint8Array(entry.content);
|
||||
} else if (entry.content instanceof plugins.smartfile.SmartFile) {
|
||||
data = new Uint8Array(entry.content.contents);
|
||||
} else if (entry.content instanceof plugins.smartfile.StreamFile) {
|
||||
const buffer = await entry.content.getContentAsBuffer();
|
||||
data = new Uint8Array(buffer);
|
||||
} else {
|
||||
// Readable stream
|
||||
const chunks: Buffer[] = [];
|
||||
for await (const chunk of entry.content as plugins.stream.Readable) {
|
||||
chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
|
||||
}
|
||||
data = new Uint8Array(Buffer.concat(chunks));
|
||||
}
|
||||
|
||||
if (compressionLevel !== undefined) {
|
||||
filesObj[entry.archivePath] = [data, { level: compressionLevel }];
|
||||
} else {
|
||||
filesObj[entry.archivePath] = data;
|
||||
}
|
||||
}
|
||||
|
||||
// Use sync version for Deno compatibility (fflate async uses Web Workers)
|
||||
const result = plugins.fflate.zipSync(filesObj);
|
||||
return Buffer.from(result);
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract a ZIP buffer to an array of entries
|
||||
*/
|
||||
public async extractZip(data: Buffer): Promise<Array<{ path: string; content: Buffer }>> {
|
||||
// Use sync version for Deno compatibility (fflate async uses Web Workers)
|
||||
const result = plugins.fflate.unzipSync(data);
|
||||
const entries: Array<{ path: string; content: Buffer }> = [];
|
||||
for (const [path, content] of Object.entries(result)) {
|
||||
entries.push({ path, content: Buffer.from(content) });
|
||||
}
|
||||
return entries;
|
||||
}
|
||||
}
|
||||
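A round-trip sketch for the ZIP helpers defined above (the import path assumes the ts_shared location this commit introduces; the shared variant may return Uint8Array, hence the Buffer.from):

import { ZipTools } from '../ts_shared/classes.ziptools.js'; // assumed new location per this commit

const zipTools = new ZipTools();
const zipped = await zipTools.createZip(
  [{ archivePath: 'hello.txt', content: 'hello world' }],
  6
);
const entries = await zipTools.extractZip(Buffer.from(zipped));
// entries[0].path === 'hello.txt'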
ts/errors.ts
@@ -1,70 +0,0 @@
|
||||
/**
|
||||
* Base error class for smartarchive
|
||||
*/
|
||||
export class SmartArchiveError extends Error {
|
||||
public readonly code: string;
|
||||
|
||||
constructor(message: string, code: string) {
|
||||
super(message);
|
||||
this.name = 'SmartArchiveError';
|
||||
this.code = code;
|
||||
// Maintains proper stack trace for where error was thrown (V8)
|
||||
if (Error.captureStackTrace) {
|
||||
Error.captureStackTrace(this, this.constructor);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* BZIP2-specific decompression errors
|
||||
*/
|
||||
export class Bzip2Error extends SmartArchiveError {
|
||||
constructor(message: string, code: string = 'BZIP2_ERROR') {
|
||||
super(message, code);
|
||||
this.name = 'Bzip2Error';
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Archive format detection errors
|
||||
*/
|
||||
export class ArchiveFormatError extends SmartArchiveError {
|
||||
constructor(message: string) {
|
||||
super(message, 'ARCHIVE_FORMAT_ERROR');
|
||||
this.name = 'ArchiveFormatError';
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stream processing errors
|
||||
*/
|
||||
export class StreamError extends SmartArchiveError {
|
||||
constructor(message: string) {
|
||||
super(message, 'STREAM_ERROR');
|
||||
this.name = 'StreamError';
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* BZIP2 error codes for programmatic error handling
|
||||
*/
|
||||
export const BZIP2_ERROR_CODES = {
|
||||
NO_MAGIC_NUMBER: 'BZIP2_NO_MAGIC',
|
||||
INVALID_ARCHIVE: 'BZIP2_INVALID_ARCHIVE',
|
||||
CRC_MISMATCH: 'BZIP2_CRC_MISMATCH',
|
||||
INVALID_BLOCK_DATA: 'BZIP2_INVALID_BLOCK',
|
||||
BUFFER_OVERFLOW: 'BZIP2_BUFFER_OVERFLOW',
|
||||
INVALID_HUFFMAN: 'BZIP2_INVALID_HUFFMAN',
|
||||
INVALID_SELECTOR: 'BZIP2_INVALID_SELECTOR',
|
||||
INVALID_POSITION: 'BZIP2_INVALID_POSITION',
|
||||
PREMATURE_END: 'BZIP2_PREMATURE_END',
|
||||
} as const;
|
||||
|
||||
export type TBzip2ErrorCode = typeof BZIP2_ERROR_CODES[keyof typeof BZIP2_ERROR_CODES];
|
||||
|
||||
/**
|
||||
* Throw a BZIP2 error with a specific code
|
||||
*/
|
||||
export function throwBzip2Error(message: string, code: TBzip2ErrorCode): never {
|
||||
throw new Bzip2Error(message, code);
|
||||
}
|
||||
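The error hierarchy above is built for code-based handling; a usage sketch (assuming these classes remain exported from the package root after the move to the shared layer):

import { Bzip2Error, BZIP2_ERROR_CODES } from '@push.rocks/smartarchive'; // assumed export path

function handleArchiveError(err: unknown): void {
  if (err instanceof Bzip2Error && err.code === BZIP2_ERROR_CODES.CRC_MISMATCH) {
    console.error('corrupted bzip2 data:', err.message);
    return;
  }
  throw err;
}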
ts/index.ts
@@ -1,15 +1,11 @@
// Core types and errors
export * from './interfaces.js';
export * from './errors.js';
// Re-export everything from ts_shared (browser-compatible)
export * from '../ts_shared/index.js';

// Main archive class
// Node.js-specific: Main archive class with filesystem support
export * from './classes.smartarchive.js';

// Format-specific tools
export * from './classes.tartools.js';
export * from './classes.ziptools.js';
export * from './classes.gziptools.js';
export * from './classes.bzip2tools.js';

// Archive analysis
// Node.js-specific: Archive analysis with SmartArchive integration
export * from './classes.archiveanalyzer.js';

// Node.js-specific: Extended TarTools with filesystem support (overrides shared TarTools)
export { TarTools } from './classes.tartools.js';
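With the re-export layout above, consumers keep importing from the package root and get the Node-extended TarTools; a sketch of the intended resolution:

import { TarTools, GzipTools } from '@push.rocks/smartarchive';

// TarTools resolves to the Node-extended class: the explicit named re-export from
// './classes.tartools.js' takes precedence over the star re-export of ts_shared.
const tarTools = new TarTools();
const gzipTools = new GzipTools(); // comes straight from the shared layer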
ts/interfaces.ts
@@ -1,136 +0,0 @@
|
||||
import type * as stream from 'node:stream';
|
||||
import type { SmartFile, StreamFile } from '@push.rocks/smartfile';
|
||||
|
||||
/**
|
||||
* Supported archive formats
|
||||
*/
|
||||
export type TArchiveFormat = 'tar' | 'tar.gz' | 'tgz' | 'zip' | 'gz' | 'bz2';
|
||||
|
||||
/**
|
||||
* Compression level (0 = no compression, 9 = maximum compression)
|
||||
*/
|
||||
export type TCompressionLevel = 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9;
|
||||
|
||||
/**
|
||||
* Supported MIME types for archive detection
|
||||
*/
|
||||
export type TSupportedMime =
|
||||
| 'application/gzip'
|
||||
| 'application/zip'
|
||||
| 'application/x-bzip2'
|
||||
| 'application/x-tar'
|
||||
| undefined;
|
||||
|
||||
/**
|
||||
* Entry to add to an archive during creation
|
||||
*/
|
||||
export interface IArchiveEntry {
|
||||
/** Path within the archive */
|
||||
archivePath: string;
|
||||
/** Content: string, Buffer, Readable stream, SmartFile, or StreamFile */
|
||||
content: string | Buffer | stream.Readable | SmartFile | StreamFile;
|
||||
/** Optional size hint for streams (improves performance) */
|
||||
size?: number;
|
||||
/** Optional file mode/permissions */
|
||||
mode?: number;
|
||||
/** Optional modification time */
|
||||
mtime?: Date;
|
||||
}
|
||||
|
||||
/**
|
||||
* Options for creating archives
|
||||
*/
|
||||
export interface IArchiveCreationOptions {
|
||||
/** Target archive format */
|
||||
format: TArchiveFormat;
|
||||
/** Compression level (0-9, default depends on format) */
|
||||
compressionLevel?: TCompressionLevel;
|
||||
/** Base path to strip from file paths in archive */
|
||||
basePath?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Options for extracting archives
|
||||
*/
|
||||
export interface IArchiveExtractionOptions {
|
||||
/** Target directory for extraction */
|
||||
targetDir: string;
|
||||
/** Optional filename for single-file archives (gz, bz2) */
|
||||
fileName?: string;
|
||||
/** Number of leading path components to strip */
|
||||
stripComponents?: number;
|
||||
/** Filter function to select which entries to extract */
|
||||
filter?: (entry: IArchiveEntryInfo) => boolean;
|
||||
/** Whether to overwrite existing files */
|
||||
overwrite?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Information about an archive entry
|
||||
*/
|
||||
export interface IArchiveEntryInfo {
|
||||
/** Path of the entry within the archive */
|
||||
path: string;
|
||||
/** Size in bytes */
|
||||
size: number;
|
||||
/** Whether this entry is a directory */
|
||||
isDirectory: boolean;
|
||||
/** Whether this entry is a file */
|
||||
isFile: boolean;
|
||||
/** Modification time */
|
||||
mtime?: Date;
|
||||
/** File mode/permissions */
|
||||
mode?: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Result of archive analysis
|
||||
*/
|
||||
export interface IArchiveInfo {
|
||||
/** Detected archive format */
|
||||
format: TArchiveFormat | null;
|
||||
/** Whether the archive is compressed */
|
||||
isCompressed: boolean;
|
||||
/** Whether this is a recognized archive format */
|
||||
isArchive: boolean;
|
||||
/** List of entries (if available without full extraction) */
|
||||
entries?: IArchiveEntryInfo[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Options for adding a file to a TAR pack stream
|
||||
*/
|
||||
export interface IAddFileOptions {
|
||||
/** Filename within the archive */
|
||||
fileName?: string;
|
||||
/** File content */
|
||||
content?: string | Buffer | stream.Readable | SmartFile | StreamFile;
|
||||
/** Size in bytes (required for streams) */
|
||||
byteLength?: number;
|
||||
/** Path to file on disk (alternative to content) */
|
||||
filePath?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Bit reader interface for BZIP2 decompression
|
||||
*/
|
||||
export interface IBitReader {
|
||||
(n: number | null): number | void;
|
||||
bytesRead: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Huffman group for BZIP2 decompression
|
||||
*/
|
||||
export interface IHuffmanGroup {
|
||||
permute: Int32Array;
|
||||
limit: Int32Array;
|
||||
base: Int32Array;
|
||||
minLen: number;
|
||||
maxLen: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Entry filter predicate for fluent API
|
||||
*/
|
||||
export type TEntryFilter = (entry: IArchiveEntryInfo) => boolean;
|
||||
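For orientation, a literal matching the IArchiveEntry shape defined above (values are placeholders):

import type { IArchiveEntry } from './interfaces.js';

const entry: IArchiveEntry = {
  archivePath: 'docs/readme.md',
  content: Buffer.from('# readme'),
  mode: 0o644,
  mtime: new Date(),
};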
@@ -1,4 +1,4 @@
// node native scope
// Node.js native scope
import * as path from 'node:path';
import * as stream from 'node:stream';
import * as fs from 'node:fs';
@@ -30,32 +30,20 @@ export async function listFileTree(dirPath: string, _pattern: string = '**/*'):
  return results;
}

// @pushrocks scope
import * as smartfile from '@push.rocks/smartfile';
import * as smartdelay from '@push.rocks/smartdelay';
// Re-export browser-compatible plugins from ts_shared
export * from '../ts_shared/plugins.js';

// Additional Node.js-specific @pushrocks packages
import * as smartpath from '@push.rocks/smartpath';
import * as smartpromise from '@push.rocks/smartpromise';
import * as smartrequest from '@push.rocks/smartrequest';
import * as smartunique from '@push.rocks/smartunique';
import * as smartstream from '@push.rocks/smartstream';
import * as smartrx from '@push.rocks/smartrx';
import * as smarturl from '@push.rocks/smarturl';

export {
  smartfile,
  smartdelay,
  smartpath,
  smartpromise,
  smartrequest,
  smartunique,
  smartstream,
  smartrx,
  smarturl,
};

// third party scope
import * as fileType from 'file-type';
import * as fflate from 'fflate';
import tarStream from 'tar-stream';

export { fileType, fflate, tarStream };