Compare commits
12 Commits
Author | SHA1 | Date
---|---|---
 | 25e847a9ea | 
 | cc0ecb3f16 | 
 | 2cd0846c74 | 
 | 49ab40af09 | 
 | 5ff51ff88d | 
 | c578a3fdc1 | 
 | ad0352a712 | 
 | f921338fd6 | 
 | 614dae5ade | 
 | f87359fb97 | 
 | 21da75c09a | 
 | fe49d25765 | 
package.json

@@ -1,6 +1,6 @@
 {
   "name": "@push.rocks/smartarchive",
-  "version": "4.0.13",
+  "version": "4.0.19",
   "description": "work with archives",
   "main": "dist_ts/index.js",
   "typings": "dist_ts/index.d.ts",
@@ -26,7 +26,7 @@
     "@push.rocks/smartpromise": "^4.0.3",
     "@push.rocks/smartrequest": "^2.0.21",
     "@push.rocks/smartrx": "^3.0.7",
-    "@push.rocks/smartstream": "^3.0.25",
+    "@push.rocks/smartstream": "^3.0.30",
     "@push.rocks/smartunique": "^3.0.6",
     "@push.rocks/smarturl": "^3.0.7",
     "@types/tar-stream": "^3.1.3",
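The only functional change in package.json is the @push.rocks/smartstream bump from ^3.0.25 to ^3.0.30; the caret range already admitted 3.0.30, so the edit simply raises the floor, and the pnpm-lock.yaml entries below pin the new version accordingly. A minimal sketch of that caret-range behaviour, using the third-party semver package purely for illustration (it is not a dependency of this repo):

```typescript
// Illustration only: `semver` is not part of smartarchive's dependency tree.
// Default import assumes esModuleInterop, as is typical in push.rocks tsconfigs.
import semver from 'semver';

console.log(semver.satisfies('3.0.30', '^3.0.25')); // true  (old range already allowed it)
console.log(semver.satisfies('3.0.30', '^3.0.30')); // true  (new, raised floor)
console.log(semver.satisfies('3.0.25', '^3.0.30')); // false (older patches now excluded)
console.log(semver.satisfies('4.0.0', '^3.0.30'));  // false (caret stays within major 3)
```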
pnpm-lock.yaml (generated, 10 changed lines)
@@ -21,8 +21,8 @@ dependencies:
     specifier: ^3.0.7
     version: 3.0.7
   '@push.rocks/smartstream':
-    specifier: ^3.0.25
-    version: 3.0.25
+    specifier: ^3.0.30
+    version: 3.0.30
   '@push.rocks/smartunique':
     specifier: ^3.0.6
     version: 3.0.6
@@ -747,7 +747,7 @@ packages:
       '@push.rocks/smartpath': 5.0.11
       '@push.rocks/smartpromise': 4.0.3
       '@push.rocks/smartrequest': 2.0.21
-      '@push.rocks/smartstream': 3.0.25
+      '@push.rocks/smartstream': 3.0.30
       '@types/fs-extra': 11.0.3
       '@types/glob': 8.1.0
       '@types/js-yaml': 4.0.8
@@ -964,8 +964,8 @@ packages:
       through2: 4.0.2
     dev: true

-  /@push.rocks/smartstream@3.0.25:
-    resolution: {integrity: sha512-rnZ4Xbuhf6CbgRBjV3o7ddSIR2baWsbeEpV39meia2CfsyY9W/WKxBQrWJbrrv8w165ksSWm9Y9Vy0PmNKr5rQ==}
+  /@push.rocks/smartstream@3.0.30:
+    resolution: {integrity: sha512-+izraXkILJJIy99PzP2LYahaW+g/35bTi/UxD7FeuOYbTaigode6Q3swvs0nrK6yu+A9x6RfoWV4JAJjd3Y87g==}
     dependencies:
       '@push.rocks/lik': 6.0.12
       '@push.rocks/smartpromise': 4.0.3
@@ -47,4 +47,4 @@ tap.skip.test('should extract a b2zip', async () => {
   );
 })

-tap.start();
+await tap.start();
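The test change swaps tap.start() for await tap.start(). A minimal sketch of the resulting test-file shape, assuming @push.rocks/tapbundle's usual named export tap and an ESM module with top-level await; the test name and body below are placeholders, not taken from the repo's test file:

```typescript
import { tap } from '@push.rocks/tapbundle';

tap.test('placeholder test', async () => {
  // hypothetical body, not from the actual test file
  if (1 + 1 !== 2) {
    throw new Error('arithmetic is broken');
  }
});

// Awaiting start() keeps this ESM module alive until every registered test
// has settled, so failures can set the exit code before the process ends.
await tap.start();
```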
@@ -3,6 +3,6 @@
  */
 export const commitinfo = {
   name: '@push.rocks/smartarchive',
-  version: '4.0.13',
+  version: '4.0.19',
   description: 'work with archives'
 }
@@ -49,13 +49,16 @@ export function unbzip2Stream() {
     } catch (e) {
       console.error(e);
       broken = true;
-      return false;
     }
   };
-
+  let counter = 0;
   return new plugins.smartstream.SmartDuplex({
+    objectMode: true,
+    name: 'bzip2',
+    debug: false,
+    highWaterMark: 1,
     writeFunction: async function (data, streamTools) {
-      //console.error('received', data.length,'bytes in', typeof data);
+      // console.log(`got chunk ${counter++}`)
       bufferQueue.push(data);
       hasBytes += data.length;
       if (bitReader === null) {
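For context, the bzip2 wrapper now passes a few more options to SmartDuplex. A minimal sketch of that option shape; the option names and the writeFunction/streamTools calls are taken from this diff, but their exact semantics in @push.rocks/smartstream are assumed, and the pass-through body is a placeholder:

```typescript
import * as smartstream from '@push.rocks/smartstream';

// Sketch of a SmartDuplex configured like the bzip2 stream above.
// `name` and `debug` presumably feed logging/diagnostics; `highWaterMark: 1`
// keeps at most one object buffered, so backpressure kicks in per chunk.
const exampleDuplex = new smartstream.SmartDuplex<Buffer, Buffer>({
  objectMode: true,
  name: 'example',
  debug: false,
  highWaterMark: 1,
  writeFunction: async (chunk, streamTools) => {
    // placeholder transform: pass the chunk through unchanged
    await streamTools.push(chunk);
    return null;
  },
});
```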
@@ -66,6 +69,10 @@ export function unbzip2Stream() {
       while (!broken && hasBytes - bitReader.bytesRead + 1 >= (25000 + 100000 * blockSize || 4)) {
         //console.error('decompressing with', hasBytes - bitReader.bytesRead + 1, 'bytes in buffer');
         const result = await decompressAndPush();
+        if (!result) {
+          continue;
+        }
+        // console.log(result.toString());
         await streamTools.push(result);
       }
     },
@@ -73,11 +80,13 @@ export function unbzip2Stream() {
       //console.error(x,'last compressing with', hasBytes, 'bytes in buffer');
       while (!broken && bitReader && hasBytes > bitReader.bytesRead) {
         const result = await decompressAndPush();
-        streamTools.push(result);
+        if (!result) {
+          continue;
+        }
+        await streamTools.push(result);
       }
       if (!broken) {
         if (streamCRC !== null) this.emit('error', new Error('input stream ended prematurely'));
-        this.queue(null);
       }
     },
   });
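Both loops now skip falsy results from decompressAndPush() instead of pushing them, every push is awaited, and the old through-stream style this.queue(null) is gone: ending the readable side is presumably left to SmartDuplex itself once the final function returns (an assumption based on this diff, not on the smartstream docs). A condensed sketch of the guard pattern, with stand-in names rather than the real identifiers:

```typescript
// Condensed sketch of the guarded-push pattern used in both loops above.
// `decompressAndPush`, `streamTools`, and `shouldContinue` are stand-ins.
async function drain(
  decompressAndPush: () => Promise<Buffer | undefined>,
  streamTools: { push: (chunk: Buffer) => Promise<void> },
  shouldContinue: () => boolean,
): Promise<void> {
  while (shouldContinue()) {
    const result = await decompressAndPush();
    if (!result) {
      // nothing decoded for this block; re-check the loop condition
      continue;
    }
    // awaiting push lets the downstream consumer apply backpressure (assumption)
    await streamTools.push(result);
  }
}
```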
@@ -4,7 +4,7 @@ import * as plugins from './plugins.js';
 export interface IAnalyzedResult {
   fileType: plugins.fileType.FileTypeResult;
   isArchive: boolean;
-  resultStream: plugins.smartstream.PassThrough;
+  resultStream: plugins.smartstream.SmartDuplex;
   decompressionStream: plugins.stream.Transform | plugins.stream.Duplex | plugins.tarStream.Extract;
 }

@@ -42,31 +42,30 @@ export class ArchiveAnalyzer {
         return this.smartArchiveRef.tarTools.getDecompressionStream(); // replace with your own tar decompression stream
       default:
         // Handle unsupported formats or no decompression needed
-        return new plugins.smartstream.PassThrough();
+        return plugins.smartstream.createPassThrough();
     }
   }

   public getAnalyzedStream() {
     let firstRun = true;
-    const resultStream = new plugins.smartstream.PassThrough();
+    const resultStream = plugins.smartstream.createPassThrough();
     const analyzerstream = new plugins.smartstream.SmartDuplex<Buffer, IAnalyzedResult>({
       readableObjectMode: true,
       writeFunction: async (chunkArg: Buffer, streamtools) => {
-        const fileType = await plugins.fileType.fileTypeFromBuffer(chunkArg);
-        const decompressionStream = await this.getDecompressionStream(fileType?.mime as any);
-        resultStream.push(chunkArg);
         if (firstRun) {
           firstRun = false;
+          const fileType = await plugins.fileType.fileTypeFromBuffer(chunkArg);
+          const decompressionStream = await this.getDecompressionStream(fileType?.mime as any);
           const result: IAnalyzedResult = {
             fileType,
             isArchive: await this.mimeTypeIsArchive(fileType?.mime),
             resultStream,
             decompressionStream,
           };
-          streamtools.push(result);
-          streamtools.push(null);
-          return null;
+          await streamtools.push(result);
         }
+        await resultStream.backpressuredPush(chunkArg);
+        return null;
       },
       finalFunction: async (tools) => {
         resultStream.push(null);
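The analyzer now detects the file type and builds the decompression stream only on the first chunk, forwards every chunk into resultStream via backpressuredPush, and no longer ends the object stream right after the first result. A plausible consumption sketch for the emitted result object; the wiring below is illustrative and uses a locally defined structural stand-in rather than the real IAnalyzedResult import:

```typescript
import * as stream from 'stream';

// Structural stand-in for IAnalyzedResult (see the interface diff above),
// defined locally so this sketch stays self-contained. SmartDuplex is treated
// as a Node Duplex here, which is an assumption about smartstream internals.
interface AnalyzedResultLike {
  isArchive: boolean;
  resultStream: stream.Duplex;
  decompressionStream: stream.Transform | stream.Duplex;
}

// Illustrative handler: when the analyzer reports an archive, replay the
// buffered bytes through the matching decompression stream; otherwise hand
// the raw pass-through back to the caller.
function handleAnalyzedResult(result: AnalyzedResultLike): stream.Readable {
  if (!result.isArchive) {
    return result.resultStream;
  }
  return result.resultStream.pipe(result.decompressionStream);
}
```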