Compare commits


8 Commits

SHA1 Message Date
5ff51ff88d 4.0.17 2023-11-14 10:55:20 +01:00
c578a3fdc1 fix(core): update 2023-11-14 10:55:19 +01:00
ad0352a712 4.0.16 2023-11-13 23:14:39 +01:00
f921338fd6 fix(core): update 2023-11-13 23:14:39 +01:00
614dae5ade 4.0.15 2023-11-13 22:11:25 +01:00
f87359fb97 fix(core): update 2023-11-13 22:11:24 +01:00
21da75c09a 4.0.14 2023-11-13 20:41:52 +01:00
fe49d25765 fix(core): update 2023-11-13 20:41:52 +01:00
6 changed files with 29 additions and 24 deletions

package.json

@@ -1,6 +1,6 @@
 {
   "name": "@push.rocks/smartarchive",
-  "version": "4.0.13",
+  "version": "4.0.17",
   "description": "work with archives",
   "main": "dist_ts/index.js",
   "typings": "dist_ts/index.d.ts",
@@ -26,7 +26,7 @@
     "@push.rocks/smartpromise": "^4.0.3",
     "@push.rocks/smartrequest": "^2.0.21",
     "@push.rocks/smartrx": "^3.0.7",
-    "@push.rocks/smartstream": "^3.0.25",
+    "@push.rocks/smartstream": "^3.0.28",
     "@push.rocks/smartunique": "^3.0.6",
     "@push.rocks/smarturl": "^3.0.7",
     "@types/tar-stream": "^3.1.3",

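The only dependency change is the bump of @push.rocks/smartstream from ^3.0.25 to ^3.0.28; the source changes below appear to depend on that release, since both createPassThrough() and the awaited backpressuredPush() calls show up on the new side of the diffs.
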
pnpm-lock.yaml (generated)

@@ -21,8 +21,8 @@ dependencies:
     specifier: ^3.0.7
     version: 3.0.7
   '@push.rocks/smartstream':
-    specifier: ^3.0.25
-    version: 3.0.25
+    specifier: ^3.0.28
+    version: 3.0.28
   '@push.rocks/smartunique':
     specifier: ^3.0.6
     version: 3.0.6
@@ -747,7 +747,7 @@ packages:
       '@push.rocks/smartpath': 5.0.11
       '@push.rocks/smartpromise': 4.0.3
       '@push.rocks/smartrequest': 2.0.21
-      '@push.rocks/smartstream': 3.0.25
+      '@push.rocks/smartstream': 3.0.28
       '@types/fs-extra': 11.0.3
       '@types/glob': 8.1.0
       '@types/js-yaml': 4.0.8
@@ -964,8 +964,8 @@ packages:
       through2: 4.0.2
       dev: true
 
-  /@push.rocks/smartstream@3.0.25:
-    resolution: {integrity: sha512-rnZ4Xbuhf6CbgRBjV3o7ddSIR2baWsbeEpV39meia2CfsyY9W/WKxBQrWJbrrv8w165ksSWm9Y9Vy0PmNKr5rQ==}
+  /@push.rocks/smartstream@3.0.28:
+    resolution: {integrity: sha512-OYKSjjyQj5h9+bxz4cHfUCpbYGHjB3TvseAY/3gWOrli7neGIPg7ycCDiiUYXfYQuPjof9GMzuBsAy4cpiyFNA==}
     dependencies:
       '@push.rocks/lik': 6.0.12
       '@push.rocks/smartpromise': 4.0.3

test/test.ts

@@ -38,7 +38,7 @@
 tap.test('should extract existing files on disk', async () => {
   await testSmartarchive.exportToFs(testPaths.nogitDir);
 });
-tap.skip.test('should extract a b2zip', async () => {
+tap.test('should extract a b2zip', async () => {
   const dataUrl = 'https://daten.offeneregister.de/de_companies_ocdata.jsonl.bz2';
   const testArchive = await smartarchive.SmartArchive.fromArchiveUrl(dataUrl);
   await testArchive.exportToFs(
@@ -47,4 +47,4 @@ tap.skip.test('should extract a b2zip', async () => {
   );
 })
 
-tap.start();
+await tap.start();

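Two behavioural fixes here: the bzip2 extraction test is no longer skipped, and tap.start() is now awaited so the process waits for the run to finish instead of exiting early. A minimal sketch of the API the re-enabled test exercises, assuming an ESM context with top-level await; the target directory is illustrative (the test itself writes into its testPaths.nogitDir):

import * as smartarchive from '@push.rocks/smartarchive';

// fetch a remote .bz2 archive and unpack it onto the filesystem,
// as the re-enabled test does
const archive = await smartarchive.SmartArchive.fromArchiveUrl(
  'https://daten.offeneregister.de/de_companies_ocdata.jsonl.bz2'
);
await archive.exportToFs('./.nogit/extracted'); // illustrative path
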
ts/00_commitinfo_data.ts

@@ -3,6 +3,6 @@
  */
 export const commitinfo = {
   name: '@push.rocks/smartarchive',
-  version: '4.0.13',
+  version: '4.0.17',
   description: 'work with archives'
 }

ts/bzip2/index.ts

@@ -49,13 +49,16 @@ export function unbzip2Stream() {
     } catch (e) {
       console.error(e);
       broken = true;
+      return false;
     }
   };
+  let counter = 0;
   return new plugins.smartstream.SmartDuplex({
-    objectMode: true,
     name: 'bzip2',
+    debug: true,
+    highWaterMark: 1,
     writeFunction: async function (data, streamTools) {
-      //console.error('received', data.length,'bytes in', typeof data);
+      // console.log(`got chunk ${counter++}`)
       bufferQueue.push(data);
       hasBytes += data.length;
       if (bitReader === null) {
@@ -66,6 +69,10 @@
       while (!broken && hasBytes - bitReader.bytesRead + 1 >= (25000 + 100000 * blockSize || 4)) {
         //console.error('decompressing with', hasBytes - bitReader.bytesRead + 1, 'bytes in buffer');
         const result = await decompressAndPush();
+        if (!result) {
+          continue;
+        }
+        // console.log(result.toString());
         await streamTools.push(result);
       }
     },
@@ -73,11 +80,10 @@
       //console.error(x,'last compressing with', hasBytes, 'bytes in buffer');
       while (!broken && bitReader && hasBytes > bitReader.bytesRead) {
         const result = await decompressAndPush();
-        streamTools.push(result);
+        await streamTools.push(result);
       }
       if (!broken) {
         if (streamCRC !== null) this.emit('error', new Error('input stream ended prematurely'));
-        this.queue(null);
       }
     },
   });

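Taken together, these hunks make the bzip2 duplex cooperate with backpressure: decompressAndPush() now signals failure by returning false, the decompression loops skip falsy results instead of pushing them, every streamTools.push() is awaited so a slow reader stalls the writer rather than letting decompressed blocks pile up, and what looks like a leftover this.queue(null) call from an older through-stream implementation is dropped. A sketch of the same pattern in isolation; SmartDuplex, writeFunction and streamTools are as used above, while transform() is a hypothetical stand-in for decompressAndPush():

import { SmartDuplex } from '@push.rocks/smartstream';

// hypothetical stand-in for decompressAndPush(); falsy means "nothing to emit"
const transform = async (chunk: Buffer): Promise<Buffer | false> => chunk;

const forwarder = new SmartDuplex<Buffer, Buffer>({
  name: 'forwarder',
  highWaterMark: 1, // keep at most one chunk in flight
  writeFunction: async (chunk, streamTools) => {
    const result = await transform(chunk);
    if (!result) {
      return; // skip failed results instead of pushing them downstream
    }
    await streamTools.push(result); // the awaited push is what propagates backpressure
  },
});
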
ts/classes.archiveanalyzer.ts

@@ -4,7 +4,7 @@
 import * as plugins from './plugins.js';
 export interface IAnalyzedResult {
   fileType: plugins.fileType.FileTypeResult;
   isArchive: boolean;
-  resultStream: plugins.smartstream.PassThrough;
+  resultStream: plugins.smartstream.SmartDuplex;
   decompressionStream: plugins.stream.Transform | plugins.stream.Duplex | plugins.tarStream.Extract;
 }
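Typing resultStream as SmartDuplex rather than PassThrough lines up with the hunk below: the analyzer now calls resultStream.backpressuredPush(chunkArg), and createPassThrough() evidently returns a SmartDuplex that offers it, so analyzed chunks are re-emitted under backpressure instead of via fire-and-forget push() calls.
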
@@ -42,31 +42,30 @@ export class ArchiveAnalyzer {
         return this.smartArchiveRef.tarTools.getDecompressionStream(); // replace with your own tar decompression stream
       default:
         // Handle unsupported formats or no decompression needed
-        return new plugins.smartstream.PassThrough();
+        return plugins.smartstream.createPassThrough();
     }
   }
 
   public getAnalyzedStream() {
     let firstRun = true;
-    const resultStream = new plugins.smartstream.PassThrough();
+    const resultStream = plugins.smartstream.createPassThrough();
     const analyzerstream = new plugins.smartstream.SmartDuplex<Buffer, IAnalyzedResult>({
       readableObjectMode: true,
       writeFunction: async (chunkArg: Buffer, streamtools) => {
-        const fileType = await plugins.fileType.fileTypeFromBuffer(chunkArg);
-        const decompressionStream = await this.getDecompressionStream(fileType?.mime as any);
-        resultStream.push(chunkArg);
         if (firstRun) {
           firstRun = false;
+          const fileType = await plugins.fileType.fileTypeFromBuffer(chunkArg);
+          const decompressionStream = await this.getDecompressionStream(fileType?.mime as any);
           const result: IAnalyzedResult = {
             fileType,
             isArchive: await this.mimeTypeIsArchive(fileType?.mime),
             resultStream,
             decompressionStream,
           };
-          streamtools.push(result);
-          streamtools.push(null);
-          return null;
+          await streamtools.push(result);
         }
+        await resultStream.backpressuredPush(chunkArg);
+        return null;
       },
       finalFunction: async (tools) => {
         resultStream.push(null);