fix(smartarchive): Improve tar entry stream handling and add in-memory gzip/tgz tests

commit ec58b9cdc5 (parent 9dbb7d9731)
Date: 2025-08-18 02:06:31 +00:00
4 changed files with 213 additions and 7 deletions


@@ -3,6 +3,6 @@
  */
 export const commitinfo = {
   name: '@push.rocks/smartarchive',
-  version: '4.2.1',
+  version: '4.2.2',
   description: 'A library for working with archive files, providing utilities for compressing and decompressing data.'
 }


@@ -158,21 +158,34 @@ export class SmartArchive {
         console.log(
           `tar stream directory: ${header.name} ... skipping!`,
         );
-        next();
+        stream.resume(); // Consume directory stream
+        stream.on('end', () => next());
         return;
       }
       console.log(`tar stream file: ${header.name}`);
+      // Create a PassThrough stream to buffer the data
+      const passThrough = new plugins.stream.PassThrough();
       const streamfile = plugins.smartfile.StreamFile.fromStream(
-        stream,
+        passThrough,
         header.name,
       );
+      // Push the streamfile immediately
       streamFileIntake.push(streamfile);
-      stream.on('end', function () {
-        next(); // ready for next entry
+      // Pipe the tar entry stream to the passthrough
+      stream.pipe(passThrough);
+      // Move to next entry when this one ends
+      stream.on('end', () => {
+        passThrough.end();
+        next();
       });
     });
     tarStream.on('finish', function () {
-      console.log('finished');
+      console.log('tar extraction finished');
+      // Only signal end if this is the final stream
       streamFileIntake.signalEnd();
     });
     analyzedResultChunk.resultStream.pipe(
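
Note on the entry handling above: tar-stream delivers one entry at a time and expects next() to be called only once that entry's stream has been fully consumed, which is why directories are drained with stream.resume() and files are piped onward before next() fires. A minimal standalone sketch of the same pattern, assuming only the tar-stream package and Node's stream module (the SmartArchive plumbing such as streamFileIntake is omitted):

// Sketch only: the tar entry protocol used in the diff above.
// Assumes the tar-stream package (npm install tar-stream).
import * as tarStream from 'tar-stream';
import { PassThrough } from 'stream';

const extract = tarStream.extract();

extract.on('entry', (header, stream, next) => {
  if (header.type === 'directory') {
    stream.resume(); // drain the empty directory entry so 'end' fires
    stream.on('end', () => next());
    return;
  }
  const passThrough = new PassThrough();
  stream.pipe(passThrough); // pipe() ends passThrough when the entry ends
  // hand passThrough to whatever consumes the file contents here
  stream.on('end', () => next()); // safe to request the next entry now
});

extract.on('finish', () => {
  console.log('tar extraction finished');
});
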
@@ -199,10 +212,13 @@ export class SmartArchive {
         analyzedResultChunk.isArchive &&
         analyzedResultChunk.decompressionStream
       ) {
-        analyzedResultChunk.resultStream
+        // For nested archives (like gzip containing tar)
+        const nestedStream = analyzedResultChunk.resultStream
           .pipe(analyzedResultChunk.decompressionStream)
           .pipe(createAnalyzedStream())
           .pipe(createUnpackStream());
+        // Don't signal end here - let the nested unpacker handle it
       } else {
         const streamFile = plugins.smartfile.StreamFile.fromStream(
           analyzedResultChunk.resultStream,
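
The in-memory gzip/tgz tests named in the commit title are among the changed files not shown in this view. A fixture for that kind of test can be built entirely in memory by packing a tar archive and gzipping it; a minimal sketch, assuming tar-stream and Node's built-in zlib (the assertions and the SmartArchive call that would consume the buffer are omitted, since that API surface is not visible here):

// Sketch only: build an in-memory .tgz buffer for a gzip/tgz round-trip test.
import * as tarStream from 'tar-stream';
import * as zlib from 'zlib';

async function createTgzBuffer(): Promise<Buffer> {
  const pack = tarStream.pack();
  // Entries with inline content are finalized automatically by tar-stream.
  pack.entry({ name: 'hello.txt' }, 'hello from an in-memory tgz');
  pack.entry({ name: 'nested/file.txt' }, 'nested content');
  pack.finalize();

  // Gzip the tar stream and collect the compressed bytes into one buffer.
  const gzip = pack.pipe(zlib.createGzip());
  const chunks: Buffer[] = [];
  for await (const chunk of gzip) {
    chunks.push(chunk as Buffer);
  }
  return Buffer.concat(chunks);
}

Feeding such a buffer to the unpacker exercises exactly the nested-archive branch above: the analyzer detects gzip first, decompresses, re-analyzes the inner tar stream, and routes it into a fresh unpack stream.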