fix(core): update

This commit is contained in:
2023-11-07 04:19:54 +01:00
parent e3ab98751d
commit 32de8087ad
6 changed files with 37 additions and 21 deletions

View File

@@ -3,6 +3,6 @@
*/
export const commitinfo = {
name: '@push.rocks/smartarchive',
version: '4.0.2',
version: '4.0.3',
description: 'work with archives'
}

View File

@@ -53,13 +53,13 @@ export class ArchiveAnalyzer {
readableObjectMode: true,
writeAndTransformFunction: async (chunkArg: Buffer, streamtools) => {
const fileType = await plugins.fileType.fileTypeFromBuffer(chunkArg);
const decompressionStream = this.getDecompressionStream(fileType.mime as any);
const decompressionStream = this.getDecompressionStream(fileType?.mime as any);
resultStream.push(chunkArg);
if (firstRun) {
firstRun = false;
const result: IAnalyzedResult = {
fileType,
isArchive: await this.mimeTypeIsArchive(fileType.mime),
isArchive: await this.mimeTypeIsArchive(fileType?.mime),
resultStream,
decompressionStream,
};
@@ -68,6 +68,10 @@ export class ArchiveAnalyzer {
return null;
}
},
finalFunction: async (tools) => {
resultStream.push(null);
return null;
}
});
return analyzerstream;
}

View File

@@ -74,7 +74,7 @@ export class SmartArchive {
// return archiveStream;
}
public async exportToFs(targetDir: string): Promise<void> {
public async exportToFs(targetDir: string, fileNameArg?: string): Promise<void> {
const done = plugins.smartpromise.defer<void>();
const streamFileStream = await this.exportToStreamOfStreamFiles();
streamFileStream.pipe(new plugins.smartstream.SmartDuplex({
@@ -83,14 +83,15 @@ export class SmartArchive {
console.log(chunkArg.relativeFilePath);
const streamFile = chunkArg;
const readStream = await streamFile.createReadStream();
const writePath = plugins.path.join(targetDir + streamFile.relativeFilePath);
const dir = plugins.path.parse(writePath).dir;
await plugins.smartfile.fs.ensureDir(plugins.path.dirname(dir));
await plugins.smartfile.fs.ensureDir(targetDir);
const writePath = plugins.path.join(targetDir, (streamFile.relativeFilePath || fileNameArg));
await plugins.smartfile.fs.ensureDir(plugins.path.dirname(writePath));
const writeStream = plugins.smartfile.fsStream.createWriteStream(writePath);
readStream.pipe(writeStream).end(() => {
done.resolve();
});
readStream.pipe(writeStream);
},
finalFunction: async () => {
done.resolve();
}
}));
return done.promise;
}
@@ -106,7 +107,7 @@ export class SmartArchive {
const createUnpackStream = () =>
plugins.smartstream.createTransformFunction<IAnalyzedResult, any>(
async (analyzedResultChunk) => {
if (analyzedResultChunk.fileType.mime === 'application/x-tar') {
if (analyzedResultChunk.fileType?.mime === 'application/x-tar') {
const tarStream = analyzedResultChunk.decompressionStream as plugins.tarStream.Extract;
tarStream.on(
'entry',
@@ -116,11 +117,13 @@ export class SmartArchive {
stream.on('end', function () {
next(); // ready for next entry
});
stream.resume(); // just auto drain the stream
}
);
tarStream.on('finish', function () {
console.log('finished');
streamFileIntake.signalEnd();
})
});
analyzedResultChunk.resultStream.pipe(analyzedResultChunk.decompressionStream);
} else if (analyzedResultChunk.isArchive && analyzedResultChunk.decompressionStream) {
analyzedResultChunk.resultStream
@@ -130,11 +133,11 @@ export class SmartArchive {
} else {
const streamFile = plugins.smartfile.StreamFile.fromStream(
analyzedResultChunk.resultStream,
analyzedResultChunk.fileType.ext
analyzedResultChunk.fileType?.ext
);
streamFileIntake.push(streamFile);
}
}
},
);
archiveStream.pipe(createAnalyzedStream()).pipe(createUnpackStream());