fix(core): update

Philipp Kunz 2023-11-06 19:46:02 +01:00
parent 77e75ee0d9
commit 401150bd10
3 changed files with 7 additions and 17 deletions


@@ -35,21 +35,7 @@ tap.test('should extract existing files on disk', async () => {
   const testSmartarchive = await smartarchive.SmartArchive.fromArchiveUrl(
     'https://verdaccio.lossless.digital/@pushrocks%2fwebsetup/-/websetup-2.0.14.tgz'
   );
-  const streamfileStream = await testSmartarchive.exportToStreamOfStreamFiles();
-  streamfileStream.pipe(new plugins.smartstream.SmartDuplex({
-    objectMode: true,
-    writeAndTransformFunction: async (chunkArg: plugins.smartfile.StreamFile, streamtools) => {
-      console.log(chunkArg.relativeFilePath);
-      const streamFile = chunkArg;
-      const readStream = await streamFile.createReadStream();
-      const writePath = plugins.path.join(testPaths.nogitDir + streamFile.relativeFilePath);
-      const dir = plugins.path.parse(writePath).dir;
-      await plugins.smartfile.fs.ensureDir(plugins.path.dirname(dir));
-      const writeStream = plugins.smartfile.fsStream.createWriteStream(writePath);
-      readStream.pipe(writeStream);
-    },
-  }));
+  const streamfileStream = await testSmartarchive.exportToFs(testPaths.nogitDir);
 });
 tap.start();
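
The test now hands extraction to the archive itself instead of piping each StreamFile to disk by hand. A minimal sketch of the new call flow, using only the fromArchiveUrl and exportToFs methods visible in this diff; the URL comes from the test above and the target directory is a placeholder:

  import * as smartarchive from '@push.rocks/smartarchive';

  // Fetch a .tgz archive and extract it straight to a directory on disk.
  const archive = await smartarchive.SmartArchive.fromArchiveUrl(
    'https://verdaccio.lossless.digital/@pushrocks%2fwebsetup/-/websetup-2.0.14.tgz'
  );
  await archive.exportToFs('./.nogit/extracted'); // placeholder target directory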


@@ -3,6 +3,6 @@
  */
 export const commitinfo = {
   name: '@push.rocks/smartarchive',
-  version: '4.0.1',
+  version: '4.0.2',
   description: 'work with archives'
 }


@@ -107,7 +107,8 @@ export class SmartArchive {
       plugins.smartstream.createTransformFunction<IAnalyzedResult, any>(
         async (analyzedResultChunk) => {
           if (analyzedResultChunk.fileType.mime === 'application/x-tar') {
-            (analyzedResultChunk.decompressionStream as plugins.tarStream.Extract).on(
+            const tarStream = analyzedResultChunk.decompressionStream as plugins.tarStream.Extract;
+            tarStream.on(
               'entry',
               async (header, stream, next) => {
                 const streamfile = plugins.smartfile.StreamFile.fromStream(stream, header.name);
@@ -117,6 +118,9 @@ export class SmartArchive {
               });
             }
           );
+            tarStream.on('finish', function () {
+              streamFileIntake.signalEnd();
+            })
             analyzedResultChunk.resultStream.pipe(analyzedResultChunk.decompressionStream);
           } else if (analyzedResultChunk.isArchive && analyzedResultChunk.decompressionStream) {
             analyzedResultChunk.resultStream
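
The added 'finish' handler is what lets the export stream end cleanly: once the tar extractor has handled its last entry, the intake of StreamFiles is told that nothing more will arrive. A minimal sketch of that pattern, assuming an intake object with push and signalEnd methods like the streamFileIntake used above; tar-stream's extract(), 'entry' and 'finish' events are the library's actual API:

  import * as tarStream from 'tar-stream';

  // Stand-in for the streamFileIntake used in the diff above (assumed shape).
  const streamFileIntake = {
    push: (name: string) => console.log('entry:', name),
    signalEnd: () => console.log('no more entries'),
  };

  const extract = tarStream.extract();

  // 'entry' fires once per file contained in the tar archive.
  extract.on('entry', (header, stream, next) => {
    streamFileIntake.push(header.name);
    stream.on('end', () => next()); // continue once this entry is drained
    stream.resume(); // drain the entry's byte stream
  });

  // 'finish' fires after the last entry has been processed,
  // which is the moment to signal the end of the intake.
  extract.on('finish', () => {
    streamFileIntake.signalEnd();
  });

  // Pipe a raw tar byte stream into `extract` to drive these events,
  // e.g. fs.createReadStream('archive.tar').pipe(extract);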