fix(core): update
parent e3ab98751d
commit 32de8087ad
@@ -26,7 +26,7 @@
     "@push.rocks/smartpromise": "^4.0.3",
     "@push.rocks/smartrequest": "^2.0.20",
     "@push.rocks/smartrx": "^3.0.7",
-    "@push.rocks/smartstream": "^3.0.7",
+    "@push.rocks/smartstream": "^3.0.11",
     "@push.rocks/smartunique": "^3.0.6",
     "@push.rocks/smarturl": "^3.0.7",
     "@types/tar-stream": "^3.1.2",

pnpm-lock.yaml (generated, 10 changes)
@@ -21,8 +21,8 @@ dependencies:
     specifier: ^3.0.7
     version: 3.0.7
   '@push.rocks/smartstream':
-    specifier: ^3.0.7
-    version: 3.0.7
+    specifier: ^3.0.11
+    version: 3.0.11
   '@push.rocks/smartunique':
     specifier: ^3.0.6
     version: 3.0.6
@@ -717,7 +717,7 @@ packages:
       '@push.rocks/smartpath': 5.0.11
       '@push.rocks/smartpromise': 4.0.3
       '@push.rocks/smartrequest': 2.0.20
-      '@push.rocks/smartstream': 3.0.7
+      '@push.rocks/smartstream': 3.0.11
       '@types/fs-extra': 11.0.3
       '@types/glob': 8.1.0
       '@types/js-yaml': 4.0.8
@@ -934,8 +934,8 @@ packages:
       through2: 4.0.2
     dev: true

-  /@push.rocks/smartstream@3.0.7:
-    resolution: {integrity: sha512-F4HsYlMJusa7uf18aIXGuuAdlPxKaIcr7UDMLg4QUCtGK114SVt6E+72bXtN6yPyZ40+x8BVUWUkkTOdw22BeA==}
+  /@push.rocks/smartstream@3.0.11:
+    resolution: {integrity: sha512-MrJGCXcUYliAZlE/ozGzCj6Udtg/2f4OfJCd/7We8tK2kS+YWl+TSvubz8KFbUqcl5dqHQro0txrmVcthd9gEQ==}
     dependencies:
       '@push.rocks/smartpromise': 4.0.3
       '@push.rocks/smartrx': 3.0.7

test/test.ts (11 changes)
@@ -35,7 +35,16 @@ tap.test('should extract existing files on disk', async () => {
   const testSmartarchive = await smartarchive.SmartArchive.fromArchiveUrl(
     'https://verdaccio.lossless.digital/@pushrocks%2fwebsetup/-/websetup-2.0.14.tgz'
   );
-  const streamfileStream = await testSmartarchive.exportToFs(testPaths.nogitDir);
+  await testSmartarchive.exportToFs(testPaths.nogitDir);
 });

+tap.test('should extract a b2zip', async () => {
+  const dataUrl = 'https://daten.offeneregister.de/de_companies_ocdata.jsonl.bz2';
+  const testArchive = await smartarchive.SmartArchive.fromArchiveUrl(dataUrl);
+  await testArchive.exportToFs(
+    plugins.path.join(testPaths.nogitDir, 'de_companies_ocdata.jsonl'),
+    'data.jsonl',
+  );
+})
+
 tap.start();
@@ -3,6 +3,6 @@
  */
 export const commitinfo = {
   name: '@push.rocks/smartarchive',
-  version: '4.0.2',
+  version: '4.0.3',
   description: 'work with archives'
 }
@@ -53,13 +53,13 @@ export class ArchiveAnalyzer {
       readableObjectMode: true,
       writeAndTransformFunction: async (chunkArg: Buffer, streamtools) => {
         const fileType = await plugins.fileType.fileTypeFromBuffer(chunkArg);
-        const decompressionStream = this.getDecompressionStream(fileType.mime as any);
+        const decompressionStream = this.getDecompressionStream(fileType?.mime as any);
         resultStream.push(chunkArg);
         if (firstRun) {
           firstRun = false;
           const result: IAnalyzedResult = {
             fileType,
-            isArchive: await this.mimeTypeIsArchive(fileType.mime),
+            isArchive: await this.mimeTypeIsArchive(fileType?.mime),
             resultStream,
             decompressionStream,
           };
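Note on the change above: fileTypeFromBuffer (from the file-type package used here via plugins.fileType) resolves to undefined for buffers it cannot classify, such as plain text, so the optional chaining prevents a TypeError on unrecognized input. A minimal sketch of that behaviour, outside this codebase:

    import { fileTypeFromBuffer } from 'file-type';

    // Returns FileTypeResult | undefined; undefined means "not a recognized binary format".
    const sniff = async (chunk: Buffer) => {
      const fileType = await fileTypeFromBuffer(chunk);
      return {
        mime: fileType?.mime, // undefined instead of a crash for unknown data
        ext: fileType?.ext,
      };
    };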
@@ -68,6 +68,10 @@ export class ArchiveAnalyzer {
           return null;
         }
       },
+      finalFunction: async (tools) => {
+        resultStream.push(null);
+        return null;
+      }
     });
     return analyzerstream;
   }
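Note on the added finalFunction: pushing null is Node's end-of-stream marker, so once the analyzer's duplex has consumed all of its input, the side-channel resultStream is properly ended and downstream consumers receive an 'end' event instead of waiting forever. A minimal sketch of that semantics, with a plain Node Readable standing in for resultStream (hypothetical names):

    import { Readable } from 'stream';

    // Manually fed readable, like the analyzer feeds resultStream chunk by chunk.
    const resultStream = new Readable({ read() {} });

    resultStream.on('data', (chunk) => { /* chunks flow on to the decompression stage */ });
    resultStream.on('end', () => console.log('resultStream ended'));

    resultStream.push(Buffer.from('archive bytes'));
    // Equivalent of the finalFunction above: without push(null), 'end' never fires.
    resultStream.push(null);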
@@ -74,7 +74,7 @@ export class SmartArchive {
     // return archiveStream;
   }

-  public async exportToFs(targetDir: string): Promise<void> {
+  public async exportToFs(targetDir: string, fileNameArg?: string): Promise<void> {
     const done = plugins.smartpromise.defer<void>();
     const streamFileStream = await this.exportToStreamOfStreamFiles();
     streamFileStream.pipe(new plugins.smartstream.SmartDuplex({
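Note on the new optional parameter: fileNameArg acts as a fallback output name for archives whose extracted StreamFile carries no relativeFilePath (the single-file case, e.g. a bare .bz2 download), as the next hunk shows. A usage sketch mirroring the updated tests, with a placeholder output directory:

    import * as path from 'path';
    import * as smartarchive from '@push.rocks/smartarchive';

    const run = async () => {
      // A .tgz carries internal entry paths, so no file name is needed.
      const tgz = await smartarchive.SmartArchive.fromArchiveUrl(
        'https://verdaccio.lossless.digital/@pushrocks%2fwebsetup/-/websetup-2.0.14.tgz'
      );
      await tgz.exportToFs('./.nogit'); // './.nogit' is a placeholder target directory

      // A bare .bz2 stream has no entry names; the second argument supplies one.
      const bz2 = await smartarchive.SmartArchive.fromArchiveUrl(
        'https://daten.offeneregister.de/de_companies_ocdata.jsonl.bz2'
      );
      await bz2.exportToFs(path.join('./.nogit', 'de_companies_ocdata.jsonl'), 'data.jsonl');
    };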
@@ -83,14 +83,15 @@ export class SmartArchive {
         console.log(chunkArg.relativeFilePath);
         const streamFile = chunkArg;
         const readStream = await streamFile.createReadStream();
-        const writePath = plugins.path.join(targetDir + streamFile.relativeFilePath);
-        const dir = plugins.path.parse(writePath).dir;
-        await plugins.smartfile.fs.ensureDir(plugins.path.dirname(dir));
+        await plugins.smartfile.fs.ensureDir(targetDir);
+        const writePath = plugins.path.join(targetDir, (streamFile.relativeFilePath || fileNameArg));
+        await plugins.smartfile.fs.ensureDir(plugins.path.dirname(writePath));
         const writeStream = plugins.smartfile.fsStream.createWriteStream(writePath);
-        readStream.pipe(writeStream).end(() => {
-          done.resolve();
-        });
+        readStream.pipe(writeStream);
       },
+      finalFunction: async () => {
+        done.resolve();
+      }
     }));
     return done.promise;
   }
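Note on the reworked completion handling: done.resolve() now lives in the SmartDuplex finalFunction, so the promise returned by exportToFs resolves once, after every StreamFile has been written, rather than inside the write callback of the first file. A minimal sketch of the same defer-plus-final pattern using plain Node streams (hypothetical helper, not the library API):

    import { Readable, Writable } from 'stream';

    // Resolves after the last object has been written, mirroring finalFunction above.
    function writeAll(source: Readable): Promise<void> {
      return new Promise<void>((resolve) => {
        source.pipe(
          new Writable({
            objectMode: true,
            write(fileObj, _enc, cb) {
              // ...persist fileObj here, then signal readiness for the next one
              cb();
            },
            final(cb) {
              resolve(); // the finalFunction role: fires once, after all chunks
              cb();
            },
          })
        );
      });
    }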
@@ -106,7 +107,7 @@ export class SmartArchive {
     const createUnpackStream = () =>
       plugins.smartstream.createTransformFunction<IAnalyzedResult, any>(
         async (analyzedResultChunk) => {
-          if (analyzedResultChunk.fileType.mime === 'application/x-tar') {
+          if (analyzedResultChunk.fileType?.mime === 'application/x-tar') {
             const tarStream = analyzedResultChunk.decompressionStream as plugins.tarStream.Extract;
             tarStream.on(
               'entry',
@@ -116,11 +117,13 @@ export class SmartArchive {
                 stream.on('end', function () {
                   next(); // ready for next entry
                 });
+                stream.resume(); // just auto drain the stream
               }
             );
             tarStream.on('finish', function () {
+              console.log('finished');
               streamFileIntake.signalEnd();
-            })
+            });
             analyzedResultChunk.resultStream.pipe(analyzedResultChunk.decompressionStream);
           } else if (analyzedResultChunk.isArchive && analyzedResultChunk.decompressionStream) {
             analyzedResultChunk.resultStream
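Note on stream.resume() above: tar-stream hands out entries sequentially and only emits the next 'entry' once the current entry stream has been drained, so resuming the stream here lets 'end' fire, next() run, and eventually 'finish' arrive. A minimal sketch of that consumption contract with the tar-stream extract API (the source stream is hypothetical):

    import * as tarStream from 'tar-stream';

    const extract = tarStream.extract();

    extract.on('entry', (header, stream, next) => {
      // header.name, header.size, ... describe the current entry
      stream.on('end', () => next()); // ready for the next entry
      stream.resume(); // drain the entry; otherwise extraction stalls on backpressure
    });

    extract.on('finish', () => {
      // every entry has been processed
    });

    // someTarballStream.pipe(extract); // hypothetical source of raw tar bytes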
@@ -130,11 +133,11 @@ export class SmartArchive {
           } else {
             const streamFile = plugins.smartfile.StreamFile.fromStream(
               analyzedResultChunk.resultStream,
-              analyzedResultChunk.fileType.ext
+              analyzedResultChunk.fileType?.ext
             );
             streamFileIntake.push(streamFile);
           }
-        }
+        },
       );

     archiveStream.pipe(createAnalyzedStream()).pipe(createUnpackStream());