fix(core): update
parent: 25e847a9ea
commit: d53c46fa82
package.json (19 changed lines)
@@ -21,25 +21,26 @@
   },
   "homepage": "https://github.com/pushrocks/smartarchive#readme",
   "dependencies": {
-    "@push.rocks/smartfile": "^11.0.0",
+    "@push.rocks/smartdelay": "^3.0.5",
+    "@push.rocks/smartfile": "^11.0.4",
     "@push.rocks/smartpath": "^5.0.11",
     "@push.rocks/smartpromise": "^4.0.3",
     "@push.rocks/smartrequest": "^2.0.21",
     "@push.rocks/smartrx": "^3.0.7",
-    "@push.rocks/smartstream": "^3.0.30",
+    "@push.rocks/smartstream": "^3.0.34",
-    "@push.rocks/smartunique": "^3.0.6",
+    "@push.rocks/smartunique": "^3.0.8",
     "@push.rocks/smarturl": "^3.0.7",
     "@types/tar-stream": "^3.1.3",
-    "fflate": "^0.8.1",
+    "fflate": "^0.8.2",
-    "file-type": "^18.7.0",
+    "file-type": "^19.0.0",
-    "tar-stream": "^3.1.6",
+    "tar-stream": "^3.1.7",
     "through": "^2.3.8"
   },
   "devDependencies": {
-    "@git.zone/tsbuild": "^2.1.66",
+    "@git.zone/tsbuild": "^2.1.72",
     "@git.zone/tsrun": "^1.2.44",
-    "@git.zone/tstest": "^1.0.84",
+    "@git.zone/tstest": "^1.0.88",
-    "@push.rocks/tapbundle": "^5.0.15"
+    "@push.rocks/tapbundle": "^5.0.17"
   },
   "private": false,
   "files": [
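The notable addition here is the new runtime dependency @push.rocks/smartdelay, which the zip tooling added later in this commit relies on. A minimal hedged sketch of its delayFor helper, assuming the exported name matches the package's documented API:

// Sketch only, assuming @push.rocks/smartdelay exports delayFor(ms) as used further
// down in this commit's zip decompression stream.
import * as smartdelay from '@push.rocks/smartdelay';

async function yieldOneTick() {
  // delayFor(0) defers continuation by one timer tick, letting pending
  // decompression callbacks flush before a stream is closed.
  await smartdelay.delayFor(0);
}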
|
pnpm-lock.yaml (2191 changed lines, generated; diff suppressed because it is too large)
@@ -3,6 +3,6 @@
  */
 export const commitinfo = {
   name: '@push.rocks/smartarchive',
-  version: '4.0.19',
+  version: '4.0.20',
   description: 'work with archives'
 }
|
@@ -36,6 +36,8 @@ export class ArchiveAnalyzer {
     switch (mimeTypeArg) {
       case 'application/gzip':
         return this.smartArchiveRef.gzipTools.getDecompressionStream();
+      case 'application/zip':
+        return this.smartArchiveRef.zipTools.getDecompressionStream();
       case 'application/x-bzip2':
         return await this.smartArchiveRef.bzip2Tools.getDecompressionStream(); // replace with your own bzip2 decompression stream
       case 'application/x-tar':

@@ -56,6 +58,9 @@ export class ArchiveAnalyzer {
       firstRun = false;
       const fileType = await plugins.fileType.fileTypeFromBuffer(chunkArg);
       const decompressionStream = await this.getDecompressionStream(fileType?.mime as any);
+      /**
+       * analyzed stream emits once with this object
+       */
       const result: IAnalyzedResult = {
         fileType,
         isArchive: await this.mimeTypeIsArchive(fileType?.mime),
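For orientation, a hedged sketch of how the mime-based dispatch above plays out once zip is recognized; pickDecompressor is a hypothetical stand-in for ArchiveAnalyzer.getDecompressionStream(), and only the public file-type API is assumed:

import { fileTypeFromBuffer } from 'file-type';

// Hypothetical helper mirroring the switch in ArchiveAnalyzer.getDecompressionStream().
async function pickDecompressor(buf: Buffer): Promise<string> {
  const detected = await fileTypeFromBuffer(buf); // { ext, mime } or undefined
  switch (detected?.mime) {
    case 'application/gzip':
      return 'gzip decompression stream';
    case 'application/zip':
      return 'zip decompression stream (new in this commit)';
    case 'application/x-bzip2':
      return 'bzip2 decompression stream';
    case 'application/x-tar':
      return 'tar extract stream';
    default:
      return 'passthrough (not a recognized archive)';
  }
}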
@@ -1,9 +1,10 @@
 import * as plugins from './plugins.js';
 import * as paths from './paths.js';
 
+import { Bzip2Tools } from './classes.bzip2tools.js';
 import { GzipTools } from './classes.gziptools.js';
 import { TarTools } from './classes.tartools.js';
-import { Bzip2Tools } from './classes.bzip2tools.js';
+import { ZipTools } from './classes.ziptools.js';
 
 import { ArchiveAnalyzer, type IAnalyzedResult } from './classes.archiveanalyzer.js';
 
@@ -32,9 +33,10 @@ export class SmartArchive {
   }
 
   // INSTANCE
-  public tarTools = new TarTools(this);
   public gzipTools = new GzipTools(this);
   public bzip2Tools = new Bzip2Tools(this);
+  public tarTools = new TarTools(this);
+  public zipTools = new ZipTools(this);
   public archiveAnalyzer = new ArchiveAnalyzer(this);
 
   public sourceUrl: string;
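A hedged usage sketch of the instance surface shown above, assuming the package index re-exports the SmartArchive class and that it can be constructed directly (the class's factory helpers are not part of this hunk):

// Sketch only: each *Tools helper keeps a back-reference to the SmartArchive instance
// and exposes getDecompressionStream(); property names follow the diff above.
import { SmartArchive } from '@push.rocks/smartarchive';

const archive = new SmartArchive();
const zipDecompressor = archive.zipTools.getDecompressionStream();
const gzipDecompressor = archive.gzipTools.getDecompressionStream();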
@@ -77,7 +79,8 @@ export class SmartArchive {
   public async exportToFs(targetDir: string, fileNameArg?: string): Promise<void> {
     const done = plugins.smartpromise.defer<void>();
     const streamFileStream = await this.exportToStreamOfStreamFiles();
-    streamFileStream.pipe(new plugins.smartstream.SmartDuplex({
+    streamFileStream.pipe(
+      new plugins.smartstream.SmartDuplex({
       objectMode: true,
       writeFunction: async (chunkArg: plugins.smartfile.StreamFile, streamtools) => {
         const done = plugins.smartpromise.defer<void>();
@@ -85,19 +88,23 @@ export class SmartArchive {
         const streamFile = chunkArg;
         const readStream = await streamFile.createReadStream();
         await plugins.smartfile.fs.ensureDir(targetDir);
-        const writePath = plugins.path.join(targetDir, (streamFile.relativeFilePath || fileNameArg));
+        const writePath = plugins.path.join(
+          targetDir,
+          streamFile.relativeFilePath || fileNameArg
+        );
         await plugins.smartfile.fs.ensureDir(plugins.path.dirname(writePath));
         const writeStream = plugins.smartfile.fsStream.createWriteStream(writePath);
         readStream.pipe(writeStream);
         writeStream.on('finish', () => {
           done.resolve();
-        })
+        });
         await done.promise;
       },
       finalFunction: async () => {
         done.resolve();
-      }
-    }));
+      },
+      })
+    );
     return done.promise;
   }
 
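A hedged usage sketch of exportToFs as reshaped above; SmartArchive.fromArchiveFile is assumed to be one of the class's factory helpers and is not shown in this diff:

// Sketch only: unpack a local archive into ./unpacked with the method above.
import { SmartArchive } from '@push.rocks/smartarchive';

const archive = await SmartArchive.fromArchiveFile('./some-archive.tar.gz');
await archive.exportToFs('./unpacked');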
@@ -114,21 +121,27 @@ export class SmartArchive {
       async (analyzedResultChunk) => {
         if (analyzedResultChunk.fileType?.mime === 'application/x-tar') {
           const tarStream = analyzedResultChunk.decompressionStream as plugins.tarStream.Extract;
-          tarStream.on(
-            'entry',
-            async (header, stream, next) => {
+          tarStream.on('entry', async (header, stream, next) => {
             const streamfile = plugins.smartfile.StreamFile.fromStream(stream, header.name);
             streamFileIntake.push(streamfile);
             stream.on('end', function () {
               next(); // ready for next entry
             });
-            }
-          );
+          });
           tarStream.on('finish', function () {
             console.log('finished');
             streamFileIntake.signalEnd();
           });
           analyzedResultChunk.resultStream.pipe(analyzedResultChunk.decompressionStream);
+        } else if (analyzedResultChunk.fileType?.mime === 'application/zip') {
+          analyzedResultChunk.resultStream
+            .pipe(analyzedResultChunk.decompressionStream)
+            .pipe(new plugins.smartstream.SmartDuplex({
+              objectMode: true,
+              writeFunction: async (streamFileArg: plugins.smartfile.StreamFile, streamtools) => {
+                streamFileIntake.push(streamFileArg);
+              },
+            }));
         } else if (analyzedResultChunk.isArchive && analyzedResultChunk.decompressionStream) {
           analyzedResultChunk.resultStream
             .pipe(analyzedResultChunk.decompressionStream)
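A hedged sketch of consuming the object-mode stream that the new zip branch feeds: each entry is pushed into streamFileIntake, so a caller of exportToStreamOfStreamFiles() could drain it like this (archive is assumed to be an existing SmartArchive instance):

// Sketch only, reusing the SmartDuplex and StreamFile types that appear in the diff.
import * as smartstream from '@push.rocks/smartstream';
import * as smartfile from '@push.rocks/smartfile';

const streamOfFiles = await archive.exportToStreamOfStreamFiles();
streamOfFiles.pipe(
  new smartstream.SmartDuplex({
    objectMode: true,
    writeFunction: async (streamFile: smartfile.StreamFile) => {
      console.log('zip entry:', streamFile.relativeFilePath);
    },
  })
);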
@@ -142,8 +155,9 @@ export class SmartArchive {
           streamFileIntake.push(streamFile);
           streamFileIntake.signalEnd();
         }
-      }, {
-        objectMode: true
+      },
+      {
+        objectMode: true,
       }
     );
 
|
ts/classes.ziptools.ts (new file, 77 lines)

@@ -0,0 +1,77 @@
+import type { SmartArchive } from './classes.smartarchive.js';
+import * as plugins from './plugins.js';
+
+class DecompressZipTransform extends plugins.smartstream.SmartDuplex<ArrayBufferLike> {
+  private streamtools: plugins.smartstream.IStreamTools;
+  private unzipper = new plugins.fflate.Unzip(async (fileArg) => {
+    let resultBuffer: Buffer;
+    fileArg.ondata = async (dataArg, dat, final) => {
+      resultBuffer? resultBuffer = Buffer.concat([resultBuffer, Buffer.from(dat)])
+        : resultBuffer = Buffer.from(dat);
+      if (final) {
+        const streamFile = plugins.smartfile.StreamFile.fromBuffer(resultBuffer);
+        streamFile.relativeFilePath = fileArg.name;
+        this.streamtools.push(fileArg);
+      }
+    }
+    fileArg.start();
+  });
+  constructor() {
+    super({
+      objectMode: true,
+      writeFunction: async (chunkArg: Buffer, streamtoolsArg) => {
+        this.streamtools? null : this.streamtools = streamtoolsArg;
+        this.unzipper.push(chunkArg, false);
+      },
+      finalFunction: async () => {
+        this.unzipper.push(Buffer.from(''), true);
+        await plugins.smartdelay.delayFor(0);
+        this.streamtools.push(null);
+      }
+    });
+    this.unzipper.register(plugins.fflate.UnzipInflate);
+  }
+}
+
+// This class wraps fflate's zip in a Node.js Transform stream for compression
+export class CompressZipTransform extends plugins.stream.Transform {
+  files: { [fileName: string]: Uint8Array };
+
+  constructor() {
+    super();
+    this.files = {};
+  }
+
+  _transform(chunk: Buffer, encoding: BufferEncoding, callback: plugins.stream.TransformCallback) {
+    // Simple example: storing chunks in memory before finalizing ZIP in _flush
+    this.files['file.txt'] = new Uint8Array(chunk);
+    callback();
+  }
+
+  _flush(callback: plugins.stream.TransformCallback) {
+    plugins.fflate.zip(this.files, (err, zipped) => {
+      if (err) {
+        callback(err);
+      } else {
+        this.push(Buffer.from(zipped));
+        callback();
+      }
+    });
+  }
+}
+
+export class ZipTools {
+  smartArchiveRef: SmartArchive;
+
+  constructor(smartArchiveRefArg: SmartArchive) {
+    this.smartArchiveRef = smartArchiveRefArg;
+  }
+
+  public getCompressionStream() {
+    return new CompressZipTransform();
+  }
+
+  public getDecompressionStream() {
+    return new DecompressZipTransform();
+  }
+}
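A hedged usage sketch for the new ZipTools: pipe raw zip bytes into the decompression transform and read one object-mode chunk per archive entry. Here archive is assumed to be an existing SmartArchive instance and demo.zip a local file; fs is Node's built-in module.

// Sketch only; the shape of each emitted entry follows what the class above pushes.
import * as fs from 'fs';

const unzip = archive.zipTools.getDecompressionStream();
unzip.on('data', (entry: any) => {
  // DecompressZipTransform runs in object mode and surfaces one chunk per zip entry.
  console.log('unzipped entry:', entry.name ?? entry.relativeFilePath);
});
fs.createReadStream('./demo.zip').pipe(unzip);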
@@ -6,6 +6,7 @@ export { path, stream };
 
 // @pushrocks scope
 import * as smartfile from '@push.rocks/smartfile';
+import * as smartdelay from '@push.rocks/smartdelay';
 import * as smartpath from '@push.rocks/smartpath';
 import * as smartpromise from '@push.rocks/smartpromise';
 import * as smartrequest from '@push.rocks/smartrequest';

@@ -14,7 +15,7 @@ import * as smartstream from '@push.rocks/smartstream';
 import * as smartrx from '@push.rocks/smartrx';
 import * as smarturl from '@push.rocks/smarturl';
 
-export { smartfile, smartpath, smartpromise, smartrequest, smartunique, smartstream, smartrx, smarturl };
+export { smartfile, smartdelay, smartpath, smartpromise, smartrequest, smartunique, smartstream, smartrx, smarturl };
 
 // third party scope
 import * as fileType from 'file-type';
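For context, a hedged sketch of the plugins barrel pattern the file above implements: other modules import the namespace once and reach every dependency through it, which is how the new zip tooling picks up smartdelay.

// Sketch only: how a sibling module in this repo consumes the barrel.
import * as plugins from './plugins.js';

// The smartdelay re-export added in this hunk is reached as plugins.smartdelay.
await plugins.smartdelay.delayFor(100);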