fix(core): update
parent 25e847a9ea
commit d53c46fa82

package.json (19 lines changed)
@@ -21,25 +21,26 @@
   },
   "homepage": "https://github.com/pushrocks/smartarchive#readme",
   "dependencies": {
-    "@push.rocks/smartfile": "^11.0.0",
+    "@push.rocks/smartdelay": "^3.0.5",
+    "@push.rocks/smartfile": "^11.0.4",
     "@push.rocks/smartpath": "^5.0.11",
     "@push.rocks/smartpromise": "^4.0.3",
     "@push.rocks/smartrequest": "^2.0.21",
     "@push.rocks/smartrx": "^3.0.7",
-    "@push.rocks/smartstream": "^3.0.30",
-    "@push.rocks/smartunique": "^3.0.6",
+    "@push.rocks/smartstream": "^3.0.34",
+    "@push.rocks/smartunique": "^3.0.8",
     "@push.rocks/smarturl": "^3.0.7",
     "@types/tar-stream": "^3.1.3",
-    "fflate": "^0.8.1",
-    "file-type": "^18.7.0",
-    "tar-stream": "^3.1.6",
+    "fflate": "^0.8.2",
+    "file-type": "^19.0.0",
+    "tar-stream": "^3.1.7",
     "through": "^2.3.8"
   },
   "devDependencies": {
-    "@git.zone/tsbuild": "^2.1.66",
+    "@git.zone/tsbuild": "^2.1.72",
     "@git.zone/tsrun": "^1.2.44",
-    "@git.zone/tstest": "^1.0.84",
-    "@push.rocks/tapbundle": "^5.0.15"
+    "@git.zone/tstest": "^1.0.88",
+    "@push.rocks/tapbundle": "^5.0.17"
   },
   "private": false,
   "files": [
pnpm-lock.yaml (generated, 2191 lines changed)

File diff suppressed because it is too large.
@@ -3,6 +3,6 @@
 */
 export const commitinfo = {
   name: '@push.rocks/smartarchive',
-  version: '4.0.19',
+  version: '4.0.20',
   description: 'work with archives'
 }
ts/classes.archiveanalyzer.ts

@@ -36,6 +36,8 @@ export class ArchiveAnalyzer {
     switch (mimeTypeArg) {
       case 'application/gzip':
         return this.smartArchiveRef.gzipTools.getDecompressionStream();
+      case 'application/zip':
+        return this.smartArchiveRef.zipTools.getDecompressionStream();
       case 'application/x-bzip2':
         return await this.smartArchiveRef.bzip2Tools.getDecompressionStream(); // replace with your own bzip2 decompression stream
       case 'application/x-tar':
@@ -56,6 +58,9 @@ export class ArchiveAnalyzer {
       firstRun = false;
       const fileType = await plugins.fileType.fileTypeFromBuffer(chunkArg);
       const decompressionStream = await this.getDecompressionStream(fileType?.mime as any);
+      /**
+       * analyzed stream emits once with this object
+       */
       const result: IAnalyzedResult = {
         fileType,
         isArchive: await this.mimeTypeIsArchive(fileType?.mime),
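For context, here is a minimal standalone sketch of the MIME-based routing the analyzer performs once `file-type` has identified the first chunk of a stream; the helper name `pickDecompressor` and its string labels are illustrative only, not part of this commit.

```typescript
import { fileTypeFromBuffer } from 'file-type';

// Illustrative helper mirroring the switch above; labels stand in for the real streams.
async function pickDecompressor(firstChunk: Uint8Array): Promise<string> {
  const detected = await fileTypeFromBuffer(firstChunk); // e.g. { ext: 'zip', mime: 'application/zip' }
  switch (detected?.mime) {
    case 'application/gzip':
      return 'gzipTools.getDecompressionStream()';
    case 'application/zip': // branch added by this commit
      return 'zipTools.getDecompressionStream()';
    case 'application/x-bzip2':
      return 'bzip2Tools.getDecompressionStream()';
    case 'application/x-tar':
      return 'tar-stream extract';
    default:
      return 'passthrough (not a recognized archive)';
  }
}
```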
ts/classes.smartarchive.ts

@@ -1,9 +1,10 @@
 import * as plugins from './plugins.js';
 import * as paths from './paths.js';

+import { Bzip2Tools } from './classes.bzip2tools.js';
 import { GzipTools } from './classes.gziptools.js';
 import { TarTools } from './classes.tartools.js';
-import { Bzip2Tools } from './classes.bzip2tools.js';
+import { ZipTools } from './classes.ziptools.js';

 import { ArchiveAnalyzer, type IAnalyzedResult } from './classes.archiveanalyzer.js';

@@ -32,9 +33,10 @@ export class SmartArchive {
   }

   // INSTANCE
-  public tarTools = new TarTools(this);
   public gzipTools = new GzipTools(this);
   public bzip2Tools = new Bzip2Tools(this);
+  public tarTools = new TarTools(this);
+  public zipTools = new ZipTools(this);
   public archiveAnalyzer = new ArchiveAnalyzer(this);

   public sourceUrl: string;
@@ -77,27 +79,32 @@ export class SmartArchive {
   public async exportToFs(targetDir: string, fileNameArg?: string): Promise<void> {
     const done = plugins.smartpromise.defer<void>();
     const streamFileStream = await this.exportToStreamOfStreamFiles();
-    streamFileStream.pipe(new plugins.smartstream.SmartDuplex({
-      objectMode: true,
-      writeFunction: async (chunkArg: plugins.smartfile.StreamFile, streamtools) => {
-        const done = plugins.smartpromise.defer<void>();
-        console.log(chunkArg.relativeFilePath ? chunkArg.relativeFilePath : 'no relative path');
-        const streamFile = chunkArg;
-        const readStream = await streamFile.createReadStream();
-        await plugins.smartfile.fs.ensureDir(targetDir);
-        const writePath = plugins.path.join(targetDir, (streamFile.relativeFilePath || fileNameArg));
-        await plugins.smartfile.fs.ensureDir(plugins.path.dirname(writePath));
-        const writeStream = plugins.smartfile.fsStream.createWriteStream(writePath);
-        readStream.pipe(writeStream);
-        writeStream.on('finish', () => {
-          done.resolve();
-        })
-        await done.promise;
-      },
-      finalFunction: async () => {
-        done.resolve();
-      }
-    }));
+    streamFileStream.pipe(
+      new plugins.smartstream.SmartDuplex({
+        objectMode: true,
+        writeFunction: async (chunkArg: plugins.smartfile.StreamFile, streamtools) => {
+          const done = plugins.smartpromise.defer<void>();
+          console.log(chunkArg.relativeFilePath ? chunkArg.relativeFilePath : 'no relative path');
+          const streamFile = chunkArg;
+          const readStream = await streamFile.createReadStream();
+          await plugins.smartfile.fs.ensureDir(targetDir);
+          const writePath = plugins.path.join(
+            targetDir,
+            streamFile.relativeFilePath || fileNameArg
+          );
+          await plugins.smartfile.fs.ensureDir(plugins.path.dirname(writePath));
+          const writeStream = plugins.smartfile.fsStream.createWriteStream(writePath);
+          readStream.pipe(writeStream);
+          writeStream.on('finish', () => {
+            done.resolve();
+          });
+          await done.promise;
+        },
+        finalFunction: async () => {
+          done.resolve();
+        },
+      })
+    );
     return done.promise;
   }

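The reformatted `exportToFs()` above is built on `SmartDuplex` from `@push.rocks/smartstream` in object mode: each `StreamFile` coming from `exportToStreamOfStreamFiles()` is handled by `writeFunction`, and `finalFunction` runs once the source ends. A minimal sketch of that pattern, assuming `SmartDuplex` behaves as a standard Node.js Duplex (which the `pipe()` calls in this commit imply):

```typescript
import * as smartstream from '@push.rocks/smartstream';

// Minimal object-mode SmartDuplex: writeFunction handles each incoming object,
// streamtools.push() forwards results downstream, finalFunction runs at end of input.
const uppercase = new smartstream.SmartDuplex({
  objectMode: true,
  writeFunction: async (chunkArg: string, streamtools) => {
    streamtools.push(chunkArg.toUpperCase());
  },
  finalFunction: async () => {
    // nothing to flush in this sketch
  },
});

uppercase.on('data', (chunkArg) => console.log(chunkArg)); // expected: 'HELLO' (assumption)
uppercase.write('hello');
uppercase.end();
```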
@@ -114,21 +121,27 @@ export class SmartArchive {
       async (analyzedResultChunk) => {
         if (analyzedResultChunk.fileType?.mime === 'application/x-tar') {
           const tarStream = analyzedResultChunk.decompressionStream as plugins.tarStream.Extract;
-          tarStream.on(
-            'entry',
-            async (header, stream, next) => {
-              const streamfile = plugins.smartfile.StreamFile.fromStream(stream, header.name);
-              streamFileIntake.push(streamfile);
-              stream.on('end', function () {
-                next(); // ready for next entry
-              });
-            }
-          );
+          tarStream.on('entry', async (header, stream, next) => {
+            const streamfile = plugins.smartfile.StreamFile.fromStream(stream, header.name);
+            streamFileIntake.push(streamfile);
+            stream.on('end', function () {
+              next(); // ready for next entry
+            });
+          });
           tarStream.on('finish', function () {
             console.log('finished');
             streamFileIntake.signalEnd();
           });
           analyzedResultChunk.resultStream.pipe(analyzedResultChunk.decompressionStream);
+        } else if (analyzedResultChunk.fileType?.mime === 'application/zip') {
+          analyzedResultChunk.resultStream
+            .pipe(analyzedResultChunk.decompressionStream)
+            .pipe(new plugins.smartstream.SmartDuplex({
+              objectMode: true,
+              writeFunction: async (streamFileArg: plugins.smartfile.StreamFile, streamtools) => {
+                streamFileIntake.push(streamFileArg);
+              },
+            }));
         } else if (analyzedResultChunk.isArchive && analyzedResultChunk.decompressionStream) {
           analyzedResultChunk.resultStream
             .pipe(analyzedResultChunk.decompressionStream)
@@ -142,8 +155,9 @@
         streamFileIntake.push(streamFile);
         streamFileIntake.signalEnd();
       }
-    }, {
-      objectMode: true
+    },
+    {
+      objectMode: true,
     }
   );

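The compacted tar branch above follows tar-stream's entry protocol: each entry delivers a header plus a readable stream, and `next()` must be called before the next entry is emitted. A standalone sketch of that protocol (the file path is illustrative):

```typescript
import * as fs from 'fs';
import * as tarStream from 'tar-stream';

const extract = tarStream.extract();

extract.on('entry', (header, stream, next) => {
  console.log(header.name, header.size);
  stream.on('end', () => next()); // ready for the next entry
  stream.resume(); // drain the entry; a real consumer would pipe it somewhere
});

extract.on('finish', () => {
  console.log('all entries consumed');
});

// Illustrative input: an uncompressed .tar file on disk.
fs.createReadStream('./some-archive.tar').pipe(extract);
```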
ts/classes.ziptools.ts (new file, 77 lines)

@@ -0,0 +1,77 @@
+import type { SmartArchive } from './classes.smartarchive.js';
+import * as plugins from './plugins.js';
+
+class DecompressZipTransform extends plugins.smartstream.SmartDuplex<ArrayBufferLike> {
+  private streamtools: plugins.smartstream.IStreamTools;
+  private unzipper = new plugins.fflate.Unzip(async (fileArg) => {
+    let resultBuffer: Buffer;
+    fileArg.ondata = async (dataArg, dat, final) => {
+      resultBuffer? resultBuffer = Buffer.concat([resultBuffer, Buffer.from(dat)])
+        : resultBuffer = Buffer.from(dat);
+      if (final) {
+        const streamFile = plugins.smartfile.StreamFile.fromBuffer(resultBuffer);
+        streamFile.relativeFilePath = fileArg.name;
+        this.streamtools.push(fileArg);
+      }
+    }
+    fileArg.start();
+  });
+  constructor() {
+    super({
+      objectMode: true,
+      writeFunction: async (chunkArg: Buffer, streamtoolsArg) => {
+        this.streamtools? null : this.streamtools = streamtoolsArg;
+        this.unzipper.push(chunkArg, false);
+      },
+      finalFunction: async () => {
+        this.unzipper.push(Buffer.from(''), true);
+        await plugins.smartdelay.delayFor(0);
+        this.streamtools.push(null);
+      }
+    });
+    this.unzipper.register(plugins.fflate.UnzipInflate);
+  }
+}
+
+// This class wraps fflate's zip in a Node.js Transform stream for compression
+export class CompressZipTransform extends plugins.stream.Transform {
+  files: { [fileName: string]: Uint8Array };
+
+  constructor() {
+    super();
+    this.files = {};
+  }
+
+  _transform(chunk: Buffer, encoding: BufferEncoding, callback: plugins.stream.TransformCallback) {
+    // Simple example: storing chunks in memory before finalizing ZIP in _flush
+    this.files['file.txt'] = new Uint8Array(chunk);
+    callback();
+  }
+
+  _flush(callback: plugins.stream.TransformCallback) {
+    plugins.fflate.zip(this.files, (err, zipped) => {
+      if (err) {
+        callback(err);
+      } else {
+        this.push(Buffer.from(zipped));
+        callback();
+      }
+    });
+  }
+}
+
+export class ZipTools {
+  smartArchiveRef: SmartArchive;
+
+  constructor(smartArchiveRefArg: SmartArchive) {
+    this.smartArchiveRef = smartArchiveRefArg;
+  }
+
+  public getCompressionStream() {
+    return new CompressZipTransform();
+  }
+
+  public getDecompressionStream() {
+    return new DecompressZipTransform();
+  }
+}
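The new `DecompressZipTransform` wraps fflate's streaming `Unzip` API: entries are announced through the constructor callback, each entry's bytes arrive via `ondata`, and the zip source is fed in with `push()` (passing `true` on the final chunk). A standalone sketch of that underlying API; `zipBytes` is an illustrative placeholder for real zip data:

```typescript
import { Unzip, UnzipInflate } from 'fflate';

const unzipper = new Unzip((file) => {
  const chunks: Uint8Array[] = [];
  file.ondata = (err, data, final) => {
    if (err) throw err;
    chunks.push(data);
    if (final) {
      const total = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
      console.log(`${file.name}: ${total} bytes decompressed`);
    }
  };
  file.start(); // begin decompressing this entry
});
unzipper.register(UnzipInflate); // enable DEFLATE-compressed entries

// Feed raw zip bytes; the last push must pass `true`.
declare const zipBytes: Uint8Array; // illustrative placeholder
unzipper.push(zipBytes, true);
```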
ts/plugins.ts

@@ -6,6 +6,7 @@ export { path, stream };

 // @pushrocks scope
 import * as smartfile from '@push.rocks/smartfile';
+import * as smartdelay from '@push.rocks/smartdelay';
 import * as smartpath from '@push.rocks/smartpath';
 import * as smartpromise from '@push.rocks/smartpromise';
 import * as smartrequest from '@push.rocks/smartrequest';
@@ -14,7 +15,7 @@ import * as smartstream from '@push.rocks/smartstream';
 import * as smartrx from '@push.rocks/smartrx';
 import * as smarturl from '@push.rocks/smarturl';

-export { smartfile, smartpath, smartpromise, smartrequest, smartunique, smartstream, smartrx, smarturl };
+export { smartfile, smartdelay, smartpath, smartpromise, smartrequest, smartunique, smartstream, smartrx, smarturl };

 // third party scope
 import * as fileType from 'file-type';