BREAKING CHANGE(core): update

Philipp Kunz 2023-11-06 18:14:21 +01:00
parent 35f66736f0
commit a9bd693065
18 changed files with 2373 additions and 1496 deletions
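For consumers, the practical effect of this breaking release is the switch from the imperative extract* methods to static constructors plus stream exports. A minimal migration sketch, assuming only the API visible in the diffs below (the package URL is hypothetical):

import * as smartarchive from '@push.rocks/smartarchive';

// before (v3.x): one-shot helpers such as
//   new SmartArchive().extractArchiveFromUrlToFs(url, targetDir);
// after (v4.x): build an instance from a source, then export a stream
const archive = await smartarchive.SmartArchive.fromArchiveUrl(
  'https://registry.example.com/some-package.tgz' // hypothetical URL
);
const streamOfFiles = await archive.exportToStreamOfStreamFiles();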


@@ -119,6 +119,6 @@ jobs:
         run: |
           npmci node install stable
           npmci npm install
-          pnpm install -g @gitzone/tsdoc
+          pnpm install -g @git.zone/tsdoc
           npmci command tsdoc
         continue-on-error: true

dist_ts/index.d.ts vendored

@@ -1 +1 @@
-export * from './smartarchive.classes.smartarchive.js';
+export * from './classes.smartarchive.js';

dist_ts/index.js vendored

@@ -1,2 +1,2 @@
-export * from './smartarchive.classes.smartarchive.js';
-//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi90cy9pbmRleC50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxjQUFjLHdDQUF3QyxDQUFDIn0=
+export * from './classes.smartarchive.js';
+//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi90cy9pbmRleC50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxjQUFjLDJCQUEyQixDQUFDIn0=

package.json

@@ -21,24 +21,26 @@
   },
   "homepage": "https://github.com/pushrocks/smartarchive#readme",
   "dependencies": {
-    "@push.rocks/smartfile": "^10.0.28",
+    "@push.rocks/smartfile": "^11.0.0",
     "@push.rocks/smartpath": "^5.0.11",
     "@push.rocks/smartpromise": "^4.0.3",
-    "@push.rocks/smartrequest": "^2.0.18",
-    "@push.rocks/smartrx": "^3.0.6",
-    "@push.rocks/smartstream": "^2.0.4",
-    "@push.rocks/smartunique": "^3.0.3",
-    "@types/gunzip-maybe": "^1.4.0",
-    "@types/tar-stream": "^2.2.2",
-    "gunzip-maybe": "^1.4.2",
-    "tar": "^6.1.15",
-    "tar-stream": "^3.1.6"
+    "@push.rocks/smartrequest": "^2.0.20",
+    "@push.rocks/smartrx": "^3.0.7",
+    "@push.rocks/smartstream": "^3.0.7",
+    "@push.rocks/smartunique": "^3.0.6",
+    "@push.rocks/smarturl": "^3.0.7",
+    "@types/tar-stream": "^3.1.2",
+    "@types/unbzip2-stream": "^1.4.2",
+    "fflate": "^0.8.1",
+    "file-type": "^18.6.0",
+    "tar-stream": "^3.1.6",
+    "unbzip2-stream": "^1.4.3"
   },
   "devDependencies": {
-    "@gitzone/tsbuild": "^2.1.66",
-    "@gitzone/tsrun": "^1.2.44",
-    "@gitzone/tstest": "^1.0.77",
-    "@push.rocks/tapbundle": "^5.0.12"
+    "@git.zone/tsbuild": "^2.1.66",
+    "@git.zone/tsrun": "^1.2.44",
+    "@git.zone/tstest": "^1.0.77",
+    "@push.rocks/tapbundle": "^5.0.15"
   },
   "private": false,
   "files": [

pnpm-lock.yaml generated

File diff suppressed because it is too large

test/plugins.ts Normal file

@@ -0,0 +1,13 @@
import * as path from 'path';
import * as smartpath from '@push.rocks/smartpath';
import * as smartfile from '@push.rocks/smartfile';
import * as smartrequest from '@push.rocks/smartrequest';
import * as smartstream from '@push.rocks/smartstream';

export {
  path,
  smartpath,
  smartfile,
  smartrequest,
  smartstream,
}

test/test.ts

@@ -1,23 +1,14 @@
 import { tap, expect } from '@push.rocks/tapbundle';
-import * as path from 'path';
-import * as smartpath from '@push.rocks/smartpath';
-import * as smartfile from '@push.rocks/smartfile';
-import * as smartrequest from '@push.rocks/smartrequest';
-
-const testPlugins = {
-  path,
-  smartfile,
-  smartrequest,
-};
+import * as plugins from './plugins.js';

 const testPaths = {
-  nogitDir: testPlugins.path.join(
-    smartpath.get.dirnameFromImportMetaUrl(import.meta.url),
+  nogitDir: plugins.path.join(
+    plugins.smartpath.get.dirnameFromImportMetaUrl(import.meta.url),
     '../.nogit/'
   ),
-  remoteDir: testPlugins.path.join(
-    smartpath.get.dirnameFromImportMetaUrl(import.meta.url),
+  remoteDir: plugins.path.join(
+    plugins.smartpath.get.dirnameFromImportMetaUrl(import.meta.url),
     '../.nogit/remote'
   ),
 };
@@ -25,80 +16,40 @@ const testPaths = {
 import * as smartarchive from '../ts/index.js';

 tap.preTask('should prepare .nogit dir', async () => {
-  await testPlugins.smartfile.fs.ensureDir(testPaths.remoteDir);
+  await plugins.smartfile.fs.ensureDir(testPaths.remoteDir);
 });

 tap.preTask('should prepare downloads', async (tools) => {
   const downloadedFile: Buffer = (
-    await testPlugins.smartrequest.getBinary(
+    await plugins.smartrequest.getBinary(
       'https://verdaccio.lossless.digital/@pushrocks%2fwebsetup/-/websetup-2.0.14.tgz'
     )
   ).body;
-  await testPlugins.smartfile.memory.toFs(
+  await plugins.smartfile.memory.toFs(
     downloadedFile,
-    testPlugins.path.join(testPaths.nogitDir, 'test.tgz')
+    plugins.path.join(testPaths.nogitDir, 'test.tgz')
   );
 });

 tap.test('should extract existing files on disk', async () => {
-  const testSmartarchive = new smartarchive.SmartArchive();
-  await testSmartarchive.extractArchiveFromFilePathToFs(
-    testPlugins.path.join(testPaths.nogitDir, 'test.tgz'),
-    testPlugins.path.join(testPaths.nogitDir)
-  );
-});
-
-tap.test('should download a package from the registry', async () => {
-  const testSmartarchive = new smartarchive.SmartArchive();
-  await testSmartarchive.extractArchiveFromUrlToFs(
-    'https://verdaccio.lossless.digital/@pushrocks%2fsmartfile/-/smartfile-7.0.11.tgz',
-    testPaths.remoteDir
-  );
-});
-
-tap.test('should extract a package using tarStream', async (tools) => {
-  const done = tools.defer();
-  const testSmartarchive = new smartarchive.SmartArchive();
-  const testTgzBuffer = (
-    await testPlugins.smartfile.Smartfile.fromFilePath(
-      testPlugins.path.join(testPaths.nogitDir, 'test.tgz')
-    )
-  ).contentBuffer;
-  const extractionFileObservable = await testSmartarchive.extractArchiveFromBufferToObservable(
-    testTgzBuffer
-  );
-  const subscription = extractionFileObservable.subscribe(
-    (file) => {
-      console.log(file.path);
-    },
-    (err) => {
-      console.log(err);
-    },
-    () => {
-      done.resolve();
-    }
-  );
-  await done.promise;
-});
-
-tap.test('should extract a file from url to replaySubject', async (tools) => {
-  const done = tools.defer();
-  const testSmartarchive = new smartarchive.SmartArchive();
-  const extractionFileObservable = await testSmartarchive.extractArchiveFromUrlToObservable(
+  const testSmartarchive = await smartarchive.SmartArchive.fromArchiveUrl(
     'https://verdaccio.lossless.digital/@pushrocks%2fwebsetup/-/websetup-2.0.14.tgz'
   );
-  const subscription = extractionFileObservable.subscribe(
-    (file) => {
-      console.log(file.path);
-    },
-    (err) => {
-      console.log(err);
-    },
-    () => {
-      done.resolve();
-    }
-  );
-  await done.promise;
+  const streamfileStream = await testSmartarchive.exportToStreamOfStreamFiles();
+  streamfileStream.pipe(new plugins.smartstream.SmartDuplex({
+    objectMode: true,
+    writeAndTransformFunction: async (chunkArg: plugins.smartfile.StreamFile, streamtools) => {
+      console.log(chunkArg.relativeFilePath);
+      const streamFile = chunkArg;
+      const readStream = await streamFile.createReadStream();
+      const writePath = plugins.path.join(testPaths.nogitDir + streamFile.relativeFilePath);
+      const dir = plugins.path.parse(writePath).dir;
+      await plugins.smartfile.fs.ensureDir(plugins.path.dirname(dir));
+      const writeStream = plugins.smartfile.fsStream.createWriteStream(writePath);
+      readStream.pipe(writeStream);
+    },
+  }));
 });

 tap.start();

ts/00_commitinfo_data.ts

@@ -3,6 +3,6 @@
  */
 export const commitinfo = {
   name: '@push.rocks/smartarchive',
-  version: '3.0.8',
+  version: '4.0.0',
   description: 'work with archives'
 }

ts/classes.archiveanalyzer.ts Normal file

@@ -0,0 +1,74 @@
import type { SmartArchive } from './classes.smartarchive.js';
import * as plugins from './plugins.js';

export interface IAnalyzedResult {
  fileType: plugins.fileType.FileTypeResult;
  isArchive: boolean;
  resultStream: plugins.smartstream.PassThrough;
  decompressionStream: plugins.stream.Transform | plugins.stream.Duplex | plugins.tarStream.Extract;
}

export class ArchiveAnalyzer {
  smartArchiveRef: SmartArchive;

  constructor(smartArchiveRefArg: SmartArchive) {
    this.smartArchiveRef = smartArchiveRefArg;
  }

  private async mimeTypeIsArchive(mimeType: string): Promise<boolean> {
    const archiveMimeTypes: Set<string> = new Set([
      'application/zip',
      'application/x-rar-compressed',
      'application/x-tar',
      'application/gzip',
      'application/x-7z-compressed',
      'application/x-bzip2',
      // Add other archive mime types here
    ]);
    return archiveMimeTypes.has(mimeType);
  }

  private getDecompressionStream(
    mimeTypeArg: plugins.fileType.FileTypeResult['mime']
  ): plugins.stream.Transform | plugins.stream.Duplex | plugins.tarStream.Extract {
    switch (mimeTypeArg) {
      case 'application/gzip':
        return this.smartArchiveRef.gzipTools.getDecompressionStream();
      case 'application/x-bzip2':
        return this.smartArchiveRef.bzip2Tools.getDecompressionStream(); // replace with your own bzip2 decompression stream
      case 'application/x-tar':
        return this.smartArchiveRef.tarTools.getDecompressionStream(); // replace with your own tar decompression stream
      default:
        // Handle unsupported formats or no decompression needed
        return new plugins.smartstream.PassThrough();
    }
  }

  public getAnalyzedStream() {
    let firstRun = true;
    const resultStream = new plugins.smartstream.PassThrough();
    const analyzerstream = new plugins.smartstream.SmartDuplex<Buffer, IAnalyzedResult>({
      readableObjectMode: true,
      writeAndTransformFunction: async (chunkArg: Buffer, streamtools) => {
        const fileType = await plugins.fileType.fileTypeFromBuffer(chunkArg);
        const decompressionStream = this.getDecompressionStream(fileType.mime as any);
        resultStream.push(chunkArg);
        if (firstRun) {
          firstRun = false;
          const result: IAnalyzedResult = {
            fileType,
            isArchive: await this.mimeTypeIsArchive(fileType.mime),
            resultStream,
            decompressionStream,
          };
          streamtools.push(result);
          streamtools.push(null);
          return null;
        }
      },
    });
    return analyzerstream;
  }
}
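For orientation, a sketch of how the analyzer stream is meant to be consumed, mirroring its use in exportToStreamOfStreamFiles further down; the archive path is hypothetical:

import * as fs from 'fs';
import { SmartArchive } from './classes.smartarchive.js';
import type { IAnalyzedResult } from './classes.archiveanalyzer.js';

const archive = new SmartArchive();
const analyzed = archive.archiveAnalyzer.getAnalyzedStream();

// the duplex emits exactly one IAnalyzedResult object, then ends
analyzed.on('data', (result: IAnalyzedResult) => {
  console.log(result.fileType?.mime, 'archive:', result.isArchive);
  // result.resultStream replays the original bytes;
  // result.decompressionStream is the matching decoder to pipe them into
});

fs.createReadStream('local-archive.tgz').pipe(analyzed); // hypothetical path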

ts/classes.bzip2tools.ts Normal file

@@ -0,0 +1,45 @@
import type { SmartArchive } from './classes.smartarchive.js';
import * as plugins from './plugins.js';

export class DecompressBzip2Transform extends plugins.stream.Transform {
  private bzip2Decompressor: plugins.stream.Transform;

  constructor() {
    super();
    // Initialize the bzip2 decompressor once here
    this.bzip2Decompressor = plugins.unbzip2Stream();
    this.bzip2Decompressor.on('data', (data: Buffer) => {
      // When data is decompressed, push it to the stream
      this.push(data);
    });
    this.bzip2Decompressor.on('error', (err) => {
      // If an error occurs, emit it on this stream
      this.emit('error', err);
    });
  }

  _transform(chunk: Buffer, encoding: BufferEncoding, callback: plugins.stream.TransformCallback) {
    // Pass the chunk directly to the decompressor
    // The decompressor will handle the state across chunks
    this.bzip2Decompressor.write(chunk);
    callback();
  }

  _flush(callback: plugins.stream.TransformCallback) {
    // When the stream is ending, end the decompressor stream as well
    this.bzip2Decompressor.end();
    callback();
  }
}

export class Bzip2Tools {
  smartArchiveRef: SmartArchive;

  constructor(smartArchiveRefArg: SmartArchive) {
    this.smartArchiveRef = smartArchiveRefArg;
  }

  getDecompressionStream() {
    return new DecompressBzip2Transform();
  }
}
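A quick usage sketch for the wrapper above; the file names are hypothetical:

import * as fs from 'fs';
import { SmartArchive } from './classes.smartarchive.js';

const { bzip2Tools } = new SmartArchive();
fs.createReadStream('notes.txt.bz2') // hypothetical input
  .pipe(bzip2Tools.getDecompressionStream())
  .pipe(fs.createWriteStream('notes.txt')); // decompressed output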

ts/classes.gziptools.ts Normal file

@@ -0,0 +1,59 @@
import type { SmartArchive } from './classes.smartarchive.js';
import * as plugins from './plugins.js';

// This class wraps fflate's gunzip in a Node.js Transform stream
export class CompressGunzipTransform extends plugins.stream.Transform {
  constructor() {
    super();
  }

  _transform(chunk: Buffer, encoding: BufferEncoding, callback: plugins.stream.TransformCallback) {
    plugins.fflate.gunzip(chunk, (err, decompressed) => {
      if (err) {
        callback(err);
      } else {
        this.push(decompressed);
        callback();
      }
    });
  }
}

// DecompressGunzipTransform class that extends the Node.js Transform stream to
// create a stream that decompresses GZip-compressed data using fflate's gunzip function
export class DecompressGunzipTransform extends plugins.stream.Transform {
  constructor() {
    super();
  }

  _transform(chunk: Buffer, encoding: BufferEncoding, callback: plugins.stream.TransformCallback) {
    // Use fflate's gunzip function to decompress the chunk
    plugins.fflate.gunzip(chunk, (err, decompressed) => {
      if (err) {
        // If an error occurs during decompression, pass the error to the callback
        callback(err);
      } else {
        // If decompression is successful, push the decompressed data into the stream
        this.push(decompressed);
        callback();
      }
    });
  }
}

export class GzipTools {
  smartArchiveRef: SmartArchive;

  constructor(smartArchiveRefArg: SmartArchive) {
    this.smartArchiveRef = smartArchiveRefArg;
  }

  public getCompressionStream() {
    return new CompressGunzipTransform();
  }

  public getDecompressionStream() {
    return new DecompressGunzipTransform();
  }
}
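Worth noting: fflate's gunzip operates on complete buffers, so these transforms assume each incoming chunk is a self-contained gzip unit. A minimal sketch under that assumption (the file name is hypothetical):

import * as fs from 'fs';
import { SmartArchive } from './classes.smartarchive.js';

const { gzipTools } = new SmartArchive();
const decompress = gzipTools.getDecompressionStream();

decompress.on('data', (plain: Buffer) => console.log(plain.toString()));

// read the whole .gz file and hand it to the transform as one chunk
fs.readFile('hello.txt.gz', (err, gzBuffer) => { // hypothetical path
  if (err) throw err;
  decompress.end(gzBuffer);
});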

ts/classes.smartarchive.ts Normal file

@@ -0,0 +1,120 @@
import * as plugins from './plugins.js';
import * as paths from './paths.js';

import { GzipTools } from './classes.gziptools.js';
import { TarTools } from './classes.tartools.js';
import { Bzip2Tools } from './classes.bzip2tools.js';

import { ArchiveAnalyzer, type IAnalyzedResult } from './classes.archiveanalyzer.js';

import type { from } from '@push.rocks/smartrx/dist_ts/smartrx.plugins.rxjs.js';

export class SmartArchive {
  // STATIC
  public static async fromArchiveUrl(urlArg: string): Promise<SmartArchive> {
    const smartArchiveInstance = new SmartArchive();
    smartArchiveInstance.sourceUrl = urlArg;
    return smartArchiveInstance;
  }

  public static async fromArchiveFile(filePathArg: string): Promise<SmartArchive> {
    const smartArchiveInstance = new SmartArchive();
    smartArchiveInstance.sourceFilePath = filePathArg;
    return smartArchiveInstance;
  }

  public static async fromArchiveStream(
    streamArg: plugins.stream.Readable | plugins.stream.Duplex | plugins.stream.Transform
  ): Promise<SmartArchive> {
    const smartArchiveInstance = new SmartArchive();
    smartArchiveInstance.sourceStream = streamArg;
    return smartArchiveInstance;
  }

  // INSTANCE
  public tarTools = new TarTools(this);
  public gzipTools = new GzipTools(this);
  public bzip2Tools = new Bzip2Tools(this);
  public archiveAnalyzer = new ArchiveAnalyzer(this);

  public sourceUrl: string;
  public sourceFilePath: string;
  public sourceStream: plugins.stream.Readable | plugins.stream.Duplex | plugins.stream.Transform;

  public archiveName: string;
  public singleFileMode: boolean = false;

  public addedDirectories: string[] = [];
  public addedFiles: (plugins.smartfile.SmartFile | plugins.smartfile.StreamFile)[] = [];
  public addedUrls: string[] = [];

  constructor() {}

  /**
   * gets the original archive stream
   */
  public async getArchiveStream() {
    if (this.sourceStream) {
      return this.sourceStream;
    }
    if (this.sourceUrl) {
      const urlStream = await plugins.smartrequest.getStream(this.sourceUrl);
      return urlStream;
    }
    if (this.sourceFilePath) {
      const fileStream = plugins.smartfile.fs.toReadStream(this.sourceFilePath);
      return fileStream;
    }
  }

  public async exportToTarGzStream() {
    const tarPackStream = await this.tarTools.getPackStream();
    const gzipStream = await this.gzipTools.getCompressionStream();
    // const archiveStream = tarPackStream.pipe(gzipStream);
    // return archiveStream;
  }

  public async exportToFs(targetDir: string): Promise<void> {}

  public async exportToStreamOfStreamFiles() {
    const streamFileIntake = new plugins.smartstream.StreamIntake<plugins.smartfile.StreamFile>({
      objectMode: true,
    });
    const archiveStream = await this.getArchiveStream();
    const createAnalyzedStream = () => this.archiveAnalyzer.getAnalyzedStream();

    // lets create a function that can be called multiple times to unpack layers of archives
    const createUnpackStream = () =>
      plugins.smartstream.createTransformFunction<IAnalyzedResult, any>(
        async (analyzedResultChunk) => {
          if (analyzedResultChunk.fileType.mime === 'application/x-tar') {
            (analyzedResultChunk.decompressionStream as plugins.tarStream.Extract).on(
              'entry',
              async (header, stream, next) => {
                const streamfile = plugins.smartfile.StreamFile.fromStream(stream, header.name);
                streamFileIntake.push(streamfile);
                stream.on('end', function () {
                  next(); // ready for next entry
                });
              }
            );
            analyzedResultChunk.resultStream.pipe(analyzedResultChunk.decompressionStream);
          } else if (analyzedResultChunk.isArchive && analyzedResultChunk.decompressionStream) {
            analyzedResultChunk.resultStream
              .pipe(analyzedResultChunk.decompressionStream)
              .pipe(createAnalyzedStream())
              .pipe(createUnpackStream());
          } else {
            const streamFile = plugins.smartfile.StreamFile.fromStream(
              analyzedResultChunk.resultStream,
              analyzedResultChunk.fileType.ext
            );
            streamFileIntake.push(streamFile);
          }
        }
      );

    archiveStream.pipe(createAnalyzedStream()).pipe(createUnpackStream());
    return streamFileIntake;
  }
}
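A sketch of the file-based entry point, complementing the URL-based flow exercised in test/test.ts, and assuming the StreamIntake returned above behaves as a standard object-mode Readable; the archive path is hypothetical:

import { SmartArchive } from './classes.smartarchive.js';

const archive = await SmartArchive.fromArchiveFile('./some-archive.tgz'); // hypothetical path
const files = await archive.exportToStreamOfStreamFiles();
files.on('data', (file) => {
  // each chunk is a plugins.smartfile.StreamFile
  console.log('unpacked:', file.relativeFilePath);
});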

ts/classes.tartools.ts Normal file

@@ -0,0 +1,36 @@
import type { SmartArchive } from './classes.smartarchive.js';
import * as plugins from './plugins.js';

export class TarTools {
  smartArchiveRef: SmartArchive;

  constructor(smartArchiveRefArg: SmartArchive) {
    this.smartArchiveRef = smartArchiveRefArg;
  }

  // packing
  public addFileToPack(pack: plugins.tarStream.Pack, fileName: string, content: string | Buffer) {
    return new Promise<void>((resolve, reject) => {
      const entry = pack.entry({ name: fileName, size: content.length }, (err: Error) => {
        if (err) {
          reject(err);
        } else {
          resolve();
        }
      });
      entry.write(content);
      entry.end();
    });
  }

  public async getPackStream() {
    const pack = plugins.tarStream.pack();
    return pack;
  }

  // extracting
  getDecompressionStream() {
    return plugins.tarStream.extract();
  }
}
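A sketch of packing with these helpers, assuming tar-stream's standard finalize()/pipe behavior; the entry name, contents, and output path are hypothetical:

import * as fs from 'fs';
import { SmartArchive } from './classes.smartarchive.js';

const { tarTools } = new SmartArchive();
const pack = await tarTools.getPackStream();
await tarTools.addFileToPack(pack, 'hello.txt', Buffer.from('hello world'));
pack.finalize(); // tar-stream: no further entries
pack.pipe(fs.createWriteStream('bundle.tar')); // hypothetical output path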

ts/index.ts

@@ -1 +1 @@
-export * from './smartarchive.classes.smartarchive.js';
+export * from './classes.smartarchive.js';

ts/paths.ts

@@ -1,4 +1,4 @@
-import * as plugins from './smartarchive.plugins.js';
+import * as plugins from './plugins.js';

 export const packageDir = plugins.path.join(
   plugins.smartpath.get.dirnameFromImportMetaUrl(import.meta.url),

ts/plugins.ts

@@ -1,7 +1,8 @@
 // node native scope
 import * as path from 'path';
+import * as stream from 'stream';

-export { path };
+export { path, stream };

 // @pushrocks scope
 import * as smartfile from '@push.rocks/smartfile';
@@ -11,14 +12,14 @@ import * as smartrequest from '@push.rocks/smartrequest';
 import * as smartunique from '@push.rocks/smartunique';
 import * as smartstream from '@push.rocks/smartstream';
 import * as smartrx from '@push.rocks/smartrx';
+import * as smarturl from '@push.rocks/smarturl';

-export { smartfile, smartpath, smartpromise, smartrequest, smartunique, smartstream, smartrx };
+export { smartfile, smartpath, smartpromise, smartrequest, smartunique, smartstream, smartrx, smarturl };

 // third party scope
-import gunzipMaybe from 'gunzip-maybe';
+import * as fileType from 'file-type';
+import * as fflate from 'fflate';

-// @ts-ignore
-import tar from 'tar';
 import tarStream from 'tar-stream';
+import unbzip2Stream from 'unbzip2-stream';

-export { gunzipMaybe, tar, tarStream };
+export { fileType, fflate, tarStream, unbzip2Stream };

ts/smartarchive.classes.smartarchive.ts

@@ -1,133 +0,0 @@
import * as plugins from './smartarchive.plugins.js';
import * as paths from './smartarchive.paths.js';

export class SmartArchive {
  constructor() {}

  /**
   * extracts an archive from a given url
   */
  public async extractArchiveFromUrlToFs(urlArg: string, targetDir: string) {
    const parsedPath = plugins.path.parse(urlArg);
    const uniqueFileName = plugins.smartunique.uni() + parsedPath.ext;
    plugins.smartfile.fs.ensureDir(paths.nogitDir); // TODO: totally remove caching needs
    const downloadPath = plugins.path.join(paths.nogitDir, uniqueFileName);
    const downloadedArchive = (await plugins.smartrequest.getBinary(urlArg)).body;
    await plugins.smartfile.memory.toFs(downloadedArchive, downloadPath);
    await this.extractArchiveFromFilePathToFs(downloadPath, targetDir);
    await plugins.smartfile.fs.remove(downloadPath);
  }

  /**
   * extracts an archive from a given filePath on disk
   * @param filePathArg
   * @param targetDirArg
   */
  public async extractArchiveFromFilePathToFs(filePathArg: string, targetDirArg: string) {
    console.log(`extracting ${filePathArg}`);
    const done = plugins.smartpromise.defer();
    filePathArg = plugins.smartpath.transform.makeAbsolute(filePathArg);
    targetDirArg = plugins.smartpath.transform.makeAbsolute(targetDirArg);
    const readableStream = plugins.smartfile.fsStream.createReadStream(filePathArg);
    const extractPipeStop = plugins.tarStream.extract();
    extractPipeStop.on('entry', async (header, stream, next) => {
      const targetFilePath = plugins.path.join(targetDirArg, header.name);
      const parsedPath = plugins.path.parse(targetFilePath);
      await plugins.smartfile.fs.ensureDir(parsedPath.dir);
      const writeStream = plugins.smartfile.fsStream.createWriteStream(targetFilePath);
      stream.pipe(writeStream);
      stream.on('end', () => {
        console.log(`extracted ${header.name}`);
        next();
      });
      stream.resume();
    });
    extractPipeStop.on('finish', () => {
      console.log(`Sucessfully extracted ${filePathArg}!`);
      done.resolve();
    });
    // lets run the stream
    readableStream.pipe(plugins.gunzipMaybe()).pipe(extractPipeStop);
    await done.promise;
  }

  /**
   * extracts to Observable
   * where the Observable is emitting smartfiles
   */
  public async extractArchiveFromBufferToObservable(
    bufferArg: Buffer
  ): Promise<plugins.smartrx.rxjs.ReplaySubject<plugins.smartfile.Smartfile>> {
    const { intake, replaySubject } = this.extractArchiveWithIntakeAndReplaySubject();
    intake.pushData(bufferArg);
    intake.signalEnd();
    return replaySubject;
  }

  extractArchiveWithIntakeAndReplaySubject() {
    const intake = new plugins.smartstream.StreamIntake<Buffer>();
    const replaySubject = new plugins.smartrx.rxjs.ReplaySubject<plugins.smartfile.Smartfile>();
    const readableStream = intake.getReadableStream();
    const extractPipeStop = plugins.tarStream.extract();
    extractPipeStop.on('entry', (header, stream, next) => {
      let fileBuffer: Buffer;
      stream.on('data', (chunkArg) => {
        if (!fileBuffer) {
          fileBuffer = chunkArg;
        } else {
          fileBuffer = Buffer.concat([fileBuffer, chunkArg]);
        }
      });
      stream.on('end', () => {
        replaySubject.next(
          new plugins.smartfile.Smartfile({
            base: null, // no working directory for this one
            contentBuffer: fileBuffer,
            path: `${header.name}`,
          })
        );
        next();
      });
      stream.resume();
    });
    extractPipeStop.on('finish', () => {
      replaySubject.complete();
    });
    // lets run the stream
    readableStream.pipe(plugins.gunzipMaybe()).pipe(extractPipeStop);
    return {
      intake,
      replaySubject,
    };
  }

  /**
   * extracts to Observable
   */
  public async extractArchiveFromUrlToObservable(
    urlArg: string
  ): Promise<plugins.smartrx.rxjs.ReplaySubject<plugins.smartfile.Smartfile>> {
    const response = await plugins.smartrequest.getBinary(urlArg);
    const replaySubject = this.extractArchiveFromBufferToObservable(response.body);
    return replaySubject;
  }

  // TODO
  public async extractArchiveFromUrlToStream() {}
  // TODO
  public async extractArchiveFromFilePathToStream() {}
  // TODO
  public async extractArchiveFromStreamToStream() {}
  // TODO
  public async packFromStreamToStream() {}
  // TODO
  public async packFromDirPathToStream() {}
  // TODO
  public async packFromDirPathToFs() {}
}

tsconfig.json

@@ -3,7 +3,12 @@
     "experimentalDecorators": true,
     "useDefineForClassFields": false,
     "target": "ES2022",
-    "module": "ES2022",
-    "moduleResolution": "nodenext"
-  }
+    "module": "NodeNext",
+    "moduleResolution": "NodeNext",
+    "esModuleInterop": true,
+    "verbatimModuleSyntax": true
+  },
+  "exclude": [
+    "dist_*/**/*.d.ts"
+  ]
 }
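The new verbatimModuleSyntax flag is what the type-only imports above (e.g. import type { SmartArchive } in the classes.* modules) cater to: with it enabled, TypeScript emits imports exactly as written, so imports used purely for types must be marked import type to be erased from the compiled output.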