2023-11-06 18:14:21 +01:00
|
|
|
import * as plugins from './plugins.js';
|
2025-11-25 12:32:13 +00:00
|
|
|
import type {
|
|
|
|
|
IArchiveEntry,
|
|
|
|
|
IArchiveEntryInfo,
|
|
|
|
|
IArchiveInfo,
|
|
|
|
|
TArchiveFormat,
|
|
|
|
|
TCompressionLevel,
|
2025-11-25 13:37:27 +00:00
|
|
|
TEntryFilter,
|
2025-11-25 12:32:13 +00:00
|
|
|
} from './interfaces.js';
|
2023-11-06 18:14:21 +01:00
|
|
|
|
2024-03-17 00:29:42 +01:00
|
|
|
import { Bzip2Tools } from './classes.bzip2tools.js';
|
2023-11-06 18:14:21 +01:00
|
|
|
import { GzipTools } from './classes.gziptools.js';
|
|
|
|
|
import { TarTools } from './classes.tartools.js';
|
2024-03-17 00:29:42 +01:00
|
|
|
import { ZipTools } from './classes.ziptools.js';
|
2025-11-25 12:32:13 +00:00
|
|
|
import { ArchiveAnalyzer, type IAnalyzedResult } from './classes.archiveanalyzer.js';
|
2023-11-06 18:14:21 +01:00
|
|
|
|
2025-11-25 12:32:13 +00:00
|
|
|
/**
 * A directory queued via `.directory()` whose file tree has not yet been
 * walked. Resolution (reading the tree and file contents) is deferred until
 * build time — see `resolveDirectories()`.
 */
interface IPendingDirectory {
  // Filesystem path of the directory to add (walked recursively at build time)
  sourcePath: string;
  // Optional prefix prepended to every entry's path inside the archive;
  // when omitted, entries are added relative to sourcePath
  archiveBase?: string;
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Main class for archive manipulation with fluent API
|
2025-11-25 12:32:13 +00:00
|
|
|
* Supports TAR, ZIP, GZIP, and BZIP2 formats
|
2025-11-25 13:37:27 +00:00
|
|
|
*
|
|
|
|
|
* @example Extraction from URL
|
|
|
|
|
* ```typescript
|
|
|
|
|
* await SmartArchive.create()
|
|
|
|
|
* .url('https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz')
|
|
|
|
|
* .stripComponents(1)
|
|
|
|
|
* .extract('./node_modules/lodash');
|
|
|
|
|
* ```
|
|
|
|
|
*
|
|
|
|
|
* @example Creation with thenable
|
|
|
|
|
* ```typescript
|
|
|
|
|
* const archive = await SmartArchive.create()
|
|
|
|
|
* .format('tar.gz')
|
|
|
|
|
* .compression(9)
|
|
|
|
|
* .entry('config.json', JSON.stringify(config))
|
|
|
|
|
* .directory('./src');
|
|
|
|
|
* ```
|
2025-11-25 12:32:13 +00:00
|
|
|
*/
|
2023-11-06 18:14:21 +01:00
|
|
|
export class SmartArchive {
|
2025-11-25 12:32:13 +00:00
|
|
|
// ============================================
|
2025-11-25 13:37:27 +00:00
|
|
|
// STATIC ENTRY POINT
|
2025-11-25 12:32:13 +00:00
|
|
|
// ============================================
|
|
|
|
|
|
|
|
|
|
/**
|
2025-11-25 13:37:27 +00:00
|
|
|
* Create a new SmartArchive instance for fluent configuration
|
2025-11-25 12:32:13 +00:00
|
|
|
*/
|
2025-11-25 13:37:27 +00:00
|
|
|
public static create(): SmartArchive {
|
|
|
|
|
return new SmartArchive();
|
2023-11-06 18:14:21 +01:00
|
|
|
}
|
|
|
|
|
|
2025-11-25 13:37:27 +00:00
|
|
|
// ============================================
|
|
|
|
|
// TOOLS (public for internal use)
|
|
|
|
|
// ============================================
|
2023-11-06 18:14:21 +01:00
|
|
|
|
2025-11-25 13:37:27 +00:00
|
|
|
public tarTools = new TarTools();
|
|
|
|
|
public zipTools = new ZipTools();
|
|
|
|
|
public gzipTools = new GzipTools();
|
|
|
|
|
public bzip2Tools = new Bzip2Tools(this);
|
|
|
|
|
public archiveAnalyzer = new ArchiveAnalyzer(this);
|
2023-11-06 18:14:21 +01:00
|
|
|
|
2025-11-25 13:37:27 +00:00
|
|
|
// ============================================
|
|
|
|
|
// SOURCE STATE (extraction mode)
|
|
|
|
|
// ============================================
|
|
|
|
|
|
|
|
|
|
private sourceUrl?: string;
|
|
|
|
|
private sourceFilePath?: string;
|
|
|
|
|
private sourceStream?: plugins.stream.Readable | plugins.stream.Duplex | plugins.stream.Transform;
|
2025-11-25 12:32:13 +00:00
|
|
|
|
|
|
|
|
// ============================================
|
2025-11-25 13:37:27 +00:00
|
|
|
// CREATION STATE
|
2025-11-25 12:32:13 +00:00
|
|
|
// ============================================
|
|
|
|
|
|
2025-11-25 13:37:27 +00:00
|
|
|
private archiveBuffer?: Buffer;
|
|
|
|
|
private creationFormat?: TArchiveFormat;
|
|
|
|
|
private _compressionLevel: TCompressionLevel = 6;
|
|
|
|
|
private pendingEntries: IArchiveEntry[] = [];
|
|
|
|
|
private pendingDirectories: IPendingDirectory[] = [];
|
2025-11-25 12:32:13 +00:00
|
|
|
|
2025-11-25 13:37:27 +00:00
|
|
|
// ============================================
|
|
|
|
|
// FLUENT STATE
|
|
|
|
|
// ============================================
|
2025-11-25 12:32:13 +00:00
|
|
|
|
2025-11-25 13:37:27 +00:00
|
|
|
private _mode: 'extract' | 'create' | null = null;
|
|
|
|
|
private _filters: TEntryFilter[] = [];
|
|
|
|
|
private _excludePatterns: RegExp[] = [];
|
|
|
|
|
private _includePatterns: RegExp[] = [];
|
|
|
|
|
private _stripComponents: number = 0;
|
|
|
|
|
private _overwrite: boolean = false;
|
|
|
|
|
private _fileName?: string;
|
2025-11-25 12:32:13 +00:00
|
|
|
|
2025-11-25 13:37:27 +00:00
|
|
|
constructor() {}
|
2025-11-25 12:32:13 +00:00
|
|
|
|
2025-11-25 13:37:27 +00:00
|
|
|
// ============================================
|
|
|
|
|
// SOURCE METHODS (set extraction mode)
|
|
|
|
|
// ============================================
|
2025-11-25 12:32:13 +00:00
|
|
|
|
2025-11-25 13:37:27 +00:00
|
|
|
/**
|
|
|
|
|
* Load archive from URL
|
|
|
|
|
*/
|
|
|
|
|
public url(urlArg: string): this {
|
|
|
|
|
this.ensureNotInCreateMode('url');
|
|
|
|
|
this._mode = 'extract';
|
|
|
|
|
this.sourceUrl = urlArg;
|
|
|
|
|
return this;
|
2025-11-25 12:32:13 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
2025-11-25 13:37:27 +00:00
|
|
|
* Load archive from file path
|
2025-11-25 12:32:13 +00:00
|
|
|
*/
|
2025-11-25 13:37:27 +00:00
|
|
|
public file(pathArg: string): this {
|
|
|
|
|
this.ensureNotInCreateMode('file');
|
|
|
|
|
this._mode = 'extract';
|
|
|
|
|
this.sourceFilePath = pathArg;
|
|
|
|
|
return this;
|
2025-11-25 12:32:13 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
2025-11-25 13:37:27 +00:00
|
|
|
* Load archive from readable stream
|
2025-11-25 12:32:13 +00:00
|
|
|
*/
|
2025-11-25 13:37:27 +00:00
|
|
|
public stream(streamArg: plugins.stream.Readable | plugins.stream.Duplex | plugins.stream.Transform): this {
|
|
|
|
|
this.ensureNotInCreateMode('stream');
|
|
|
|
|
this._mode = 'extract';
|
|
|
|
|
this.sourceStream = streamArg;
|
|
|
|
|
return this;
|
2025-11-25 12:32:13 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
2025-11-25 13:37:27 +00:00
|
|
|
* Load archive from buffer
|
2025-11-25 12:32:13 +00:00
|
|
|
*/
|
2025-11-25 13:37:27 +00:00
|
|
|
public buffer(bufferArg: Buffer): this {
|
|
|
|
|
this.ensureNotInCreateMode('buffer');
|
|
|
|
|
this._mode = 'extract';
|
|
|
|
|
this.sourceStream = plugins.stream.Readable.from(bufferArg);
|
|
|
|
|
return this;
|
2025-11-25 12:32:13 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// ============================================
|
2025-11-25 13:37:27 +00:00
|
|
|
// FORMAT METHODS (set creation mode)
|
2025-11-25 12:32:13 +00:00
|
|
|
// ============================================
|
|
|
|
|
|
2025-11-25 13:37:27 +00:00
|
|
|
/**
|
|
|
|
|
* Set output format for archive creation
|
|
|
|
|
*/
|
|
|
|
|
public format(fmt: TArchiveFormat): this {
|
|
|
|
|
this.ensureNotInExtractMode('format');
|
|
|
|
|
this._mode = 'create';
|
|
|
|
|
this.creationFormat = fmt;
|
|
|
|
|
return this;
|
|
|
|
|
}
|
2023-11-06 18:14:21 +01:00
|
|
|
|
2025-11-25 13:37:27 +00:00
|
|
|
/**
|
|
|
|
|
* Set compression level (0-9)
|
|
|
|
|
*/
|
|
|
|
|
public compression(level: TCompressionLevel): this {
|
|
|
|
|
this._compressionLevel = level;
|
|
|
|
|
return this;
|
|
|
|
|
}
|
2023-11-06 18:14:21 +01:00
|
|
|
|
2025-11-25 12:32:13 +00:00
|
|
|
// ============================================
|
2025-11-25 13:37:27 +00:00
|
|
|
// CONTENT METHODS (creation mode)
|
2025-11-25 12:32:13 +00:00
|
|
|
// ============================================
|
|
|
|
|
|
|
|
|
|
/**
|
2025-11-25 13:37:27 +00:00
|
|
|
* Add a single file entry to the archive
|
2025-11-25 12:32:13 +00:00
|
|
|
*/
|
2025-11-25 13:37:27 +00:00
|
|
|
public entry(archivePath: string, content: string | Buffer): this {
|
|
|
|
|
this.ensureNotInExtractMode('entry');
|
|
|
|
|
if (!this._mode) this._mode = 'create';
|
2025-11-25 12:32:13 +00:00
|
|
|
this.pendingEntries.push({ archivePath, content });
|
|
|
|
|
return this;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
2025-11-25 13:37:27 +00:00
|
|
|
* Add multiple entries to the archive
|
2025-11-25 12:32:13 +00:00
|
|
|
*/
|
2025-11-25 13:37:27 +00:00
|
|
|
public entries(entriesArg: Array<{ archivePath: string; content: string | Buffer }>): this {
|
|
|
|
|
this.ensureNotInExtractMode('entries');
|
|
|
|
|
if (!this._mode) this._mode = 'create';
|
|
|
|
|
for (const e of entriesArg) {
|
|
|
|
|
this.pendingEntries.push({ archivePath: e.archivePath, content: e.content });
|
2025-11-25 12:32:13 +00:00
|
|
|
}
|
2025-11-25 13:37:27 +00:00
|
|
|
return this;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Add an entire directory to the archive (queued, resolved at build time)
|
|
|
|
|
*/
|
|
|
|
|
public directory(sourcePath: string, archiveBase?: string): this {
|
|
|
|
|
this.ensureNotInExtractMode('directory');
|
|
|
|
|
if (!this._mode) this._mode = 'create';
|
|
|
|
|
this.pendingDirectories.push({ sourcePath, archiveBase });
|
|
|
|
|
return this;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Add a SmartFile to the archive
|
|
|
|
|
*/
|
|
|
|
|
public addSmartFile(fileArg: plugins.smartfile.SmartFile, archivePath?: string): this {
|
|
|
|
|
this.ensureNotInExtractMode('addSmartFile');
|
|
|
|
|
if (!this._mode) this._mode = 'create';
|
2025-11-25 12:32:13 +00:00
|
|
|
this.pendingEntries.push({
|
2025-11-25 13:37:27 +00:00
|
|
|
archivePath: archivePath || fileArg.relative,
|
|
|
|
|
content: fileArg,
|
2025-11-25 12:32:13 +00:00
|
|
|
});
|
|
|
|
|
return this;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
2025-11-25 13:37:27 +00:00
|
|
|
* Add a StreamFile to the archive
|
2025-11-25 12:32:13 +00:00
|
|
|
*/
|
2025-11-25 13:37:27 +00:00
|
|
|
public addStreamFile(fileArg: plugins.smartfile.StreamFile, archivePath?: string): this {
|
|
|
|
|
this.ensureNotInExtractMode('addStreamFile');
|
|
|
|
|
if (!this._mode) this._mode = 'create';
|
2025-11-25 12:32:13 +00:00
|
|
|
this.pendingEntries.push({
|
2025-11-25 13:37:27 +00:00
|
|
|
archivePath: archivePath || fileArg.relativeFilePath,
|
|
|
|
|
content: fileArg,
|
2025-11-25 12:32:13 +00:00
|
|
|
});
|
|
|
|
|
return this;
|
|
|
|
|
}
|
|
|
|
|
|
2025-11-25 13:37:27 +00:00
|
|
|
// ============================================
|
|
|
|
|
// FILTER METHODS (both modes)
|
|
|
|
|
// ============================================
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Filter entries by predicate function
|
|
|
|
|
*/
|
|
|
|
|
public filter(predicate: TEntryFilter): this {
|
|
|
|
|
this._filters.push(predicate);
|
|
|
|
|
return this;
|
|
|
|
|
}
|
|
|
|
|
|
2025-11-25 12:32:13 +00:00
|
|
|
/**
|
2025-11-25 13:37:27 +00:00
|
|
|
* Include only entries matching the pattern
|
2025-11-25 12:32:13 +00:00
|
|
|
*/
|
2025-11-25 13:37:27 +00:00
|
|
|
public include(pattern: string | RegExp): this {
|
|
|
|
|
const regex = typeof pattern === 'string' ? new RegExp(pattern) : pattern;
|
|
|
|
|
this._includePatterns.push(regex);
|
|
|
|
|
return this;
|
|
|
|
|
}
|
2025-11-25 12:32:13 +00:00
|
|
|
|
2025-11-25 13:37:27 +00:00
|
|
|
/**
|
|
|
|
|
* Exclude entries matching the pattern
|
|
|
|
|
*/
|
|
|
|
|
public exclude(pattern: string | RegExp): this {
|
|
|
|
|
const regex = typeof pattern === 'string' ? new RegExp(pattern) : pattern;
|
|
|
|
|
this._excludePatterns.push(regex);
|
2025-11-25 12:32:13 +00:00
|
|
|
return this;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// ============================================
|
2025-11-25 13:37:27 +00:00
|
|
|
// EXTRACTION OPTIONS
|
2025-11-25 12:32:13 +00:00
|
|
|
// ============================================
|
|
|
|
|
|
2023-11-06 18:14:21 +01:00
|
|
|
/**
|
2025-11-25 13:37:27 +00:00
|
|
|
* Strip N leading path components from extracted files
|
2023-11-06 18:14:21 +01:00
|
|
|
*/
|
2025-11-25 13:37:27 +00:00
|
|
|
public stripComponents(n: number): this {
|
|
|
|
|
this._stripComponents = n;
|
|
|
|
|
return this;
|
2025-11-25 12:32:13 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
2025-11-25 13:37:27 +00:00
|
|
|
* Overwrite existing files during extraction
|
2025-11-25 12:32:13 +00:00
|
|
|
*/
|
2025-11-25 13:37:27 +00:00
|
|
|
public overwrite(value: boolean = true): this {
|
|
|
|
|
this._overwrite = value;
|
|
|
|
|
return this;
|
2023-11-06 18:14:21 +01:00
|
|
|
}
|
|
|
|
|
|
2025-11-25 12:32:13 +00:00
|
|
|
/**
|
2025-11-25 13:37:27 +00:00
|
|
|
* Set output filename for single-file archives (gz, bz2)
|
2025-11-25 12:32:13 +00:00
|
|
|
*/
|
2025-11-25 13:37:27 +00:00
|
|
|
public fileName(name: string): this {
|
|
|
|
|
this._fileName = name;
|
|
|
|
|
return this;
|
2023-11-06 18:14:21 +01:00
|
|
|
}
|
|
|
|
|
|
2025-11-25 13:37:27 +00:00
|
|
|
// ============================================
|
|
|
|
|
// TERMINAL METHODS - EXTRACTION
|
|
|
|
|
// ============================================
|
|
|
|
|
|
2025-11-25 12:32:13 +00:00
|
|
|
/**
|
2025-11-25 13:37:27 +00:00
|
|
|
* Extract archive to filesystem directory
|
2025-11-25 12:32:13 +00:00
|
|
|
*/
|
2025-11-25 13:37:27 +00:00
|
|
|
public async extract(targetDir: string): Promise<void> {
|
|
|
|
|
this.ensureExtractionSource();
|
2023-11-06 19:38:36 +01:00
|
|
|
const done = plugins.smartpromise.defer<void>();
|
2025-11-25 13:37:27 +00:00
|
|
|
const streamFileStream = await this.toStreamFiles();
|
2025-11-25 12:32:13 +00:00
|
|
|
|
2024-03-17 00:29:42 +01:00
|
|
|
streamFileStream.pipe(
|
|
|
|
|
new plugins.smartstream.SmartDuplex({
|
|
|
|
|
objectMode: true,
|
2025-11-25 12:32:13 +00:00
|
|
|
writeFunction: async (streamFileArg: plugins.smartfile.StreamFile) => {
|
|
|
|
|
const innerDone = plugins.smartpromise.defer<void>();
|
2024-03-17 00:35:17 +01:00
|
|
|
const streamFile = streamFileArg;
|
2025-11-25 13:37:27 +00:00
|
|
|
let relativePath = streamFile.relativeFilePath || this._fileName || 'extracted_file';
|
2025-11-25 12:32:13 +00:00
|
|
|
|
2025-11-25 13:37:27 +00:00
|
|
|
// Apply stripComponents
|
|
|
|
|
if (this._stripComponents > 0) {
|
2025-11-25 12:32:13 +00:00
|
|
|
const parts = relativePath.split('/');
|
2025-11-25 13:37:27 +00:00
|
|
|
relativePath = parts.slice(this._stripComponents).join('/');
|
2025-11-25 12:32:13 +00:00
|
|
|
if (!relativePath) {
|
|
|
|
|
innerDone.resolve();
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2025-11-25 13:37:27 +00:00
|
|
|
// Apply filter
|
|
|
|
|
const filterFn = this.buildFilterFunction();
|
|
|
|
|
if (filterFn) {
|
2025-11-25 12:32:13 +00:00
|
|
|
const entryInfo: IArchiveEntryInfo = {
|
|
|
|
|
path: relativePath,
|
|
|
|
|
size: 0,
|
|
|
|
|
isDirectory: false,
|
|
|
|
|
isFile: true,
|
|
|
|
|
};
|
2025-11-25 13:37:27 +00:00
|
|
|
if (!filterFn(entryInfo)) {
|
2025-11-25 12:32:13 +00:00
|
|
|
innerDone.resolve();
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2024-03-17 00:29:42 +01:00
|
|
|
const readStream = await streamFile.createReadStream();
|
2025-11-25 11:59:11 +00:00
|
|
|
await plugins.fsPromises.mkdir(targetDir, { recursive: true });
|
2025-11-25 12:32:13 +00:00
|
|
|
const writePath = plugins.path.join(targetDir, relativePath);
|
2025-11-25 11:59:11 +00:00
|
|
|
await plugins.fsPromises.mkdir(plugins.path.dirname(writePath), { recursive: true });
|
|
|
|
|
const writeStream = plugins.fs.createWriteStream(writePath);
|
2024-03-17 00:29:42 +01:00
|
|
|
readStream.pipe(writeStream);
|
|
|
|
|
writeStream.on('finish', () => {
|
2025-11-25 12:32:13 +00:00
|
|
|
innerDone.resolve();
|
2024-03-17 00:29:42 +01:00
|
|
|
});
|
2025-11-25 12:32:13 +00:00
|
|
|
await innerDone.promise;
|
2024-03-17 00:29:42 +01:00
|
|
|
},
|
|
|
|
|
finalFunction: async () => {
|
2023-11-11 18:28:50 +01:00
|
|
|
done.resolve();
|
2024-03-17 00:29:42 +01:00
|
|
|
},
|
2025-11-25 12:32:13 +00:00
|
|
|
})
|
2024-03-17 00:29:42 +01:00
|
|
|
);
|
2025-11-25 12:32:13 +00:00
|
|
|
|
2023-11-06 19:38:36 +01:00
|
|
|
return done.promise;
|
|
|
|
|
}
|
2023-11-06 18:14:21 +01:00
|
|
|
|
2025-11-25 12:32:13 +00:00
|
|
|
/**
|
|
|
|
|
* Extract archive to a stream of StreamFile objects
|
|
|
|
|
*/
|
2025-11-25 13:37:27 +00:00
|
|
|
public async toStreamFiles(): Promise<plugins.smartstream.StreamIntake<plugins.smartfile.StreamFile>> {
|
|
|
|
|
this.ensureExtractionSource();
|
|
|
|
|
|
2025-11-25 12:32:13 +00:00
|
|
|
const streamFileIntake = new plugins.smartstream.StreamIntake<plugins.smartfile.StreamFile>({
|
|
|
|
|
objectMode: true,
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
// Guard to prevent multiple signalEnd calls
|
|
|
|
|
let hasSignaledEnd = false;
|
|
|
|
|
const safeSignalEnd = () => {
|
|
|
|
|
if (!hasSignaledEnd) {
|
|
|
|
|
hasSignaledEnd = true;
|
|
|
|
|
streamFileIntake.signalEnd();
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
|
2025-11-25 13:37:27 +00:00
|
|
|
const archiveStream = await this.getSourceStream();
|
2023-11-06 18:14:21 +01:00
|
|
|
const createAnalyzedStream = () => this.archiveAnalyzer.getAnalyzedStream();
|
|
|
|
|
|
|
|
|
|
const createUnpackStream = () =>
|
2025-11-25 12:32:13 +00:00
|
|
|
plugins.smartstream.createTransformFunction<IAnalyzedResult, void>(
|
2023-11-06 18:14:21 +01:00
|
|
|
async (analyzedResultChunk) => {
|
2023-11-07 04:19:54 +01:00
|
|
|
if (analyzedResultChunk.fileType?.mime === 'application/x-tar') {
|
2025-11-25 12:32:13 +00:00
|
|
|
const tarStream = analyzedResultChunk.decompressionStream as plugins.tarStream.Extract;
|
|
|
|
|
|
2024-03-17 00:29:42 +01:00
|
|
|
tarStream.on('entry', async (header, stream, next) => {
|
2024-06-06 20:59:04 +02:00
|
|
|
if (header.type === 'directory') {
|
2025-11-25 12:32:13 +00:00
|
|
|
stream.resume();
|
2025-08-18 02:06:31 +00:00
|
|
|
stream.on('end', () => next());
|
2024-06-06 20:59:04 +02:00
|
|
|
return;
|
|
|
|
|
}
|
2025-11-25 12:32:13 +00:00
|
|
|
|
2025-08-18 02:06:31 +00:00
|
|
|
const passThrough = new plugins.stream.PassThrough();
|
2025-11-25 12:32:13 +00:00
|
|
|
const streamfile = plugins.smartfile.StreamFile.fromStream(passThrough, header.name);
|
2024-03-17 00:29:42 +01:00
|
|
|
streamFileIntake.push(streamfile);
|
2025-08-18 02:06:31 +00:00
|
|
|
stream.pipe(passThrough);
|
|
|
|
|
stream.on('end', () => {
|
|
|
|
|
passThrough.end();
|
|
|
|
|
next();
|
2024-03-17 00:29:42 +01:00
|
|
|
});
|
|
|
|
|
});
|
2025-11-25 12:32:13 +00:00
|
|
|
|
|
|
|
|
tarStream.on('finish', () => {
|
|
|
|
|
safeSignalEnd();
|
2023-11-07 04:19:54 +01:00
|
|
|
});
|
2025-11-25 12:32:13 +00:00
|
|
|
|
|
|
|
|
analyzedResultChunk.resultStream.pipe(analyzedResultChunk.decompressionStream);
|
2024-03-17 00:29:42 +01:00
|
|
|
} else if (analyzedResultChunk.fileType?.mime === 'application/zip') {
|
|
|
|
|
analyzedResultChunk.resultStream
|
|
|
|
|
.pipe(analyzedResultChunk.decompressionStream)
|
2025-08-18 01:29:06 +00:00
|
|
|
.pipe(
|
|
|
|
|
new plugins.smartstream.SmartDuplex({
|
|
|
|
|
objectMode: true,
|
2025-11-25 12:32:13 +00:00
|
|
|
writeFunction: async (streamFileArg: plugins.smartfile.StreamFile) => {
|
2025-08-18 01:29:06 +00:00
|
|
|
streamFileIntake.push(streamFileArg);
|
|
|
|
|
},
|
|
|
|
|
finalFunction: async () => {
|
2025-11-25 12:32:13 +00:00
|
|
|
safeSignalEnd();
|
2025-08-18 01:29:06 +00:00
|
|
|
},
|
2025-11-25 12:32:13 +00:00
|
|
|
})
|
2025-08-18 01:29:06 +00:00
|
|
|
);
|
2025-11-25 12:32:13 +00:00
|
|
|
} else if (analyzedResultChunk.isArchive && analyzedResultChunk.decompressionStream) {
|
2025-08-18 02:06:31 +00:00
|
|
|
// For nested archives (like gzip containing tar)
|
2025-11-25 12:32:13 +00:00
|
|
|
analyzedResultChunk.resultStream
|
2023-11-06 18:14:21 +01:00
|
|
|
.pipe(analyzedResultChunk.decompressionStream)
|
|
|
|
|
.pipe(createAnalyzedStream())
|
|
|
|
|
.pipe(createUnpackStream());
|
|
|
|
|
} else {
|
|
|
|
|
const streamFile = plugins.smartfile.StreamFile.fromStream(
|
|
|
|
|
analyzedResultChunk.resultStream,
|
2025-11-25 12:32:13 +00:00
|
|
|
analyzedResultChunk.fileType?.ext
|
2023-11-06 18:14:21 +01:00
|
|
|
);
|
|
|
|
|
streamFileIntake.push(streamFile);
|
2025-11-25 12:32:13 +00:00
|
|
|
safeSignalEnd();
|
2023-11-06 18:14:21 +01:00
|
|
|
}
|
2024-03-17 00:29:42 +01:00
|
|
|
},
|
2025-11-25 12:32:13 +00:00
|
|
|
{ objectMode: true }
|
2023-11-06 18:14:21 +01:00
|
|
|
);
|
|
|
|
|
|
|
|
|
|
archiveStream.pipe(createAnalyzedStream()).pipe(createUnpackStream());
|
|
|
|
|
return streamFileIntake;
|
|
|
|
|
}
|
2025-11-25 12:32:13 +00:00
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Extract archive to an array of SmartFile objects (in-memory)
|
|
|
|
|
*/
|
2025-11-25 13:37:27 +00:00
|
|
|
public async toSmartFiles(): Promise<plugins.smartfile.SmartFile[]> {
|
|
|
|
|
this.ensureExtractionSource();
|
|
|
|
|
const streamFiles = await this.toStreamFiles();
|
2025-11-25 12:32:13 +00:00
|
|
|
const smartFiles: plugins.smartfile.SmartFile[] = [];
|
2025-11-25 13:37:27 +00:00
|
|
|
const filterFn = this.buildFilterFunction();
|
|
|
|
|
const pendingConversions: Promise<void>[] = [];
|
2025-11-25 12:32:13 +00:00
|
|
|
|
|
|
|
|
return new Promise((resolve, reject) => {
|
2025-11-25 13:37:27 +00:00
|
|
|
streamFiles.on('data', (streamFile: plugins.smartfile.StreamFile) => {
|
|
|
|
|
// Track all async conversions to ensure they complete before resolving
|
|
|
|
|
const conversion = (async () => {
|
|
|
|
|
try {
|
|
|
|
|
const smartFile = await streamFile.toSmartFile();
|
|
|
|
|
|
|
|
|
|
// Apply filter if configured
|
|
|
|
|
if (filterFn) {
|
|
|
|
|
const passes = filterFn({
|
|
|
|
|
path: smartFile.relative,
|
|
|
|
|
size: smartFile.contents.length,
|
|
|
|
|
isDirectory: false,
|
|
|
|
|
isFile: true,
|
|
|
|
|
});
|
|
|
|
|
if (!passes) return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
smartFiles.push(smartFile);
|
|
|
|
|
} catch (err) {
|
|
|
|
|
reject(err);
|
|
|
|
|
}
|
|
|
|
|
})();
|
|
|
|
|
pendingConversions.push(conversion);
|
|
|
|
|
});
|
|
|
|
|
streamFiles.on('end', async () => {
|
|
|
|
|
// Wait for all conversions to complete before resolving
|
|
|
|
|
await Promise.all(pendingConversions);
|
|
|
|
|
resolve(smartFiles);
|
2025-11-25 12:32:13 +00:00
|
|
|
});
|
|
|
|
|
streamFiles.on('error', reject);
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Extract a single file from the archive by path
|
|
|
|
|
*/
|
|
|
|
|
public async extractFile(filePath: string): Promise<plugins.smartfile.SmartFile | null> {
|
2025-11-25 13:37:27 +00:00
|
|
|
this.ensureExtractionSource();
|
|
|
|
|
const streamFiles = await this.toStreamFiles();
|
2025-11-25 12:32:13 +00:00
|
|
|
|
|
|
|
|
return new Promise((resolve, reject) => {
|
|
|
|
|
let found = false;
|
|
|
|
|
|
|
|
|
|
streamFiles.on('data', async (streamFile: plugins.smartfile.StreamFile) => {
|
|
|
|
|
if (streamFile.relativeFilePath === filePath || streamFile.relativeFilePath?.endsWith(filePath)) {
|
|
|
|
|
found = true;
|
|
|
|
|
try {
|
|
|
|
|
const smartFile = await streamFile.toSmartFile();
|
|
|
|
|
resolve(smartFile);
|
|
|
|
|
} catch (err) {
|
|
|
|
|
reject(err);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
streamFiles.on('end', () => {
|
|
|
|
|
if (!found) {
|
|
|
|
|
resolve(null);
|
|
|
|
|
}
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
streamFiles.on('error', reject);
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// ============================================
|
2025-11-25 13:37:27 +00:00
|
|
|
// TERMINAL METHODS - OUTPUT
|
|
|
|
|
// ============================================
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Build and finalize the archive, returning this instance
|
|
|
|
|
*/
|
|
|
|
|
public async build(): Promise<SmartArchive> {
|
|
|
|
|
await this.doBuild();
|
|
|
|
|
return this;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Internal build implementation (avoids thenable recursion)
|
|
|
|
|
*/
|
|
|
|
|
private async doBuild(): Promise<void> {
|
|
|
|
|
if (this._mode === 'extract') {
|
|
|
|
|
// For extraction mode, nothing to build
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (this.archiveBuffer) {
|
|
|
|
|
// Already built
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// For creation mode, build the archive buffer
|
|
|
|
|
this.ensureCreationFormat();
|
|
|
|
|
await this.resolveDirectories();
|
|
|
|
|
|
|
|
|
|
const entries = this.getFilteredEntries();
|
|
|
|
|
|
|
|
|
|
if (this.creationFormat === 'tar' || this.creationFormat === 'tar.gz' || this.creationFormat === 'tgz') {
|
|
|
|
|
if (this.creationFormat === 'tar') {
|
|
|
|
|
this.archiveBuffer = await this.tarTools.packFiles(entries);
|
|
|
|
|
} else {
|
|
|
|
|
this.archiveBuffer = await this.tarTools.packFilesToTarGz(entries, this._compressionLevel);
|
|
|
|
|
}
|
|
|
|
|
} else if (this.creationFormat === 'zip') {
|
|
|
|
|
this.archiveBuffer = await this.zipTools.createZip(entries, this._compressionLevel);
|
|
|
|
|
} else if (this.creationFormat === 'gz') {
|
|
|
|
|
if (entries.length !== 1) {
|
|
|
|
|
throw new Error('GZIP format only supports a single file');
|
|
|
|
|
}
|
|
|
|
|
let content: Buffer;
|
|
|
|
|
if (typeof entries[0].content === 'string') {
|
|
|
|
|
content = Buffer.from(entries[0].content);
|
|
|
|
|
} else if (Buffer.isBuffer(entries[0].content)) {
|
|
|
|
|
content = entries[0].content;
|
|
|
|
|
} else {
|
|
|
|
|
throw new Error('GZIP format requires string or Buffer content');
|
|
|
|
|
}
|
|
|
|
|
this.archiveBuffer = await this.gzipTools.compress(content, this._compressionLevel);
|
|
|
|
|
} else {
|
|
|
|
|
throw new Error(`Unsupported format: ${this.creationFormat}`);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Build archive and return as Buffer
|
|
|
|
|
*/
|
|
|
|
|
public async toBuffer(): Promise<Buffer> {
|
|
|
|
|
if (this._mode === 'create' && !this.archiveBuffer) {
|
|
|
|
|
await this.doBuild();
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (this.archiveBuffer) {
|
|
|
|
|
return this.archiveBuffer;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// For extraction mode, get the source as buffer
|
|
|
|
|
const stream = await this.getSourceStream();
|
|
|
|
|
return this.streamToBuffer(stream);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Build archive and write to file
|
|
|
|
|
*/
|
|
|
|
|
public async toFile(filePath: string): Promise<void> {
|
|
|
|
|
const buffer = await this.toBuffer();
|
|
|
|
|
await plugins.fsPromises.mkdir(plugins.path.dirname(filePath), { recursive: true });
|
|
|
|
|
await plugins.fsPromises.writeFile(filePath, buffer);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Get archive as a readable stream
|
|
|
|
|
*/
|
|
|
|
|
public async toStream(): Promise<plugins.stream.Readable> {
|
|
|
|
|
if (this._mode === 'create' && !this.archiveBuffer) {
|
|
|
|
|
await this.doBuild();
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (this.archiveBuffer) {
|
|
|
|
|
return plugins.stream.Readable.from(this.archiveBuffer);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return this.getSourceStream();
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// ============================================
|
|
|
|
|
// TERMINAL METHODS - ANALYSIS
|
2025-11-25 12:32:13 +00:00
|
|
|
// ============================================
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Analyze the archive and return metadata
|
|
|
|
|
*/
|
|
|
|
|
public async analyze(): Promise<IArchiveInfo> {
|
2025-11-25 13:37:27 +00:00
|
|
|
this.ensureExtractionSource();
|
|
|
|
|
const stream = await this.getSourceStream();
|
2025-11-25 12:32:13 +00:00
|
|
|
const firstChunk = await this.readFirstChunk(stream);
|
|
|
|
|
const fileType = await plugins.fileType.fileTypeFromBuffer(firstChunk);
|
|
|
|
|
|
|
|
|
|
let format: TArchiveFormat | null = null;
|
|
|
|
|
let isCompressed = false;
|
|
|
|
|
let isArchive = false;
|
|
|
|
|
|
|
|
|
|
if (fileType) {
|
|
|
|
|
switch (fileType.mime) {
|
|
|
|
|
case 'application/gzip':
|
|
|
|
|
format = 'gz';
|
|
|
|
|
isCompressed = true;
|
|
|
|
|
isArchive = true;
|
|
|
|
|
break;
|
|
|
|
|
case 'application/zip':
|
|
|
|
|
format = 'zip';
|
|
|
|
|
isCompressed = true;
|
|
|
|
|
isArchive = true;
|
|
|
|
|
break;
|
|
|
|
|
case 'application/x-tar':
|
|
|
|
|
format = 'tar';
|
|
|
|
|
isArchive = true;
|
|
|
|
|
break;
|
|
|
|
|
case 'application/x-bzip2':
|
|
|
|
|
format = 'bz2';
|
|
|
|
|
isCompressed = true;
|
|
|
|
|
isArchive = true;
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return {
|
|
|
|
|
format,
|
|
|
|
|
isCompressed,
|
|
|
|
|
isArchive,
|
|
|
|
|
};
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
2025-11-25 13:37:27 +00:00
|
|
|
* List all entries in the archive
|
2025-11-25 12:32:13 +00:00
|
|
|
*/
|
2025-11-25 13:37:27 +00:00
|
|
|
public async list(): Promise<IArchiveEntryInfo[]> {
|
|
|
|
|
this.ensureExtractionSource();
|
2025-11-25 12:32:13 +00:00
|
|
|
const entries: IArchiveEntryInfo[] = [];
|
2025-11-25 13:37:27 +00:00
|
|
|
const streamFiles = await this.toStreamFiles();
|
2025-11-25 12:32:13 +00:00
|
|
|
|
|
|
|
|
return new Promise((resolve, reject) => {
|
|
|
|
|
streamFiles.on('data', (streamFile: plugins.smartfile.StreamFile) => {
|
|
|
|
|
entries.push({
|
|
|
|
|
path: streamFile.relativeFilePath || 'unknown',
|
|
|
|
|
size: 0, // Size not available without reading
|
|
|
|
|
isDirectory: false,
|
|
|
|
|
isFile: true,
|
|
|
|
|
});
|
|
|
|
|
});
|
|
|
|
|
streamFiles.on('end', () => resolve(entries));
|
|
|
|
|
streamFiles.on('error', reject);
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Check if a specific file exists in the archive
|
|
|
|
|
*/
|
|
|
|
|
public async hasFile(filePath: string): Promise<boolean> {
|
2025-11-25 13:37:27 +00:00
|
|
|
this.ensureExtractionSource();
|
|
|
|
|
const entries = await this.list();
|
2025-11-25 12:32:13 +00:00
|
|
|
return entries.some((e) => e.path === filePath || e.path.endsWith(filePath));
|
|
|
|
|
}
|
|
|
|
|
|
2025-11-25 13:37:27 +00:00
|
|
|
|
|
|
|
|
// ============================================
|
|
|
|
|
// PRIVATE HELPERS
|
|
|
|
|
// ============================================
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Ensure we're not in create mode when calling extraction methods
|
|
|
|
|
*/
|
|
|
|
|
private ensureNotInCreateMode(methodName: string): void {
|
|
|
|
|
if (this._mode === 'create') {
|
|
|
|
|
throw new Error(
|
|
|
|
|
`Cannot call .${methodName}() in creation mode. ` +
|
|
|
|
|
`Use extraction methods (.url(), .file(), .stream(), .buffer()) for extraction mode.`
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Ensure we're not in extract mode when calling creation methods
|
|
|
|
|
*/
|
|
|
|
|
private ensureNotInExtractMode(methodName: string): void {
|
|
|
|
|
if (this._mode === 'extract') {
|
|
|
|
|
throw new Error(
|
|
|
|
|
`Cannot call .${methodName}() in extraction mode. ` +
|
|
|
|
|
`Use .format() for creation mode.`
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Ensure an extraction source is configured
|
|
|
|
|
*/
|
|
|
|
|
private ensureExtractionSource(): void {
|
|
|
|
|
if (!this.sourceUrl && !this.sourceFilePath && !this.sourceStream && !this.archiveBuffer) {
|
|
|
|
|
throw new Error(
|
|
|
|
|
'No source configured. Call .url(), .file(), .stream(), or .buffer() first.'
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Ensure a format is configured for creation
|
|
|
|
|
*/
|
|
|
|
|
private ensureCreationFormat(): void {
|
|
|
|
|
if (!this.creationFormat) {
|
|
|
|
|
throw new Error('No format specified. Call .format() before creating archive.');
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Get the source stream
|
|
|
|
|
*/
|
|
|
|
|
private async getSourceStream(): Promise<plugins.stream.Readable> {
|
|
|
|
|
if (this.archiveBuffer) {
|
|
|
|
|
return plugins.stream.Readable.from(this.archiveBuffer);
|
|
|
|
|
}
|
|
|
|
|
if (this.sourceStream) {
|
|
|
|
|
return this.sourceStream;
|
|
|
|
|
}
|
|
|
|
|
if (this.sourceUrl) {
|
|
|
|
|
const response = await plugins.smartrequest.SmartRequest.create()
|
|
|
|
|
.url(this.sourceUrl)
|
|
|
|
|
.get();
|
|
|
|
|
const webStream = response.stream();
|
|
|
|
|
return plugins.stream.Readable.fromWeb(webStream as any);
|
|
|
|
|
}
|
|
|
|
|
if (this.sourceFilePath) {
|
|
|
|
|
return plugins.fs.createReadStream(this.sourceFilePath);
|
|
|
|
|
}
|
|
|
|
|
throw new Error('No archive source configured');
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Build a combined filter function from all configured filters
|
|
|
|
|
*/
|
|
|
|
|
private buildFilterFunction(): TEntryFilter | undefined {
|
|
|
|
|
const hasFilters =
|
|
|
|
|
this._filters.length > 0 ||
|
|
|
|
|
this._includePatterns.length > 0 ||
|
|
|
|
|
this._excludePatterns.length > 0;
|
|
|
|
|
|
|
|
|
|
if (!hasFilters) {
|
|
|
|
|
return undefined;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return (entry: IArchiveEntryInfo) => {
|
|
|
|
|
// Check include patterns (if any specified, at least one must match)
|
|
|
|
|
if (this._includePatterns.length > 0) {
|
|
|
|
|
const included = this._includePatterns.some((p) => p.test(entry.path));
|
|
|
|
|
if (!included) return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Check exclude patterns (none must match)
|
|
|
|
|
for (const pattern of this._excludePatterns) {
|
|
|
|
|
if (pattern.test(entry.path)) return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Check custom filters (all must pass)
|
|
|
|
|
for (const filter of this._filters) {
|
|
|
|
|
if (!filter(entry)) return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return true;
|
|
|
|
|
};
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Resolve pending directories to entries
|
|
|
|
|
*/
|
|
|
|
|
private async resolveDirectories(): Promise<void> {
|
|
|
|
|
for (const dir of this.pendingDirectories) {
|
|
|
|
|
const files = await plugins.listFileTree(dir.sourcePath, '**/*');
|
|
|
|
|
for (const filePath of files) {
|
|
|
|
|
const archivePath = dir.archiveBase
|
|
|
|
|
? plugins.path.join(dir.archiveBase, filePath)
|
|
|
|
|
: filePath;
|
|
|
|
|
const absolutePath = plugins.path.join(dir.sourcePath, filePath);
|
|
|
|
|
const content = await plugins.fsPromises.readFile(absolutePath);
|
|
|
|
|
this.pendingEntries.push({
|
|
|
|
|
archivePath,
|
|
|
|
|
content,
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
this.pendingDirectories = [];
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Get entries filtered by include/exclude patterns
|
|
|
|
|
*/
|
|
|
|
|
private getFilteredEntries(): IArchiveEntry[] {
|
|
|
|
|
const filterFn = this.buildFilterFunction();
|
|
|
|
|
if (!filterFn) {
|
|
|
|
|
return this.pendingEntries;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return this.pendingEntries.filter((entry) =>
|
|
|
|
|
filterFn({
|
|
|
|
|
path: entry.archivePath,
|
|
|
|
|
size: 0,
|
|
|
|
|
isDirectory: false,
|
|
|
|
|
isFile: true,
|
|
|
|
|
})
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Convert a stream to buffer
|
|
|
|
|
*/
|
|
|
|
|
private async streamToBuffer(stream: plugins.stream.Readable): Promise<Buffer> {
|
|
|
|
|
const chunks: Buffer[] = [];
|
|
|
|
|
return new Promise((resolve, reject) => {
|
|
|
|
|
stream.on('data', (chunk) => chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk)));
|
|
|
|
|
stream.on('end', () => resolve(Buffer.concat(chunks)));
|
|
|
|
|
stream.on('error', reject);
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
|
2025-11-25 12:32:13 +00:00
|
|
|
/**
|
2025-11-25 13:37:27 +00:00
|
|
|
* Read first chunk from stream
|
2025-11-25 12:32:13 +00:00
|
|
|
*/
|
|
|
|
|
private async readFirstChunk(stream: plugins.stream.Readable): Promise<Buffer> {
|
|
|
|
|
return new Promise((resolve, reject) => {
|
|
|
|
|
const onData = (chunk: Buffer) => {
|
|
|
|
|
stream.removeListener('data', onData);
|
|
|
|
|
stream.removeListener('error', reject);
|
|
|
|
|
resolve(chunk);
|
|
|
|
|
};
|
|
|
|
|
stream.on('data', onData);
|
|
|
|
|
stream.on('error', reject);
|
|
|
|
|
});
|
|
|
|
|
}
|
2023-11-06 18:14:21 +01:00
|
|
|
}
|