BREAKING CHANGE(SmartArchive): Refactor public API: rename factory/extraction methods, introduce typed interfaces and improved compression tools
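For orientation, a rough usage sketch of the reworked packing API. The method and type names (TarTools, IArchiveEntry, TCompressionLevel, packFilesToTarGz) are taken from the diff below; the package import path, the export of these symbols from the package index, and the exact range of TCompressionLevel are assumptions, not confirmed by this commit.

import { TarTools } from '@push.rocks/smartarchive'; // assumed export path

async function buildArchive(): Promise<Buffer> {
  const tarTools = new TarTools();
  // IArchiveEntry fields (archivePath, content, size) follow how packFiles()
  // consumes them in the diff; size is omitted here since content is inline.
  return tarTools.packFilesToTarGz(
    [
      { archivePath: 'hello.txt', content: 'hello world' },
      { archivePath: 'data/blob.bin', content: Buffer.from([1, 2, 3]) },
    ],
    9, // assumed zlib-style 0-9 value for TCompressionLevel
  );
}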
@@ -1,11 +1,14 @@
 import type { SmartArchive } from './classes.smartarchive.js';
 import * as plugins from './plugins.js';
+import type { IArchiveEntry, TCompressionLevel } from './interfaces.js';
+import { GzipTools } from './classes.gziptools.js';
 
+/**
+ * TAR archive creation and extraction utilities
+ */
 export class TarTools {
   // INSTANCE
   constructor() {}
 
   // packing
   /**
    * Add a file to a TAR pack stream
    */
   public async addFileToPack(
     pack: plugins.tarStream.Pack,
     optionsArg: {
@@ -13,12 +16,12 @@ export class TarTools {
       content?:
         | string
         | Buffer
-        | plugins.smartstream.stream.Readable
+        | plugins.stream.Readable
         | plugins.smartfile.SmartFile
         | plugins.smartfile.StreamFile;
       byteLength?: number;
       filePath?: string;
-    },
+    }
   ): Promise<void> {
     return new Promise<void>(async (resolve, reject) => {
       let fileName: string | null = null;
@@ -26,18 +29,20 @@ export class TarTools {
       if (optionsArg.fileName) {
         fileName = optionsArg.fileName;
       } else if (optionsArg.content instanceof plugins.smartfile.SmartFile) {
-        fileName = (optionsArg.content as plugins.smartfile.SmartFile).relative;
+        fileName = optionsArg.content.relative;
       } else if (optionsArg.content instanceof plugins.smartfile.StreamFile) {
-        fileName = (optionsArg.content as plugins.smartfile.StreamFile)
-          .relativeFilePath;
+        fileName = optionsArg.content.relativeFilePath;
       } else if (optionsArg.filePath) {
         fileName = optionsArg.filePath;
       }
 
-      /**
-       * contentByteLength is used to set the size of the entry in the tar file
-       */
-      let contentByteLength: number;
+      if (!fileName) {
+        reject(new Error('No filename specified for TAR entry'));
+        return;
+      }
+
+      // Determine content byte length
+      let contentByteLength: number | undefined;
       if (optionsArg.byteLength) {
         contentByteLength = optionsArg.byteLength;
       } else if (typeof optionsArg.content === 'string') {
@@ -45,72 +50,59 @@ export class TarTools {
       } else if (Buffer.isBuffer(optionsArg.content)) {
         contentByteLength = optionsArg.content.length;
       } else if (optionsArg.content instanceof plugins.smartfile.SmartFile) {
-        contentByteLength = await optionsArg.content.getSize(); // assuming SmartFile has getSize method
+        contentByteLength = await optionsArg.content.getSize();
       } else if (optionsArg.content instanceof plugins.smartfile.StreamFile) {
-        contentByteLength = await optionsArg.content.getSize(); // assuming StreamFile has getSize method
-      } else if (
-        optionsArg.content instanceof plugins.smartstream.stream.Readable
-      ) {
-        console.warn(
-          '@push.rocks/smartarchive: When streaming, it is recommended to provide byteLength, if known.',
-        );
+        contentByteLength = await optionsArg.content.getSize();
       } else if (optionsArg.filePath) {
         const fileStat = await plugins.fsPromises.stat(optionsArg.filePath);
         contentByteLength = fileStat.size;
       }
 
-      /**
-       * here we try to harmonize all kind of entries towards a readable stream
-       */
-      let content: plugins.smartstream.stream.Readable;
+      // Convert all content types to Readable stream
+      let content: plugins.stream.Readable;
       if (Buffer.isBuffer(optionsArg.content)) {
-        content = plugins.smartstream.stream.Readable.from(optionsArg.content);
+        content = plugins.stream.Readable.from(optionsArg.content);
       } else if (typeof optionsArg.content === 'string') {
-        content = plugins.smartstream.stream.Readable.from(
-          Buffer.from(optionsArg.content),
-        );
+        content = plugins.stream.Readable.from(Buffer.from(optionsArg.content));
       } else if (optionsArg.content instanceof plugins.smartfile.SmartFile) {
-        content = plugins.smartstream.stream.Readable.from(
-          optionsArg.content.contents,
-        );
+        content = plugins.stream.Readable.from(optionsArg.content.contents);
       } else if (optionsArg.content instanceof plugins.smartfile.StreamFile) {
         content = await optionsArg.content.createReadStream();
-      } else if (
-        optionsArg.content instanceof plugins.smartstream.stream.Readable
-      ) {
+      } else if (optionsArg.content instanceof plugins.stream.Readable) {
         content = optionsArg.content;
       } else if (optionsArg.filePath) {
         content = plugins.fs.createReadStream(optionsArg.filePath);
       } else {
         reject(new Error('No content or filePath specified for TAR entry'));
         return;
       }
 
       const entry = pack.entry(
         {
           name: fileName,
-          ...(contentByteLength
-            ? {
-                size: contentByteLength,
-              }
-            : null),
+          ...(contentByteLength !== undefined ? { size: contentByteLength } : {}),
         },
-        (err: Error) => {
+        (err: Error | null) => {
           if (err) {
             reject(err);
           } else {
             resolve();
           }
-        },
+        }
       );
 
       content.pipe(entry);
-      resolve();
+      // Note: resolve() is called in the callback above when pipe completes
     });
   }
 
   /**
-   * packs a directory from disk into a tar stream
-   * @param directoryPath
+   * Pack a directory into a TAR stream
    */
-  public async packDirectory(directoryPath: string) {
+  public async packDirectory(directoryPath: string): Promise<plugins.tarStream.Pack> {
     const fileTree = await plugins.listFileTree(directoryPath, '**/*');
     const pack = await this.getPackStream();
 
     for (const filePath of fileTree) {
       const absolutePath = plugins.path.join(directoryPath, filePath);
       const fileStat = await plugins.fsPromises.stat(absolutePath);
@@ -121,16 +113,96 @@ export class TarTools {
         content: plugins.fs.createReadStream(absolutePath),
       });
     }
 
     return pack;
   }
 
-  public async getPackStream() {
-    const pack = plugins.tarStream.pack();
-    return pack;
+  /**
+   * Get a new TAR pack stream
+   */
+  public async getPackStream(): Promise<plugins.tarStream.Pack> {
+    return plugins.tarStream.pack();
   }
 
   // extracting
-  getDecompressionStream() {
+  /**
+   * Get a TAR extraction stream
+   */
+  public getDecompressionStream(): plugins.tarStream.Extract {
     return plugins.tarStream.extract();
   }
 
+  /**
+   * Pack files into a TAR buffer
+   */
+  public async packFiles(files: IArchiveEntry[]): Promise<Buffer> {
+    const pack = await this.getPackStream();
+
+    for (const file of files) {
+      await this.addFileToPack(pack, {
+        fileName: file.archivePath,
+        content: file.content as string | Buffer | plugins.stream.Readable | plugins.smartfile.SmartFile | plugins.smartfile.StreamFile,
+        byteLength: file.size,
+      });
+    }
+
+    pack.finalize();
+
+    const chunks: Buffer[] = [];
+    return new Promise((resolve, reject) => {
+      pack.on('data', (chunk: Buffer) => chunks.push(chunk));
+      pack.on('end', () => resolve(Buffer.concat(chunks)));
+      pack.on('error', reject);
+    });
+  }
+
+  /**
+   * Pack a directory into a TAR.GZ buffer
+   */
+  public async packDirectoryToTarGz(
+    directoryPath: string,
+    compressionLevel?: TCompressionLevel
+  ): Promise<Buffer> {
+    const pack = await this.packDirectory(directoryPath);
+    pack.finalize();
+
+    const gzipTools = new GzipTools();
+    const gzipStream = gzipTools.getCompressionStream(compressionLevel);
+
+    const chunks: Buffer[] = [];
+    return new Promise((resolve, reject) => {
+      pack
+        .pipe(gzipStream)
+        .on('data', (chunk: Buffer) => chunks.push(chunk))
+        .on('end', () => resolve(Buffer.concat(chunks)))
+        .on('error', reject);
+    });
+  }
+
+  /**
+   * Pack a directory into a TAR.GZ stream
+   */
+  public async packDirectoryToTarGzStream(
+    directoryPath: string,
+    compressionLevel?: TCompressionLevel
+  ): Promise<plugins.stream.Readable> {
+    const pack = await this.packDirectory(directoryPath);
+    pack.finalize();
+
+    const gzipTools = new GzipTools();
+    const gzipStream = gzipTools.getCompressionStream(compressionLevel);
+
+    return pack.pipe(gzipStream);
+  }
+
+  /**
+   * Pack files into a TAR.GZ buffer
+   */
+  public async packFilesToTarGz(
+    files: IArchiveEntry[],
+    compressionLevel?: TCompressionLevel
+  ): Promise<Buffer> {
+    const tarBuffer = await this.packFiles(files);
+    const gzipTools = new GzipTools();
+    return gzipTools.compress(tarBuffer, compressionLevel);
+  }
 }
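For the extraction side, a minimal sketch of consuming the typed getDecompressionStream() shown above. It assumes TarTools is exported from the package index and that the returned plugins.tarStream.Extract behaves like a standard tar-stream Extract; the gunzip step uses plain Node zlib rather than GzipTools.

import * as fs from 'fs';
import * as zlib from 'zlib';
import { TarTools } from '@push.rocks/smartarchive'; // assumed export path

async function listTarGzEntries(archivePath: string): Promise<string[]> {
  const tarTools = new TarTools();
  const extract = tarTools.getDecompressionStream(); // tar-stream Extract per the new typing
  const names: string[] = [];

  return new Promise((resolve, reject) => {
    extract.on('entry', (header, stream, next) => {
      names.push(header.name);
      stream.on('end', next);
      stream.resume(); // drain the entry so the next one can be emitted
    });
    extract.on('finish', () => resolve(names));
    extract.on('error', reject);

    fs.createReadStream(archivePath)
      .pipe(zlib.createGunzip()) // plain Node gunzip; not part of this package
      .pipe(extract);
  });
}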