diff --git a/npmextra.json b/npmextra.json
index 5a78b49..c50eafe 100644
--- a/npmextra.json
+++ b/npmextra.json
@@ -18,8 +18,12 @@
         "decompression",
         "zip",
         "tar",
+        "gzip",
         "bzip2",
-        "gzip"
+        "file extraction",
+        "file creation",
+        "data analysis",
+        "file stream"
       ]
     }
   },
diff --git a/package.json b/package.json
index 9f7af68..5e78cdf 100644
--- a/package.json
+++ b/package.json
@@ -64,7 +64,11 @@
     "decompression",
     "zip",
     "tar",
+    "gzip",
     "bzip2",
-    "gzip"
+    "file extraction",
+    "file creation",
+    "data analysis",
+    "file stream"
   ]
-}
+}
\ No newline at end of file
diff --git a/readme.md b/readme.md
index 9a1ceff..b72483e 100644
--- a/readme.md
+++ b/readme.md
@@ -1,9 +1,10 @@
 # @push.rocks/smartarchive
-work with archives
+
+`@push.rocks/smartarchive` is a powerful library designed for managing archive files. It provides utilities for compressing and decompressing data in various formats such as zip, tar, gzip, and bzip2. This library aims to simplify the process of handling archive files, making it an ideal choice for projects that require manipulation of archived data.
 
 ## Install
 
-To install `@push.rocks/smartarchive`, you need to use npm or yarn. Run either of the following commands in your project directory:
+To install `@push.rocks/smartarchive`, you can either use npm or yarn. Run one of the following commands in your project directory:
 
 ```shell
 npm install @push.rocks/smartarchive --save
@@ -18,10 +19,9 @@ yarn add @push.rocks/smartarchive
 This will add `@push.rocks/smartarchive` to your project's dependencies.
 
 ## Usage
+`@push.rocks/smartarchive` provides an easy-to-use API for extracting, creating, and analyzing archive files. Below, we'll cover how to get started and explore various features of the module.
 
-`@push.rocks/smartarchive` is a powerful module designed to simplify the process of working with archive files such as zip, tar, gzip, and more. It provides an easy-to-use API for extracting, creating, and analyzing archives, making it an ideal choice for projects that require manipulation of archive files.
-
-### Getting Started
+### Importing SmartArchive
 
 First, import `SmartArchive` from `@push.rocks/smartarchive` using ESM syntax:
 
@@ -31,9 +31,11 @@ import { SmartArchive } from '@push.rocks/smartarchive';
 
 ### Extracting Archive Files
 
-To extract an archive file, you can use `SmartArchive.fromArchiveUrl`, `SmartArchive.fromArchiveFile`, or `SmartArchive.fromArchiveStream` methods depending on the source of your archive. Here's an example of extracting an archive from a URL:
+You can extract archive files from different sources using `SmartArchive.fromArchiveUrl`, `SmartArchive.fromArchiveFile`, and `SmartArchive.fromArchiveStream`. Here's an example of extracting an archive from a URL:
 
 ```typescript
+import { SmartArchive } from '@push.rocks/smartarchive';
+
 async function extractArchiveFromURL() {
   const url = 'https://example.com/archive.zip';
   const targetDir = '/path/to/extract';
@@ -43,35 +45,37 @@ async function extractArchiveFromURL() {
 
   console.log('Archive extracted successfully.');
 }
+
+extractArchiveFromURL();
 ```
 
-### Creating Archive Files
+### Extracting an Archive from a File
 
-Creating archive files such as zip or tar.gz is straightforward with `smartarchive`. At the moment, you'll prepare the contents programmatically and then compress them. Detailed support for creating archives will be covered in future updates.
-
-### Analyzing Archive Files
-
-Analyzing the content of archives without extracting them can be useful in various scenarios, such as when you need to inspect the archive's content before deciding to extract it. Here's how you might analyze an archive:
+Similarly, you can extract an archive from a local file:
 
 ```typescript
-async function analyzeArchive() {
-  const url = 'https://example.com/archive.zip';
-
-  const archive = await SmartArchive.fromArchiveUrl(url);
-  const analysisResult = await archive.analyzeContent();
-
-  console.log(analysisResult); // Outputs details about the archive content
+import { SmartArchive } from '@push.rocks/smartarchive';
+
+async function extractArchiveFromFile() {
+  const filePath = '/path/to/archive.zip';
+  const targetDir = '/path/to/extract';
+
+  const archive = await SmartArchive.fromArchiveFile(filePath);
+  await archive.exportToFs(targetDir);
+
+  console.log('Archive extracted successfully.');
 }
+
+extractArchiveFromFile();
 ```
 
-Note: Replace `analyzeContent` with the appropriate method calls as per your implementation or update, as `smartarchive` provides foundational classes and methods for interaction with archive files but does not directly implement an `analyzeContent` method by default.
+### Stream-Based Extraction
 
-### Stream Operations
-
-`smartarchive` offers streaming operations, allowing you to work with large archives efficiently. Here's an example of using streams to extract an archive:
+For larger files, you might prefer a streaming approach to prevent high memory consumption. Here’s an example:
 
 ```typescript
-import { createReadStream, createWriteStream } from 'fs';
+import { SmartArchive } from '@push.rocks/smartarchive';
+import { createReadStream, createWriteStream } from 'fs';
 
 async function extractArchiveUsingStream() {
   const archiveStream = createReadStream('/path/to/archive.zip');
@@ -80,15 +84,183 @@ async function extractArchiveUsingStream() {
 
   extractionStream.pipe(createWriteStream('/path/to/destination'));
 }
+
+extractArchiveUsingStream();
 ```
 
-### Conclusion
+### Analyzing Archive Files
 
-`@push.rocks/smartarchive` simplifies the process of working with various archive formats in JavaScript and TypeScript projects. By providing an easy-to-use API for common archive operations, it enables developers to integrate archive manipulation features into their applications efficiently.
+Sometimes, you may need to inspect the contents of an archive before extracting it. The following example shows how to analyze an archive:
 
-Remember, the examples provided here are to give you a starting point. Depending on your specific use case, you may need to adjust these examples to fit your project's requirements. Always refer to the latest documentation for the most current information and methods available in `@push.rocks/smartarchive`.
+```typescript
+import { SmartArchive } from '@push.rocks/smartarchive';
 
-For more information and API references, check the official [`@push.rocks/smartarchive` GitHub repository](https://github.com/pushrocks/smartarchive).
+async function analyzeArchive() {
+  const filePath = '/path/to/archive.zip';
+
+  const archive = await SmartArchive.fromArchiveFile(filePath);
+  const analysisResult = await archive.analyzeContent();
+
+  console.log(analysisResult); // Outputs details about the archive content
+}
+
+analyzeArchive();
+```
+
+### Creating Archive Files
+
+Creating an archive file is straightforward. Here we demonstrate creating a tar.gz archive:
+
+```typescript
+import { SmartArchive } from '@push.rocks/smartarchive';
+import { createWriteStream } from 'fs';
+
+async function createTarGzArchive() {
+  const archive = new SmartArchive();
+
+  // Add directories and files
+  archive.addedDirectories.push('/path/to/directory1');
+  archive.addedFiles.push('/path/to/file1.txt');
+
+  // Export as tar.gz
+  const tarGzStream = await archive.exportToTarGzStream();
+
+  // Save to filesystem or handle as needed
+  tarGzStream.pipe(createWriteStream('/path/to/destination.tar.gz'));
+}
+
+createTarGzArchive();
+```
+
+### Stream Operations
+
+Here's an example of using `smartarchive`'s streaming capabilities:
+
+```typescript
+import { createReadStream, createWriteStream } from 'fs';
+import { SmartArchive } from '@push.rocks/smartarchive';
+
+async function extractArchiveUsingStreams() {
+  const archiveStream = createReadStream('/path/to/archive.zip');
+  const archive = await SmartArchive.fromArchiveStream(archiveStream);
+  const extractionStream = await archive.exportToStreamOfStreamFiles();
+
+  extractionStream.pipe(createWriteStream('/path/to/extracted'));
+}
+
+extractArchiveUsingStreams();
+```
+
+### Advanced Decompression Usage
+
+`smartarchive` supports multiple compression formats. It also provides detailed control over the decompression processes:
+
+- For ZIP files, `ZipTools` handles decompression using the `fflate` library.
+- For TAR files, `TarTools` uses `tar-stream`.
+- For GZIP files, `GzipTools` provides a `CompressGunzipTransform` and `DecompressGunzipTransform`.
+- For BZIP2 files, `Bzip2Tools` utilizes custom streaming decompression.
+
+Example: Working with a GZIP-compressed archive:
+
+```typescript
+import { createReadStream, createWriteStream } from 'fs';
+import { SmartArchive } from '@push.rocks/smartarchive';
+
+async function decompressGzipArchive() {
+  const filePath = '/path/to/archive.gz';
+  const targetDir = '/path/to/extract';
+
+  const archive = await SmartArchive.fromArchiveFile(filePath);
+  await archive.exportToFs(targetDir);
+
+  console.log('GZIP archive decompressed successfully.');
+}
+
+decompressGzipArchive();
+```
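+
+BZIP2 does not get a dedicated example above, so here is a minimal sketch. It assumes that `.bz2` files are routed through the same high-level `fromArchiveFile`/`exportToFs` calls shown for zip and gzip, with `Bzip2Tools` handling the decompression internally; the file paths are placeholders:
+
+```typescript
+import { SmartArchive } from '@push.rocks/smartarchive';
+
+async function decompressBzip2Archive() {
+  const filePath = '/path/to/archive.bz2'; // placeholder path, assumed bzip2 input
+  const targetDir = '/path/to/extract';
+
+  // Same calls as in the zip and gzip examples above; the bzip2-specific
+  // handling is assumed to happen inside SmartArchive via Bzip2Tools.
+  const archive = await SmartArchive.fromArchiveFile(filePath);
+  await archive.exportToFs(targetDir);
+
+  console.log('BZIP2 archive decompressed successfully.');
+}
+
+decompressBzip2Archive();
+```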
+
+### Using Custom Decompression Streams
+
+You can inject custom decompression streams where needed:
+
+```typescript
+import { createReadStream, createWriteStream } from 'fs';
+import { SmartArchive, GzipTools } from '@push.rocks/smartarchive';
+
+async function customDecompression() {
+  const filePath = '/path/to/archive.gz';
+  const targetDir = '/path/to/extract';
+
+  const archive = await SmartArchive.fromArchiveFile(filePath);
+  const gzipTools = new GzipTools();
+  const decompressionStream = gzipTools.getDecompressionStream();
+
+  const archiveStream = await archive.getArchiveStream();
+  archiveStream.pipe(decompressionStream).pipe(createWriteStream(targetDir));
+
+  console.log('Custom GZIP decompression successful.');
+}
+
+customDecompression();
+```
+
+### Custom Pack and Unpack Tar
+
+When dealing with tar archives, you may need to perform custom packing and unpacking:
+
+```typescript
+import { SmartArchive, TarTools } from '@push.rocks/smartarchive';
+import { createReadStream, createWriteStream } from 'fs';
+
+async function customTarOperations() {
+  const tarTools = new TarTools();
+
+  // Packing a directory into a tar stream
+  const packStream = await tarTools.packDirectory('/path/to/directory');
+  packStream.pipe(createWriteStream('/path/to/archive.tar'));
+
+  // Extracting files from a tar stream
+  const extractStream = tarTools.getDecompressionStream();
+  createReadStream('/path/to/archive.tar').pipe(extractStream).on('entry', (header, stream, next) => {
+    const writeStream = createWriteStream(`/path/to/extract/${header.name}`);
+    stream.pipe(writeStream);
+    stream.on('end', next);
+  });
+}
+
+customTarOperations();
+```
+
+### Extract and Analyze All-in-One
+
+To extract and simultaneously analyze archive content:
+
+```typescript
+import { createReadStream, createWriteStream } from 'fs';
+import { SmartArchive } from '@push.rocks/smartarchive';
+
+async function extractAndAnalyze() {
+  const filePath = '/path/to/archive.zip';
+  const targetDir = '/path/to/extract';
+
+  const archive = await SmartArchive.fromArchiveFile(filePath);
+  const analyzedStream = archive.archiveAnalyzer.getAnalyzedStream();
+  const extractionStream = await archive.exportToStreamOfStreamFiles();
+
+  analyzedStream.pipe(extractionStream).pipe(createWriteStream(targetDir));
+
+  analyzedStream.on('data', (chunk) => {
+    console.log(JSON.stringify(chunk, null, 2));
+  });
+}
+
+extractAndAnalyze();
+```
+
+### Final Words
+
+These examples demonstrate various use cases for `@push.rocks/smartarchive`. Depending on your specific project requirements, you can adapt these examples to suit your needs. Always refer to the latest documentation for the most current information and methods available in `@push.rocks/smartarchive`.
+
+For more information and API references, check the official [`@push.rocks/smartarchive` GitHub repository](https://code.foss.global/push.rocks/smartarchive).
 
 ## License and Legal Information
 
diff --git a/ts/00_commitinfo_data.ts b/ts/00_commitinfo_data.ts
index 3945733..904927d 100644
--- a/ts/00_commitinfo_data.ts
+++ b/ts/00_commitinfo_data.ts
@@ -3,6 +3,6 @@
  */
 export const commitinfo = {
   name: '@push.rocks/smartarchive',
-  version: '4.0.32',
+  version: '4.0.33',
   description: 'A library for working with archive files, providing utilities for compressing and decompressing data.'
 }