fix(gzip): Improve gzip streaming decompression, archive analysis and unpacking; add gzip tests
This commit is contained in:
219
test/test.gzip.ts
Normal file
219
test/test.gzip.ts
Normal file
@@ -0,0 +1,219 @@
import { tap, expect } from '@git.zone/tstest/tapbundle';
import * as plugins from './plugins.js';
import * as smartarchive from '../ts/index.js';
const testPaths = {
|
||||
nogitDir: plugins.path.join(
|
||||
plugins.smartpath.get.dirnameFromImportMetaUrl(import.meta.url),
|
||||
'../.nogit/',
|
||||
),
|
||||
gzipTestDir: plugins.path.join(
|
||||
plugins.smartpath.get.dirnameFromImportMetaUrl(import.meta.url),
|
||||
'../.nogit/gzip-test',
|
||||
),
|
||||
};
|
||||
|
||||
// Ensure the gzip working directory exists before any test runs.
tap.preTask('should prepare test directories', async () => {
  await plugins.smartfile.fs.ensureDir(testPaths.gzipTestDir);
});
|
||||
|
||||
tap.test('should create and extract a gzip file', async () => {
|
||||
// Create test data
|
||||
const testContent = 'This is a test file for gzip compression and decompression.\n'.repeat(100);
|
||||
const testFileName = 'test-file.txt';
|
||||
const gzipFileName = 'test-file.txt.gz';
|
||||
|
||||
// Write the original file
|
||||
await plugins.smartfile.memory.toFs(
|
||||
testContent,
|
||||
plugins.path.join(testPaths.gzipTestDir, testFileName)
|
||||
);
|
||||
|
||||
// Compress the file using gzip
|
||||
const originalFile = await plugins.smartfile.fs.fileTreeToObject(
|
||||
testPaths.gzipTestDir,
|
||||
testFileName
|
||||
);
|
||||
|
||||
// Create gzip compressed version using fflate directly
|
||||
const fflate = await import('fflate');
|
||||
const compressed = fflate.gzipSync(Buffer.from(testContent));
|
||||
await plugins.smartfile.memory.toFs(
|
||||
Buffer.from(compressed),
|
||||
plugins.path.join(testPaths.gzipTestDir, gzipFileName)
|
||||
);
|
||||
|
||||
// Now test extraction using SmartArchive
|
||||
const gzipArchive = await smartarchive.SmartArchive.fromArchiveFile(
|
||||
plugins.path.join(testPaths.gzipTestDir, gzipFileName)
|
||||
);
|
||||
|
||||
// Export to a new location
|
||||
const extractPath = plugins.path.join(testPaths.gzipTestDir, 'extracted');
|
||||
await plugins.smartfile.fs.ensureDir(extractPath);
|
||||
// Provide a filename since gzip doesn't contain filename metadata
|
||||
await gzipArchive.exportToFs(extractPath, 'test-file.txt');
|
||||
|
||||
// Read the extracted file
|
||||
const extractedContent = await plugins.smartfile.fs.toStringSync(
|
||||
plugins.path.join(extractPath, 'test-file.txt')
|
||||
);
|
||||
|
||||
// Verify the content matches
|
||||
expect(extractedContent).toEqual(testContent);
|
||||
});
|
||||
|
||||
tap.test('should handle gzip stream extraction', async () => {
|
||||
// Create test data
|
||||
const testContent = 'Stream test data for gzip\n'.repeat(50);
|
||||
const gzipFileName = 'stream-test.txt.gz';
|
||||
|
||||
// Create gzip compressed version
|
||||
const fflate = await import('fflate');
|
||||
const compressed = fflate.gzipSync(Buffer.from(testContent));
|
||||
await plugins.smartfile.memory.toFs(
|
||||
Buffer.from(compressed),
|
||||
plugins.path.join(testPaths.gzipTestDir, gzipFileName)
|
||||
);
|
||||
|
||||
// Create a read stream for the gzip file
|
||||
const gzipStream = plugins.smartfile.fsStream.createReadStream(
|
||||
plugins.path.join(testPaths.gzipTestDir, gzipFileName)
|
||||
);
|
||||
|
||||
// Test extraction using SmartArchive from stream
|
||||
const gzipArchive = await smartarchive.SmartArchive.fromArchiveStream(gzipStream);
|
||||
|
||||
// Export to stream and collect the result
|
||||
const streamFiles: any[] = [];
|
||||
const resultStream = await gzipArchive.exportToStreamOfStreamFiles();
|
||||
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
resultStream.on('data', (streamFile) => {
|
||||
streamFiles.push(streamFile);
|
||||
});
|
||||
resultStream.on('end', resolve);
|
||||
resultStream.on('error', reject);
|
||||
});
|
||||
|
||||
// Verify we got the expected file
|
||||
expect(streamFiles.length).toBeGreaterThan(0);
|
||||
|
||||
// Read content from the stream file
|
||||
if (streamFiles[0]) {
|
||||
const chunks: Buffer[] = [];
|
||||
const readStream = await streamFiles[0].createReadStream();
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
readStream.on('data', (chunk: Buffer) => chunks.push(chunk));
|
||||
readStream.on('end', resolve);
|
||||
readStream.on('error', reject);
|
||||
});
|
||||
|
||||
const extractedContent = Buffer.concat(chunks).toString();
|
||||
expect(extractedContent).toEqual(testContent);
|
||||
}
|
||||
});
|
||||
|
||||
tap.test('should handle gzip files with original filename in header', async () => {
|
||||
// Test with a real-world gzip file that includes filename in header
|
||||
const testContent = 'File with name in gzip header\n'.repeat(30);
|
||||
const originalFileName = 'original-name.log';
|
||||
const gzipFileName = 'compressed.gz';
|
||||
|
||||
// Create a proper gzip with filename header using Node's zlib
|
||||
const zlib = await import('zlib');
|
||||
const gzipBuffer = await new Promise<Buffer>((resolve, reject) => {
|
||||
zlib.gzip(Buffer.from(testContent), {
|
||||
level: 9,
|
||||
// Note: Node's zlib doesn't support embedding filename directly,
|
||||
// but we can test the extraction anyway
|
||||
}, (err, result) => {
|
||||
if (err) reject(err);
|
||||
else resolve(result);
|
||||
});
|
||||
});
|
||||
|
||||
await plugins.smartfile.memory.toFs(
|
||||
gzipBuffer,
|
||||
plugins.path.join(testPaths.gzipTestDir, gzipFileName)
|
||||
);
|
||||
|
||||
// Test extraction
|
||||
const gzipArchive = await smartarchive.SmartArchive.fromArchiveFile(
|
||||
plugins.path.join(testPaths.gzipTestDir, gzipFileName)
|
||||
);
|
||||
|
||||
const extractPath = plugins.path.join(testPaths.gzipTestDir, 'header-test');
|
||||
await plugins.smartfile.fs.ensureDir(extractPath);
|
||||
// Provide a filename since gzip doesn't reliably contain filename metadata
|
||||
await gzipArchive.exportToFs(extractPath, 'compressed.txt');
|
||||
|
||||
// Check if file was extracted (name might be derived from archive name)
|
||||
const files = await plugins.smartfile.fs.listFileTree(extractPath, '**/*');
|
||||
expect(files.length).toBeGreaterThan(0);
|
||||
|
||||
// Read and verify content
|
||||
const extractedFile = files[0];
|
||||
const extractedContent = await plugins.smartfile.fs.toStringSync(
|
||||
plugins.path.join(extractPath, extractedFile || 'compressed.txt')
|
||||
);
|
||||
expect(extractedContent).toEqual(testContent);
|
||||
});
|
||||
|
||||
tap.test('should handle large gzip files', async () => {
|
||||
// Create a larger test file
|
||||
const largeContent = 'x'.repeat(1024 * 1024); // 1MB of 'x' characters
|
||||
const gzipFileName = 'large-file.txt.gz';
|
||||
|
||||
// Compress the large file
|
||||
const fflate = await import('fflate');
|
||||
const compressed = fflate.gzipSync(Buffer.from(largeContent));
|
||||
await plugins.smartfile.memory.toFs(
|
||||
Buffer.from(compressed),
|
||||
plugins.path.join(testPaths.gzipTestDir, gzipFileName)
|
||||
);
|
||||
|
||||
// Test extraction
|
||||
const gzipArchive = await smartarchive.SmartArchive.fromArchiveFile(
|
||||
plugins.path.join(testPaths.gzipTestDir, gzipFileName)
|
||||
);
|
||||
|
||||
const extractPath = plugins.path.join(testPaths.gzipTestDir, 'large-extracted');
|
||||
await plugins.smartfile.fs.ensureDir(extractPath);
|
||||
// Provide a filename since gzip doesn't contain filename metadata
|
||||
await gzipArchive.exportToFs(extractPath, 'large-file.txt');
|
||||
|
||||
// Verify the extracted content
|
||||
const files = await plugins.smartfile.fs.listFileTree(extractPath, '**/*');
|
||||
expect(files.length).toBeGreaterThan(0);
|
||||
|
||||
const extractedContent = await plugins.smartfile.fs.toStringSync(
|
||||
plugins.path.join(extractPath, files[0] || 'large-file.txt')
|
||||
);
|
||||
expect(extractedContent.length).toEqual(largeContent.length);
|
||||
expect(extractedContent).toEqual(largeContent);
|
||||
});
|
||||
|
||||
tap.test('should handle real-world multi-chunk gzip from URL', async () => {
|
||||
// Test with a real tgz file that will be processed in multiple chunks
|
||||
const testUrl = 'https://registry.npmjs.org/@push.rocks/smartfile/-/smartfile-11.2.7.tgz';
|
||||
|
||||
// Download and extract the archive
|
||||
const testArchive = await smartarchive.SmartArchive.fromArchiveUrl(testUrl);
|
||||
|
||||
const extractPath = plugins.path.join(testPaths.gzipTestDir, 'real-world-test');
|
||||
await plugins.smartfile.fs.ensureDir(extractPath);
|
||||
|
||||
// This will test multi-chunk decompression as the file is larger
|
||||
await testArchive.exportToFs(extractPath);
|
||||
|
||||
// Verify extraction worked
|
||||
const files = await plugins.smartfile.fs.listFileTree(extractPath, '**/*');
|
||||
expect(files.length).toBeGreaterThan(0);
|
||||
|
||||
// Check for expected package structure
|
||||
const hasPackageJson = files.some(f => f.includes('package.json'));
|
||||
expect(hasPackageJson).toBeTrue();
|
||||
});
|
||||
|
||||
// Start the test suite; the runner consumes the default export.
export default tap.start();
|
Reference in New Issue
Block a user