// smartarchive/test/test.gzip.ts

import { tap, expect } from '@git.zone/tstest/tapbundle';
import * as plugins from './plugins.js';
import * as smartarchive from '../ts/index.js';
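
// Tests for gzip (and tgz) handling in SmartArchive: round-trip compress/extract,
// stream-based extraction, header handling, large payloads, and fully in-memory
// processing of a real npm tarball.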
const testPaths = {
  nogitDir: plugins.path.join(
    plugins.smartpath.get.dirnameFromImportMetaUrl(import.meta.url),
    '../.nogit/',
  ),
  gzipTestDir: plugins.path.join(
    plugins.smartpath.get.dirnameFromImportMetaUrl(import.meta.url),
    '../.nogit/gzip-test',
  ),
};

tap.preTask('should prepare test directories', async () => {
  await plugins.smartfile.fs.ensureDir(testPaths.gzipTestDir);
});
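
// Small local helper (not part of the original API surface): collect a readable
// stream into a single Buffer. Several tests below read extracted StreamFiles
// back into memory this way.
const streamToBuffer = (readStream: NodeJS.ReadableStream): Promise<Buffer> =>
  new Promise<Buffer>((resolve, reject) => {
    const chunks: Buffer[] = [];
    readStream.on('data', (chunk: Buffer) => chunks.push(chunk));
    readStream.on('end', () => resolve(Buffer.concat(chunks)));
    readStream.on('error', reject);
  });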

tap.test('should create and extract a gzip file', async () => {
  // Create test data
  const testContent = 'This is a test file for gzip compression and decompression.\n'.repeat(100);
  const testFileName = 'test-file.txt';
  const gzipFileName = 'test-file.txt.gz';

  // Write the original file
  await plugins.smartfile.memory.toFs(
    testContent,
    plugins.path.join(testPaths.gzipTestDir, testFileName)
  );

  // Create a gzip-compressed version using fflate directly
  const fflate = await import('fflate');
  const compressed = fflate.gzipSync(Buffer.from(testContent));
  await plugins.smartfile.memory.toFs(
    Buffer.from(compressed),
    plugins.path.join(testPaths.gzipTestDir, gzipFileName)
  );

  // Now test extraction using SmartArchive
  const gzipArchive = await smartarchive.SmartArchive.fromArchiveFile(
    plugins.path.join(testPaths.gzipTestDir, gzipFileName)
  );

  // Export to a new location
  const extractPath = plugins.path.join(testPaths.gzipTestDir, 'extracted');
  await plugins.smartfile.fs.ensureDir(extractPath);

  // Provide a filename since gzip doesn't contain filename metadata
  await gzipArchive.exportToFs(extractPath, 'test-file.txt');

  // Read the extracted file
  const extractedContent = await plugins.smartfile.fs.toStringSync(
    plugins.path.join(extractPath, 'test-file.txt')
  );

  // Verify the content matches
  expect(extractedContent).toEqual(testContent);
});

tap.test('should handle gzip stream extraction', async () => {
  // Create test data
  const testContent = 'Stream test data for gzip\n'.repeat(50);
  const gzipFileName = 'stream-test.txt.gz';

  // Create a gzip-compressed version
  const fflate = await import('fflate');
  const compressed = fflate.gzipSync(Buffer.from(testContent));
  await plugins.smartfile.memory.toFs(
    Buffer.from(compressed),
    plugins.path.join(testPaths.gzipTestDir, gzipFileName)
  );

  // Create a read stream for the gzip file
  const gzipStream = plugins.smartfile.fsStream.createReadStream(
    plugins.path.join(testPaths.gzipTestDir, gzipFileName)
  );

  // Test extraction using SmartArchive from a stream
  const gzipArchive = await smartarchive.SmartArchive.fromArchiveStream(gzipStream);

  // Export to a stream of StreamFiles and collect the result
  const streamFiles: plugins.smartfile.StreamFile[] = [];
  const resultStream = await gzipArchive.exportToStreamOfStreamFiles();
  await new Promise<void>((resolve, reject) => {
    resultStream.on('data', (streamFile: plugins.smartfile.StreamFile) => {
      streamFiles.push(streamFile);
    });
    resultStream.on('end', resolve);
    resultStream.on('error', reject);
  });

  // Verify we got the expected file
  expect(streamFiles.length).toBeGreaterThan(0);

  // Read content from the stream file
  if (streamFiles[0]) {
    const readStream = await streamFiles[0].createReadStream();
    const extractedContent = (await streamToBuffer(readStream)).toString();
    expect(extractedContent).toEqual(testContent);
  }
});

tap.test('should handle gzip files with original filename in header', async () => {
  // Test with a gzip file that would carry a filename in its header
  const testContent = 'File with name in gzip header\n'.repeat(30);
  const originalFileName = 'original-name.log';
  const gzipFileName = 'compressed.gz';

  // Create a gzip file using Node's zlib
  const zlib = await import('zlib');
  const gzipBuffer = await new Promise<Buffer>((resolve, reject) => {
    zlib.gzip(Buffer.from(testContent), {
      level: 9,
      // Note: Node's zlib doesn't support embedding the filename directly,
      // but we can test the extraction anyway
    }, (err, result) => {
      if (err) reject(err);
      else resolve(result);
    });
  });
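
  // Aside (sketch, not exercised here): if embedding the original filename in
  // the gzip FNAME header field matters, fflate's gzip options accept a
  // `filename`, e.g.:
  //   fflate.gzipSync(Buffer.from(testContent), { filename: originalFileName });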

  await plugins.smartfile.memory.toFs(
    gzipBuffer,
    plugins.path.join(testPaths.gzipTestDir, gzipFileName)
  );

  // Test extraction
  const gzipArchive = await smartarchive.SmartArchive.fromArchiveFile(
    plugins.path.join(testPaths.gzipTestDir, gzipFileName)
  );
  const extractPath = plugins.path.join(testPaths.gzipTestDir, 'header-test');
  await plugins.smartfile.fs.ensureDir(extractPath);

  // Provide a filename since gzip doesn't reliably contain filename metadata
  await gzipArchive.exportToFs(extractPath, 'compressed.txt');

  // Check that a file was extracted (the name might be derived from the archive name)
  const files = await plugins.smartfile.fs.listFileTree(extractPath, '**/*');
  expect(files.length).toBeGreaterThan(0);

  // Read and verify the content
  const extractedFile = files[0];
  const extractedContent = await plugins.smartfile.fs.toStringSync(
    plugins.path.join(extractPath, extractedFile || 'compressed.txt')
  );
  expect(extractedContent).toEqual(testContent);
});

tap.test('should handle large gzip files', async () => {
  // Create a larger test file
  const largeContent = 'x'.repeat(1024 * 1024); // 1 MiB of 'x' characters
  const gzipFileName = 'large-file.txt.gz';

  // Compress the large file
  const fflate = await import('fflate');
  const compressed = fflate.gzipSync(Buffer.from(largeContent));
  await plugins.smartfile.memory.toFs(
    Buffer.from(compressed),
    plugins.path.join(testPaths.gzipTestDir, gzipFileName)
  );

  // Test extraction
  const gzipArchive = await smartarchive.SmartArchive.fromArchiveFile(
    plugins.path.join(testPaths.gzipTestDir, gzipFileName)
  );
  const extractPath = plugins.path.join(testPaths.gzipTestDir, 'large-extracted');
  await plugins.smartfile.fs.ensureDir(extractPath);

  // Provide a filename since gzip doesn't contain filename metadata
  await gzipArchive.exportToFs(extractPath, 'large-file.txt');

  // Verify the extracted content
  const files = await plugins.smartfile.fs.listFileTree(extractPath, '**/*');
  expect(files.length).toBeGreaterThan(0);
  const extractedContent = await plugins.smartfile.fs.toStringSync(
    plugins.path.join(extractPath, files[0] || 'large-file.txt')
  );
  expect(extractedContent.length).toEqual(largeContent.length);
  expect(extractedContent).toEqual(largeContent);
});

tap.test('should handle real-world multi-chunk gzip from URL', async () => {
  // Test with a real tgz file that will be processed in multiple chunks
  const testUrl = 'https://registry.npmjs.org/@push.rocks/smartfile/-/smartfile-11.2.7.tgz';
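  // Note: this test needs network access to the public npm registry.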

  // Download and extract the archive
  const testArchive = await smartarchive.SmartArchive.fromArchiveUrl(testUrl);
  const extractPath = plugins.path.join(testPaths.gzipTestDir, 'real-world-test');
  await plugins.smartfile.fs.ensureDir(extractPath);

  // This exercises multi-chunk decompression, since the file is larger
  await testArchive.exportToFs(extractPath);

  // Verify extraction worked
  const files = await plugins.smartfile.fs.listFileTree(extractPath, '**/*');
  expect(files.length).toBeGreaterThan(0);

  // Check for the expected package structure
  const hasPackageJson = files.some(f => f.includes('package.json'));
  expect(hasPackageJson).toBeTrue();

  // Read and verify package.json content
  const packageJsonPath = files.find(f => f.includes('package.json'));
  if (packageJsonPath) {
    const packageJsonContent = await plugins.smartfile.fs.toStringSync(
      plugins.path.join(extractPath, packageJsonPath)
    );
    const packageJson = JSON.parse(packageJsonContent);
    expect(packageJson.name).toEqual('@push.rocks/smartfile');
    expect(packageJson.version).toEqual('11.2.7');
  }

  // Read and verify a TypeScript file
  const tsFilePath = files.find(f => f.endsWith('.ts'));
  if (tsFilePath) {
    const tsFileContent = await plugins.smartfile.fs.toStringSync(
      plugins.path.join(extractPath, tsFilePath)
    );
    // TypeScript files should have content
    expect(tsFileContent.length).toBeGreaterThan(10);
    console.log(`  ✓ TypeScript file ${tsFilePath} has ${tsFileContent.length} bytes`);
  }

  // Read and verify the license file
  const licensePath = files.find(f => f.includes('license'));
  if (licensePath) {
    const licenseContent = await plugins.smartfile.fs.toStringSync(
      plugins.path.join(extractPath, licensePath)
    );
    expect(licenseContent).toContain('MIT');
  }

  // Verify we can read multiple files without corruption
  const readableFiles = files.filter(f =>
    f.endsWith('.json') || f.endsWith('.md') || f.endsWith('.ts') || f.endsWith('.js')
  ).slice(0, 5); // Test the first 5 readable files
  for (const file of readableFiles) {
    const content = await plugins.smartfile.fs.toStringSync(
      plugins.path.join(extractPath, file)
    );
    expect(content).toBeDefined();
    expect(content.length).toBeGreaterThan(0);
    console.log(`  ✓ Successfully read ${file} (${content.length} bytes)`);
  }
});

tap.test('should handle gzip extraction fully in memory', async () => {
  // Create test data in memory
  const testContent = 'This is test data for in-memory gzip processing\n'.repeat(100);

  // Compress using fflate in memory
  const fflate = await import('fflate');
  const compressed = fflate.gzipSync(Buffer.from(testContent));

  // Create a stream from the compressed data
  const { Readable } = await import('stream');
  const compressedStream = Readable.from(Buffer.from(compressed));

  // Process through SmartArchive without touching the filesystem
  const gzipArchive = await smartarchive.SmartArchive.fromArchiveStream(compressedStream);

  // Export to a stream of StreamFiles (in memory)
  const streamFiles: plugins.smartfile.StreamFile[] = [];
  const resultStream = await gzipArchive.exportToStreamOfStreamFiles();
  await new Promise<void>((resolve, reject) => {
    resultStream.on('data', (streamFile: plugins.smartfile.StreamFile) => {
      streamFiles.push(streamFile);
    });
    resultStream.on('end', resolve);
    resultStream.on('error', reject);
  });

  // Verify we got a file
  expect(streamFiles.length).toBeGreaterThan(0);

  // Read the content from memory, without the filesystem
  const firstFile = streamFiles[0];
  const readStream = await firstFile.createReadStream();
  const extractedContent = (await streamToBuffer(readStream)).toString();
  expect(extractedContent).toEqual(testContent);
  console.log(`  ✓ In-memory extraction successful (${extractedContent.length} bytes)`);
});

tap.test('should handle real tgz file fully in memory', async (tools) => {
  await tools.timeout(10000); // Set a 10-second timeout
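  // Note: this test needs network access to the public npm registry.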

  // Download the tgz file into memory
  const response = await plugins.smartrequest.SmartRequest.create()
    .url('https://registry.npmjs.org/@push.rocks/smartfile/-/smartfile-11.2.7.tgz')
    .get();
  const tgzBuffer = Buffer.from(await response.arrayBuffer());
  console.log(`  Downloaded ${tgzBuffer.length} bytes into memory`);

  // Create a stream from the buffer
  const { Readable } = await import('stream');
  const tgzStream = Readable.from(tgzBuffer);

  // Process through SmartArchive in memory
  const archive = await smartarchive.SmartArchive.fromArchiveStream(tgzStream);

  // Export to a stream of StreamFiles (in memory)
  const streamFiles: plugins.smartfile.StreamFile[] = [];
  const resultStream = await archive.exportToStreamOfStreamFiles();
  await new Promise<void>((resolve, reject) => {
    let timeout: NodeJS.Timeout;
    const cleanup = () => {
      clearTimeout(timeout);
    };
    timeout = setTimeout(() => {
      cleanup();
      resolve(); // Resolve after the timeout if the stream doesn't end
    }, 5000);
    resultStream.on('data', (streamFile: plugins.smartfile.StreamFile) => {
      streamFiles.push(streamFile);
    });
    resultStream.on('end', () => {
      cleanup();
      resolve();
    });
    resultStream.on('error', (err) => {
      cleanup();
      reject(err);
    });
  });
  console.log(`  Extracted ${streamFiles.length} files in memory`);

  // At minimum we should have extracted something
  expect(streamFiles.length).toBeGreaterThan(0);

  // Find and read package.json from memory
  const packageJsonFile = streamFiles.find(f => f.relativeFilePath?.includes('package.json'));
  if (packageJsonFile) {
    const readStream = await packageJsonFile.createReadStream();
    const packageJsonContent = (await streamToBuffer(readStream)).toString();
    const packageJson = JSON.parse(packageJsonContent);
    expect(packageJson.name).toEqual('@push.rocks/smartfile');
    expect(packageJson.version).toEqual('11.2.7');
    console.log(`  ✓ Read package.json from memory: ${packageJson.name}@${packageJson.version}`);
  }

  // Read a few more files to verify integrity
  const filesToCheck = streamFiles.slice(0, 3);
  for (const file of filesToCheck) {
    const readStream = await file.createReadStream();
    const content = await streamToBuffer(readStream);
    expect(content.length).toBeGreaterThan(0);
    console.log(`  ✓ Read ${file.relativeFilePath} from memory (${content.length} bytes)`);
  }
});

export default tap.start();