// Files
// containerarchive/test/test.ts
//
// 301 lines
// 10 KiB
// TypeScript

import { tap, expect } from '@git.zone/tstest/tapbundle';
import * as path from 'node:path';
import * as fs from 'node:fs';
import * as stream from 'node:stream';
import { ContainerArchive } from '../ts/index.js';
// Scratch repositories under .nogit/ (gitignored): one plain, one encrypted,
// one reconfigured for zstd compression. Recreated from scratch by the preTask.
const testRepoPath = path.resolve('.nogit/test-repo');
const testRepoEncryptedPath = path.resolve('.nogit/test-repo-encrypted');
const testRepoZstdPath = path.resolve('.nogit/test-repo-zstd');
// Clean up test directories before tests so every run starts from a blank slate.
tap.preTask('cleanup test directories', async () => {
  for (const p of [testRepoPath, testRepoEncryptedPath, testRepoZstdPath]) {
    // force: true makes rmSync a no-op when the path does not exist, which
    // replaces the racy existsSync-then-rmSync check.
    fs.rmSync(p, { recursive: true, force: true });
  }
  // Ensure the parent scratch directory exists for the init() calls below.
  fs.mkdirSync('.nogit', { recursive: true });
});
// ==================== Basic Repository Lifecycle ====================
// Shared repository handle: assigned in the first test and reused by the
// sequential lifecycle tests below (tests are order-dependent by design).
let repo: ContainerArchive;
tap.test('should initialize a new repository', async () => {
  repo = await ContainerArchive.init(testRepoPath);
  expect(repo).toBeTruthy();
  // init() must lay down the expected on-disk structure.
  const expectedEntries = [
    'config.json',
    path.join('packs', 'data'),
    'snapshots',
    'index',
  ];
  for (const entry of expectedEntries) {
    expect(fs.existsSync(path.join(testRepoPath, entry))).toBeTrue();
  }
});
// ==================== Ingest ====================
tap.test('should ingest data and create a snapshot', async () => {
  // 512 KiB of deterministic content: byte value cycles through 0..255.
  const size = 512 * 1024;
  const testData = Buffer.from(Array.from({ length: size }, (_, i) => i % 256));
  const snapshot = await repo.ingest(stream.Readable.from(testData), {
    tags: { service: 'test', type: 'unit-test' },
    items: [{ name: 'test-data.bin', type: 'binary' }],
  });
  expect(snapshot).toBeTruthy();
  expect(snapshot.id).toBeTruthy();
  expect(snapshot.originalSize).toEqual(size);
  expect(snapshot.newChunks).toBeGreaterThan(0);
  expect(snapshot.items.length).toEqual(1);
  expect(snapshot.items[0].name).toEqual('test-data.bin');
});
// ==================== Dedup ====================
tap.test('should deduplicate on second ingest of same data', async () => {
  // Re-ingest an identical 512 KiB cycling-byte payload; every chunk should
  // already exist in the pack store, so nothing new is written.
  const size = 512 * 1024;
  const duplicate = Buffer.from(Array.from({ length: size }, (_, i) => i % 256));
  const snapshot = await repo.ingest(stream.Readable.from(duplicate), {
    tags: { service: 'test', type: 'dedup-test' },
    items: [{ name: 'test-data-dup.bin', type: 'binary' }],
  });
  expect(snapshot).toBeTruthy();
  expect(snapshot.newChunks).toEqual(0);
  expect(snapshot.reusedChunks).toBeGreaterThan(0);
});
// ==================== List Snapshots ====================
tap.test('should list snapshots', async () => {
  // Two snapshots exist at this point: the initial ingest and the dedup ingest.
  const all = await repo.listSnapshots();
  expect(all.length).toEqual(2);
});
tap.test('should filter snapshots by tag', async () => {
  // Only the second (dedup) snapshot carries type=dedup-test.
  const matching = await repo.listSnapshots({ tags: { type: 'dedup-test' } });
  expect(matching.length).toEqual(1);
  expect(matching[0].tags.type).toEqual('dedup-test');
});
// ==================== Restore ====================
tap.test('should restore data byte-for-byte', async () => {
  const snapshots = await repo.listSnapshots();
  // NOTE(review): assumes listSnapshots() returns newest-first, so the last
  // entry is the oldest snapshot — confirm against the implementation.
  const snapshotId = snapshots[snapshots.length - 1].id; // oldest
  const restoreStream = await repo.restore(snapshotId);
  // Readable streams are async iterables; for-await collects chunks and
  // propagates stream errors as rejections, replacing the manual
  // new Promise + data/end/error listener wiring.
  const chunks: Buffer[] = [];
  for await (const chunk of restoreStream) {
    chunks.push(chunk as Buffer);
  }
  const restored = Buffer.concat(chunks);
  // Rebuild the expected 512 KiB cycling-byte pattern from the first ingest.
  const expected = Buffer.alloc(512 * 1024);
  for (let i = 0; i < expected.length; i++) {
    expected[i] = i % 256;
  }
  expect(restored.length).toEqual(expected.length);
  expect(restored.equals(expected)).toBeTrue();
});
// ==================== Multi-Item Ingest ====================
tap.test('should ingest multiple items in one snapshot', async () => {
  const sqlBytes = Buffer.alloc(64 * 1024, 'item-one-data');
  const tarBytes = Buffer.alloc(32 * 1024, 'item-two-data');
  const snapshot = await repo.ingestMulti(
    [
      { stream: stream.Readable.from(sqlBytes), name: 'database.sql', type: 'database-dump' },
      { stream: stream.Readable.from(tarBytes), name: 'config.tar', type: 'volume-tar' },
    ],
    { tags: { type: 'multi-test' } },
  );
  expect(snapshot).toBeTruthy();
  expect(snapshot.items.length).toEqual(2);
  // Items should be recorded in ingest order with their original sizes.
  const [first, second] = snapshot.items;
  expect(first.name).toEqual('database.sql');
  expect(second.name).toEqual('config.tar');
  expect(first.size).toEqual(64 * 1024);
  expect(second.size).toEqual(32 * 1024);
});
tap.test('should restore specific item from multi-item snapshot', async () => {
  const snapshots = await repo.listSnapshots({ tags: { type: 'multi-test' } });
  expect(snapshots.length).toEqual(1);
  // Restrict the restore to one named item instead of the whole snapshot.
  const restoreStream = await repo.restore(snapshots[0].id, { item: 'config.tar' });
  // Collect via async iteration — the Readable is async-iterable, so the
  // manual Promise + event-listener wiring is unnecessary.
  const chunks: Buffer[] = [];
  for await (const chunk of restoreStream) {
    chunks.push(chunk as Buffer);
  }
  const restored = Buffer.concat(chunks);
  const expected = Buffer.alloc(32 * 1024, 'item-two-data');
  expect(restored.length).toEqual(expected.length);
  expect(restored.equals(expected)).toBeTrue();
});
// ==================== Verify ====================
tap.test('should verify repository at quick level', async () => {
  const { ok, errors } = await repo.verify({ level: 'quick' });
  expect(ok).toBeTrue();
  expect(errors.length).toEqual(0);
});
tap.test('should verify repository at standard level', async () => {
  const { ok } = await repo.verify({ level: 'standard' });
  expect(ok).toBeTrue();
});
tap.test('should verify repository at full level', async () => {
  // Full verification reads chunk data, so chunksChecked must be non-zero.
  const result = await repo.verify({ level: 'full' });
  expect(result.ok).toBeTrue();
  expect(result.stats.chunksChecked).toBeGreaterThan(0);
});
// ==================== Prune ====================
tap.test('should prune with keepLast=1', async () => {
  const countBefore = (await repo.listSnapshots()).length;
  const result = await repo.prune({ keepLast: 1 });
  // Everything except the most recent snapshot should have been removed.
  expect(result.removedSnapshots).toEqual(countBefore - 1);
  expect(result.dryRun).toBeFalse();
  const remaining = await repo.listSnapshots();
  expect(remaining.length).toEqual(1);
});
// ==================== Close ====================
tap.test('should close repository', async () => {
// Releases repository resources so the same path can be reopened below.
await repo.close();
});
// ==================== Reopen ====================
tap.test('should reopen repository', async () => {
  // State must survive a close/open cycle: the single pruned-down snapshot remains.
  repo = await ContainerArchive.open(testRepoPath);
  expect((await repo.listSnapshots()).length).toEqual(1);
  await repo.close();
});
// ==================== Encrypted Repository ====================
tap.test('should create and use encrypted repository', async () => {
  const encRepo = await ContainerArchive.init(testRepoEncryptedPath, {
    passphrase: 'test-password-123',
  });
  // Verify exactly one key file was created under keys/.
  // (readdirSync returns string[], so the callback parameter needs no annotation.)
  const keysDir = path.join(testRepoEncryptedPath, 'keys');
  const keyFiles = fs.readdirSync(keysDir).filter((f) => f.endsWith('.key'));
  expect(keyFiles.length).toEqual(1);
  // Ingest data
  const testData = Buffer.alloc(128 * 1024, 'encrypted-test-data');
  const snapshot = await encRepo.ingest(stream.Readable.from(testData), {
    tags: { encrypted: 'true' },
    items: [{ name: 'secret.bin' }],
  });
  expect(snapshot.newChunks).toBeGreaterThan(0);
  // Restore and verify the round-trip. The Readable is async-iterable, so
  // for-await replaces the manual Promise + data/end/error listener wiring.
  const restoreStream = await encRepo.restore(snapshot.id);
  const chunks: Buffer[] = [];
  for await (const chunk of restoreStream) {
    chunks.push(chunk as Buffer);
  }
  const restored = Buffer.concat(chunks);
  expect(restored.length).toEqual(testData.length);
  expect(restored.equals(testData)).toBeTrue();
  await encRepo.close();
});
tap.test('should open encrypted repository with correct passphrase', async () => {
  // The passphrase used at init time must unlock the repository on reopen.
  const encRepo = await ContainerArchive.open(testRepoEncryptedPath, {
    passphrase: 'test-password-123',
  });
  expect((await encRepo.listSnapshots()).length).toEqual(1);
  await encRepo.close();
});
// ==================== Zstd Compression ====================
tap.test('should work with zstd compression', async () => {
  // Init repo — the config.json will have compression: "gzip" by default.
  // To test zstd, we manually update the config after init.
  const zstdRepo = await ContainerArchive.init(testRepoZstdPath);
  await zstdRepo.close();
  // Patch config.json to use zstd
  const configPath = path.join(testRepoZstdPath, 'config.json');
  const config = JSON.parse(fs.readFileSync(configPath, 'utf-8'));
  config.compression = 'zstd';
  fs.writeFileSync(configPath, JSON.stringify(config, null, 2));
  // Reopen with zstd config
  const repo2 = await ContainerArchive.open(testRepoZstdPath);
  // Ingest 256 KiB of repeating (highly compressible) data.
  const testData = Buffer.alloc(256 * 1024, 'zstd-compressed-data');
  const snapshot = await repo2.ingest(stream.Readable.from(testData), {
    tags: { compression: 'zstd' },
    items: [{ name: 'zstd-data.bin' }],
  });
  expect(snapshot.newChunks).toBeGreaterThan(0);
  // Restore and verify byte-for-byte; for-await over the async-iterable
  // Readable replaces the manual Promise + event-listener wiring.
  const restoreStream = await repo2.restore(snapshot.id);
  const chunks: Buffer[] = [];
  for await (const chunk of restoreStream) {
    chunks.push(chunk as Buffer);
  }
  const restored = Buffer.concat(chunks);
  expect(restored.length).toEqual(testData.length);
  expect(restored.equals(testData)).toBeTrue();
  // Full verify re-reads every chunk through the zstd decompressor.
  const verifyResult = await repo2.verify({ level: 'full' });
  expect(verifyResult.ok).toBeTrue();
  await repo2.close();
});
export default tap.start();