// ERR-08: file system error handling tests
import { expect, tap } from '@git.zone/tstest/tapbundle';
|
||
|
import * as einvoice from '../../../ts/index.js';
|
||
|
import * as plugins from '../../plugins.js';
|
||
|
import { PerformanceTracker } from '../../helpers/performance.tracker.js';
|
||
|
|
||
|
tap.test('ERR-08: File System Errors - Handle file I/O failures gracefully', async (t) => {
|
||
|
const performanceTracker = new PerformanceTracker('ERR-08');
|
||
|
const testDir = '.nogit/filesystem-errors';
|
||
|
|
||
|
await t.test('File permission errors', async () => {
|
||
|
performanceTracker.startOperation('permission-errors');
|
||
|
|
||
|
await plugins.fs.ensureDir(testDir);
|
||
|
|
||
|
const permissionTests = [
|
||
|
{
|
||
|
name: 'Read-only file write attempt',
|
||
|
setup: async () => {
|
||
|
const filePath = plugins.path.join(testDir, 'readonly.xml');
|
||
|
await plugins.fs.writeFile(filePath, '<invoice></invoice>');
|
||
|
await plugins.fs.chmod(filePath, 0o444); // Read-only
|
||
|
return filePath;
|
||
|
},
|
||
|
operation: async (filePath: string) => {
|
||
|
await plugins.fs.writeFile(filePath, '<invoice>Updated</invoice>');
|
||
|
},
|
||
|
expectedError: /permission|read.?only|access denied/i,
|
||
|
cleanup: async (filePath: string) => {
|
||
|
await plugins.fs.chmod(filePath, 0o644); // Restore permissions
|
||
|
await plugins.fs.remove(filePath);
|
||
|
}
|
||
|
},
|
||
|
{
|
||
|
name: 'No execute permission on directory',
|
||
|
setup: async () => {
|
||
|
const dirPath = plugins.path.join(testDir, 'no-exec');
|
||
|
await plugins.fs.ensureDir(dirPath);
|
||
|
await plugins.fs.chmod(dirPath, 0o644); // No execute permission
|
||
|
return dirPath;
|
||
|
},
|
||
|
operation: async (dirPath: string) => {
|
||
|
await plugins.fs.readdir(dirPath);
|
||
|
},
|
||
|
expectedError: /permission|access denied|cannot read/i,
|
||
|
cleanup: async (dirPath: string) => {
|
||
|
await plugins.fs.chmod(dirPath, 0o755); // Restore permissions
|
||
|
await plugins.fs.remove(dirPath);
|
||
|
}
|
||
|
}
|
||
|
];
|
||
|
|
||
|
for (const test of permissionTests) {
|
||
|
const startTime = performance.now();
|
||
|
let resource: string | null = null;
|
||
|
|
||
|
try {
|
||
|
resource = await test.setup();
|
||
|
await test.operation(resource);
|
||
|
console.log(`✗ ${test.name}: Operation succeeded when it should have failed`);
|
||
|
} catch (error) {
|
||
|
expect(error).toBeTruthy();
|
||
|
expect(error.message.toLowerCase()).toMatch(test.expectedError);
|
||
|
console.log(`✓ ${test.name}: ${error.message}`);
|
||
|
} finally {
|
||
|
if (resource && test.cleanup) {
|
||
|
try {
|
||
|
await test.cleanup(resource);
|
||
|
} catch (cleanupError) {
|
||
|
console.log(` Cleanup warning: ${cleanupError.message}`);
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
|
||
|
performanceTracker.recordMetric('permission-test', performance.now() - startTime);
|
||
|
}
|
||
|
|
||
|
performanceTracker.endOperation('permission-errors');
|
||
|
});
|
||
|
|
||
|
await t.test('Disk space errors', async () => {
|
||
|
performanceTracker.startOperation('disk-space');
|
||
|
|
||
|
class DiskSpaceSimulator {
|
||
|
private usedSpace = 0;
|
||
|
private readonly totalSpace = 1024 * 1024 * 100; // 100MB
|
||
|
private readonly reservedSpace = 1024 * 1024 * 10; // 10MB reserved
|
||
|
|
||
|
async checkSpace(requiredBytes: number): Promise<void> {
|
||
|
const availableSpace = this.totalSpace - this.usedSpace - this.reservedSpace;
|
||
|
|
||
|
if (requiredBytes > availableSpace) {
|
||
|
throw new Error(`Insufficient disk space: ${requiredBytes} bytes required, ${availableSpace} bytes available`);
|
||
|
}
|
||
|
}
|
||
|
|
||
|
async allocate(bytes: number): Promise<void> {
|
||
|
await this.checkSpace(bytes);
|
||
|
this.usedSpace += bytes;
|
||
|
}
|
||
|
|
||
|
free(bytes: number): void {
|
||
|
this.usedSpace = Math.max(0, this.usedSpace - bytes);
|
||
|
}
|
||
|
|
||
|
getStats() {
|
||
|
return {
|
||
|
total: this.totalSpace,
|
||
|
used: this.usedSpace,
|
||
|
available: this.totalSpace - this.usedSpace - this.reservedSpace,
|
||
|
percentUsed: Math.round((this.usedSpace / this.totalSpace) * 100)
|
||
|
};
|
||
|
}
|
||
|
}
|
||
|
|
||
|
const diskSimulator = new DiskSpaceSimulator();
|
||
|
|
||
|
const spaceTests = [
|
||
|
{
|
||
|
name: 'Large file write',
|
||
|
size: 1024 * 1024 * 50, // 50MB
|
||
|
shouldSucceed: true
|
||
|
},
|
||
|
{
|
||
|
name: 'Exceeding available space',
|
||
|
size: 1024 * 1024 * 200, // 200MB
|
||
|
shouldSucceed: false
|
||
|
},
|
||
|
{
|
||
|
name: 'Multiple small files',
|
||
|
count: 100,
|
||
|
size: 1024 * 100, // 100KB each
|
||
|
shouldSucceed: true
|
||
|
}
|
||
|
];
|
||
|
|
||
|
for (const test of spaceTests) {
|
||
|
const startTime = performance.now();
|
||
|
|
||
|
try {
|
||
|
if (test.count) {
|
||
|
// Multiple files
|
||
|
for (let i = 0; i < test.count; i++) {
|
||
|
await diskSimulator.allocate(test.size);
|
||
|
}
|
||
|
console.log(`✓ ${test.name}: Allocated ${test.count} files of ${test.size} bytes each`);
|
||
|
} else {
|
||
|
// Single file
|
||
|
await diskSimulator.allocate(test.size);
|
||
|
console.log(`✓ ${test.name}: Allocated ${test.size} bytes`);
|
||
|
}
|
||
|
|
||
|
if (!test.shouldSucceed) {
|
||
|
console.log(` ✗ Should have failed due to insufficient space`);
|
||
|
}
|
||
|
} catch (error) {
|
||
|
if (!test.shouldSucceed) {
|
||
|
console.log(`✓ ${test.name}: Correctly failed - ${error.message}`);
|
||
|
} else {
|
||
|
console.log(`✗ ${test.name}: Unexpected failure - ${error.message}`);
|
||
|
}
|
||
|
}
|
||
|
|
||
|
console.log(` Disk stats:`, diskSimulator.getStats());
|
||
|
|
||
|
performanceTracker.recordMetric('disk-space-test', performance.now() - startTime);
|
||
|
}
|
||
|
|
||
|
performanceTracker.endOperation('disk-space');
|
||
|
});
|
||
|
|
||
|
await t.test('File locking errors', async () => {
|
||
|
performanceTracker.startOperation('file-locking');
|
||
|
|
||
|
class FileLock {
|
||
|
private locks = new Map<string, { pid: number; acquired: Date; exclusive: boolean }>();
|
||
|
|
||
|
async acquireLock(filepath: string, exclusive = true): Promise<void> {
|
||
|
const existingLock = this.locks.get(filepath);
|
||
|
|
||
|
if (existingLock) {
|
||
|
if (existingLock.exclusive || exclusive) {
|
||
|
throw new Error(`File is locked by process ${existingLock.pid} since ${existingLock.acquired.toISOString()}`);
|
||
|
}
|
||
|
}
|
||
|
|
||
|
this.locks.set(filepath, {
|
||
|
pid: process.pid,
|
||
|
acquired: new Date(),
|
||
|
exclusive
|
||
|
});
|
||
|
}
|
||
|
|
||
|
releaseLock(filepath: string): void {
|
||
|
this.locks.delete(filepath);
|
||
|
}
|
||
|
|
||
|
isLocked(filepath: string): boolean {
|
||
|
return this.locks.has(filepath);
|
||
|
}
|
||
|
}
|
||
|
|
||
|
const fileLock = new FileLock();
|
||
|
const testFile = 'invoice.xml';
|
||
|
|
||
|
// Test exclusive lock
|
||
|
try {
|
||
|
await fileLock.acquireLock(testFile, true);
|
||
|
console.log('✓ Acquired exclusive lock');
|
||
|
|
||
|
// Try to acquire again
|
||
|
try {
|
||
|
await fileLock.acquireLock(testFile, false);
|
||
|
console.log('✗ Should not be able to acquire lock on exclusively locked file');
|
||
|
} catch (error) {
|
||
|
console.log(`✓ Lock conflict detected: ${error.message}`);
|
||
|
}
|
||
|
|
||
|
fileLock.releaseLock(testFile);
|
||
|
console.log('✓ Released lock');
|
||
|
} catch (error) {
|
||
|
console.log(`✗ Failed to acquire initial lock: ${error.message}`);
|
||
|
}
|
||
|
|
||
|
// Test shared locks
|
||
|
try {
|
||
|
await fileLock.acquireLock(testFile, false);
|
||
|
console.log('✓ Acquired shared lock');
|
||
|
|
||
|
await fileLock.acquireLock(testFile, false);
|
||
|
console.log('✓ Acquired second shared lock');
|
||
|
|
||
|
try {
|
||
|
await fileLock.acquireLock(testFile, true);
|
||
|
console.log('✗ Should not be able to acquire exclusive lock on shared file');
|
||
|
} catch (error) {
|
||
|
console.log(`✓ Exclusive lock blocked: ${error.message}`);
|
||
|
}
|
||
|
} catch (error) {
|
||
|
console.log(`✗ Shared lock test failed: ${error.message}`);
|
||
|
}
|
||
|
|
||
|
performanceTracker.endOperation('file-locking');
|
||
|
});
|
||
|
|
||
|
await t.test('Path-related errors', async () => {
|
||
|
performanceTracker.startOperation('path-errors');
|
||
|
|
||
|
const pathTests = [
|
||
|
{
|
||
|
name: 'Path too long',
|
||
|
path: 'a'.repeat(300) + '.xml',
|
||
|
expectedError: /path.*too long|name too long/i
|
||
|
},
|
||
|
{
|
||
|
name: 'Invalid characters',
|
||
|
path: 'invoice<>:|?.xml',
|
||
|
expectedError: /invalid.*character|illegal character/i
|
||
|
},
|
||
|
{
|
||
|
name: 'Reserved filename (Windows)',
|
||
|
path: 'CON.xml',
|
||
|
expectedError: /reserved|invalid.*name/i
|
||
|
},
|
||
|
{
|
||
|
name: 'Directory traversal attempt',
|
||
|
path: '../../../etc/passwd',
|
||
|
expectedError: /invalid path|security|traversal/i
|
||
|
},
|
||
|
{
|
||
|
name: 'Null bytes in path',
|
||
|
path: 'invoice\x00.xml',
|
||
|
expectedError: /invalid|null/i
|
||
|
}
|
||
|
];
|
||
|
|
||
|
for (const test of pathTests) {
|
||
|
const startTime = performance.now();
|
||
|
|
||
|
try {
|
||
|
// Validate path
|
||
|
if (test.path.length > 255) {
|
||
|
throw new Error('Path too long');
|
||
|
}
|
||
|
|
||
|
if (/[<>:|?*]/.test(test.path)) {
|
||
|
throw new Error('Invalid characters in path');
|
||
|
}
|
||
|
|
||
|
if (/^(CON|PRN|AUX|NUL|COM[1-9]|LPT[1-9])(\.|$)/i.test(test.path)) {
|
||
|
throw new Error('Reserved filename');
|
||
|
}
|
||
|
|
||
|
if (test.path.includes('..')) {
|
||
|
throw new Error('Directory traversal detected');
|
||
|
}
|
||
|
|
||
|
if (test.path.includes('\x00')) {
|
||
|
throw new Error('Null byte in path');
|
||
|
}
|
||
|
|
||
|
console.log(`✗ ${test.name}: Path validation passed when it should have failed`);
|
||
|
} catch (error) {
|
||
|
expect(error.message.toLowerCase()).toMatch(test.expectedError);
|
||
|
console.log(`✓ ${test.name}: ${error.message}`);
|
||
|
}
|
||
|
|
||
|
performanceTracker.recordMetric('path-validation', performance.now() - startTime);
|
||
|
}
|
||
|
|
||
|
performanceTracker.endOperation('path-errors');
|
||
|
});
|
||
|
|
||
|
await t.test('File handle exhaustion', async () => {
|
||
|
performanceTracker.startOperation('handle-exhaustion');
|
||
|
|
||
|
const tempFiles: string[] = [];
|
||
|
const maxHandles = 20;
|
||
|
const handles: any[] = [];
|
||
|
|
||
|
try {
|
||
|
// Create temp files
|
||
|
for (let i = 0; i < maxHandles; i++) {
|
||
|
const filePath = plugins.path.join(testDir, `temp${i}.xml`);
|
||
|
await plugins.fs.writeFile(filePath, `<invoice id="${i}"></invoice>`);
|
||
|
tempFiles.push(filePath);
|
||
|
}
|
||
|
|
||
|
// Open many file handles without closing
|
||
|
for (let i = 0; i < maxHandles; i++) {
|
||
|
try {
|
||
|
const handle = await plugins.fs.open(tempFiles[i], 'r');
|
||
|
handles.push(handle);
|
||
|
} catch (error) {
|
||
|
console.log(`✓ File handle limit reached at ${i} handles: ${error.message}`);
|
||
|
break;
|
||
|
}
|
||
|
}
|
||
|
|
||
|
if (handles.length === maxHandles) {
|
||
|
console.log(`⚠️ Opened ${maxHandles} handles without hitting limit`);
|
||
|
}
|
||
|
|
||
|
} finally {
|
||
|
// Cleanup: close handles
|
||
|
for (const handle of handles) {
|
||
|
try {
|
||
|
await handle.close();
|
||
|
} catch (e) {
|
||
|
// Ignore close errors
|
||
|
}
|
||
|
}
|
||
|
|
||
|
// Cleanup: remove temp files
|
||
|
for (const file of tempFiles) {
|
||
|
try {
|
||
|
await plugins.fs.remove(file);
|
||
|
} catch (e) {
|
||
|
// Ignore removal errors
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
|
||
|
performanceTracker.endOperation('handle-exhaustion');
|
||
|
});
|
||
|
|
||
|
await t.test('Atomicity and transaction errors', async () => {
|
||
|
performanceTracker.startOperation('atomicity');
|
||
|
|
||
|
class AtomicFileWriter {
|
||
|
async writeAtomic(filepath: string, content: string): Promise<void> {
|
||
|
const tempPath = `${filepath}.tmp.${process.pid}.${Date.now()}`;
|
||
|
|
||
|
try {
|
||
|
// Write to temp file
|
||
|
await plugins.fs.writeFile(tempPath, content);
|
||
|
|
||
|
// Simulate validation
|
||
|
const written = await plugins.fs.readFile(tempPath, 'utf8');
|
||
|
if (written !== content) {
|
||
|
throw new Error('Content verification failed');
|
||
|
}
|
||
|
|
||
|
// Atomic rename
|
||
|
await plugins.fs.rename(tempPath, filepath);
|
||
|
console.log(`✓ Atomic write completed for ${filepath}`);
|
||
|
|
||
|
} catch (error) {
|
||
|
// Cleanup on error
|
||
|
try {
|
||
|
await plugins.fs.remove(tempPath);
|
||
|
} catch (cleanupError) {
|
||
|
// Ignore cleanup errors
|
||
|
}
|
||
|
throw new Error(`Atomic write failed: ${error.message}`);
|
||
|
}
|
||
|
}
|
||
|
|
||
|
async transactionalUpdate(files: Array<{ path: string; content: string }>): Promise<void> {
|
||
|
const backups: Array<{ path: string; backup: string }> = [];
|
||
|
|
||
|
try {
|
||
|
// Create backups
|
||
|
for (const file of files) {
|
||
|
if (await plugins.fs.pathExists(file.path)) {
|
||
|
const backup = await plugins.fs.readFile(file.path, 'utf8');
|
||
|
backups.push({ path: file.path, backup });
|
||
|
}
|
||
|
}
|
||
|
|
||
|
// Update all files
|
||
|
for (const file of files) {
|
||
|
await this.writeAtomic(file.path, file.content);
|
||
|
}
|
||
|
|
||
|
console.log(`✓ Transaction completed: ${files.length} files updated`);
|
||
|
|
||
|
} catch (error) {
|
||
|
// Rollback on error
|
||
|
console.log(`✗ Transaction failed, rolling back: ${error.message}`);
|
||
|
|
||
|
for (const backup of backups) {
|
||
|
try {
|
||
|
await plugins.fs.writeFile(backup.path, backup.backup);
|
||
|
console.log(` Rolled back ${backup.path}`);
|
||
|
} catch (rollbackError) {
|
||
|
console.error(` Failed to rollback ${backup.path}: ${rollbackError.message}`);
|
||
|
}
|
||
|
}
|
||
|
|
||
|
throw error;
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
|
||
|
const atomicWriter = new AtomicFileWriter();
|
||
|
const testFilePath = plugins.path.join(testDir, 'atomic-test.xml');
|
||
|
|
||
|
// Test successful atomic write
|
||
|
await atomicWriter.writeAtomic(testFilePath, '<invoice>Atomic content</invoice>');
|
||
|
|
||
|
// Test transactional update
|
||
|
const transactionFiles = [
|
||
|
{ path: plugins.path.join(testDir, 'trans1.xml'), content: '<invoice id="1"></invoice>' },
|
||
|
{ path: plugins.path.join(testDir, 'trans2.xml'), content: '<invoice id="2"></invoice>' }
|
||
|
];
|
||
|
|
||
|
try {
|
||
|
await atomicWriter.transactionalUpdate(transactionFiles);
|
||
|
} catch (error) {
|
||
|
console.log(`Transaction test: ${error.message}`);
|
||
|
}
|
||
|
|
||
|
// Cleanup
|
||
|
await plugins.fs.remove(testFilePath);
|
||
|
for (const file of transactionFiles) {
|
||
|
try {
|
||
|
await plugins.fs.remove(file.path);
|
||
|
} catch (e) {
|
||
|
// Ignore
|
||
|
}
|
||
|
}
|
||
|
|
||
|
performanceTracker.endOperation('atomicity');
|
||
|
});
|
||
|
|
||
|
await t.test('Network file system errors', async () => {
|
||
|
performanceTracker.startOperation('network-fs');
|
||
|
|
||
|
const networkErrors = [
|
||
|
{
|
||
|
name: 'Network timeout',
|
||
|
error: 'ETIMEDOUT',
|
||
|
message: 'Network operation timed out'
|
||
|
},
|
||
|
{
|
||
|
name: 'Connection lost',
|
||
|
error: 'ECONNRESET',
|
||
|
message: 'Connection reset by peer'
|
||
|
},
|
||
|
{
|
||
|
name: 'Stale NFS handle',
|
||
|
error: 'ESTALE',
|
||
|
message: 'Stale NFS file handle'
|
||
|
},
|
||
|
{
|
||
|
name: 'Remote I/O error',
|
||
|
error: 'EREMOTEIO',
|
||
|
message: 'Remote I/O error'
|
||
|
}
|
||
|
];
|
||
|
|
||
|
for (const netError of networkErrors) {
|
||
|
const startTime = performance.now();
|
||
|
|
||
|
try {
|
||
|
// Simulate network file system error
|
||
|
const error = new Error(netError.message);
|
||
|
(error as any).code = netError.error;
|
||
|
throw error;
|
||
|
} catch (error) {
|
||
|
expect(error).toBeTruthy();
|
||
|
console.log(`✓ ${netError.name}: Simulated ${error.code} - ${error.message}`);
|
||
|
}
|
||
|
|
||
|
performanceTracker.recordMetric('network-fs-error', performance.now() - startTime);
|
||
|
}
|
||
|
|
||
|
performanceTracker.endOperation('network-fs');
|
||
|
});
|
||
|
|
||
|
// Cleanup test directory
|
||
|
try {
|
||
|
await plugins.fs.remove(testDir);
|
||
|
} catch (e) {
|
||
|
console.log('Warning: Could not clean up test directory');
|
||
|
}
|
||
|
|
||
|
// Performance summary
|
||
|
console.log('\n' + performanceTracker.getSummary());
|
||
|
|
||
|
// File system error handling best practices
|
||
|
console.log('\nFile System Error Handling Best Practices:');
|
||
|
console.log('1. Always check file permissions before operations');
|
||
|
console.log('2. Implement atomic writes using temp files and rename');
|
||
|
console.log('3. Handle disk space exhaustion gracefully');
|
||
|
console.log('4. Use file locking to prevent concurrent access issues');
|
||
|
console.log('5. Validate paths to prevent security vulnerabilities');
|
||
|
console.log('6. Implement retry logic for transient network FS errors');
|
||
|
console.log('7. Always clean up temp files and file handles');
|
||
|
console.log('8. Use transactions for multi-file updates');
|
||
|
});
|
||
|
|
||
|
tap.start();
|