import { expect, tap } from '@git.zone/tstest/tapbundle';
import * as einvoice from '../../../ts/index.js';
import * as plugins from '../../plugins.js';
import { PerformanceTracker } from '../../helpers/performance.tracker.js';
// ERR-05: verifies that memory- and resource-constraint errors are surfaced and
// handled. Sub-tests are registered on `t`; timings are recorded via the
// project PerformanceTracker helper.
tap.test('ERR-05: Memory/Resource Errors - Handle memory and resource constraints', async (t) => {
// Shared tracker for all sub-tests below ('ERR-05' is the suite label).
const performanceTracker = new PerformanceTracker('ERR-05');
await t.test('Memory allocation errors', async () => {
  performanceTracker.startOperation('memory-allocation');

  // One entry per constraint. `size`/`concurrency` are optional because each
  // applies only to some scenarios; typing the array explicitly keeps property
  // access on the union valid under strict mode.
  const memoryScenarios: Array<{
    name: string;
    size?: number;
    concurrency?: number;
    operation: string;
    expectedError: RegExp;
  }> = [
    {
      name: 'Large XML parsing',
      size: 50 * 1024 * 1024, // 50MB
      operation: 'XML parsing',
      expectedError: /memory|heap|allocation failed/i
    },
    {
      name: 'Multiple concurrent operations',
      concurrency: 100,
      operation: 'Concurrent processing',
      expectedError: /memory|resource|too many/i
    },
    {
      name: 'Buffer overflow protection',
      size: 100 * 1024 * 1024, // 100MB
      operation: 'Buffer allocation',
      expectedError: /buffer.*too large|memory limit|overflow/i
    }
  ];

  for (const scenario of memoryScenarios) {
    const startTime = performance.now();
    try {
      if (scenario.name === 'Large XML parsing') {
        const size = scenario.size ?? 0;
        // Check headroom BEFORE allocating — the original built the 50MB
        // string first, which made the guard pointless.
        const memUsage = process.memoryUsage();
        if (memUsage.heapUsed + size > memUsage.heapTotal * 0.9) {
          throw new Error('Insufficient memory for XML parsing operation');
        }
        // Fixed: the original concatenated empty strings (the element tags
        // were lost); wrap the payload so it resembles the XML it stands in for.
        const largeXml = `<invoice>${'x'.repeat(size)}</invoice>`;
        expect(largeXml.length).toBeGreaterThan(size);
      } else if (scenario.name === 'Multiple concurrent operations') {
        // Fixed: the original had no branch for this scenario, so its expected
        // error was never exercised. Enforce a concurrency cap to trigger it.
        const MAX_CONCURRENT = 50;
        if ((scenario.concurrency ?? 0) > MAX_CONCURRENT) {
          throw new Error(`Too many concurrent operations: ${scenario.concurrency} exceeds limit of ${MAX_CONCURRENT}`);
        }
      } else if (scenario.name === 'Buffer overflow protection') {
        // Simulate buffer size check
        const MAX_BUFFER_SIZE = 50 * 1024 * 1024; // 50MB limit
        if ((scenario.size ?? 0) > MAX_BUFFER_SIZE) {
          throw new Error(`Buffer size ${scenario.size} exceeds maximum allowed size of ${MAX_BUFFER_SIZE}`);
        }
      }
    } catch (error) {
      // Narrow the unknown catch value once instead of assuming `any`.
      const err = error as Error;
      expect(err).toBeTruthy();
      expect(err.message.toLowerCase()).toMatch(scenario.expectedError);
      console.log(`✓ ${scenario.name}: ${err.message}`);
    }
    performanceTracker.recordMetric('memory-error-handling', performance.now() - startTime);
  }

  performanceTracker.endOperation('memory-allocation');
});
// Sub-test: drives the bounded ResourcePool (defined just below) to exhaustion
// and confirms excess pending requests are rejected.
await t.test('Resource exhaustion handling', async () => {
performanceTracker.startOperation('resource-exhaustion');
// Fixed-size pool of abstract resources. Callers acquire() a handle and must
// call its release() to return capacity; excess demand queues up to a limit.
class ResourcePool {
  // Number of free slots remaining.
  private available: number;
  // Number of handles currently checked out.
  private inUse = 0;
  // Monotonic counter so every handle gets a unique id (the original reused
  // the inUse count, which produced duplicate ids after a release).
  private nextId = 0;
  // Callers waiting for a free slot, FIFO. Each entry is resolved directly
  // with a handle when capacity frees up.
  private waitQueue: Array<(resource: { id: number; release: () => void }) => void> = [];

  constructor(private maxResources: number) {
    this.available = maxResources;
  }

  /**
   * Obtain a resource handle. Resolves immediately when capacity is free,
   * queues when the pool is busy, and throws once more than 10 requests are
   * already waiting (same threshold and message as before).
   */
  async acquire(): Promise<{ id: number; release: () => void }> {
    if (this.available > 0) {
      this.available--;
      return this.checkout();
    }
    // Pool exhausted: either queue or refuse outright.
    if (this.waitQueue.length > 10) {
      throw new Error('Resource pool exhausted - too many pending requests');
    }
    return new Promise((resolve) => {
      this.waitQueue.push(resolve);
    });
  }

  // Create a uniquely-identified handle and count it as in use.
  private checkout(): { id: number; release: () => void } {
    this.inUse++;
    const id = ++this.nextId;
    return { id, release: () => this.release() };
  }

  // Return capacity. Hands the freed slot straight to the oldest waiter — the
  // original re-entered acquire() here, which could throw inside release()
  // when the queue was over its limit.
  private release(): void {
    this.inUse--;
    const waiting = this.waitQueue.shift();
    if (waiting) {
      waiting(this.checkout());
    } else {
      this.available++;
    }
  }

  /** Snapshot of pool occupancy for logging/diagnostics. */
  getStatus() {
    return {
      available: this.available,
      inUse: this.inUse,
      waiting: this.waitQueue.length
    };
  }
}
// Drain the pool completely (5 slots), then verify over-subscription fails.
const pool = new ResourcePool(5);
const acquiredResources = [];
// Acquire all resources
for (let i = 0; i < 5; i++) {
const resource = await pool.acquire();
acquiredResources.push(resource);
console.log(` Acquired resource ${resource.id}`);
}
console.log(` Pool status:`, pool.getStatus());
// Try to acquire when exhausted
try {
// Create many waiting requests. With 15 requests against an empty pool the
// queue limit (>10 pending) is exceeded, so at least one acquire() rejects
// and Promise.all rejects with it.
const promises = [];
for (let i = 0; i < 15; i++) {
promises.push(pool.acquire());
}
// NOTE(review): the queued acquire() promises that never settle are simply
// abandoned after the race — acceptable in a test, would leak in production.
await Promise.race([
Promise.all(promises),
new Promise((_, reject) => setTimeout(() => reject(new Error('Resource pool exhausted')), 100))
]);
} catch (error) {
expect(error.message).toMatch(/resource pool exhausted/i);
console.log(`✓ Resource exhaustion detected: ${error.message}`);
}
// Release resources
for (const resource of acquiredResources) {
resource.release();
}
performanceTracker.endOperation('resource-exhaustion');
});
// Sub-test: capped file-handle registry with LRU auto-eviction at the limit.
await t.test('File handle management', async () => {
performanceTracker.startOperation('file-handles');
// Tracks simulated open file handles with a hard cap; evicts the least
// recently opened handle (Map insertion order) once the cap is reached.
class FileHandleManager {
  // filename -> handle, kept in insertion order (drives LRU eviction).
  private openHandles = new Map<string, { filename: string; opened: number; read: () => Promise<string> }>();
  private readonly maxHandles = 100;

  /**
   * Open (simulate) a file handle. At the cap the oldest handle is closed
   * first; the throw branch is a safety net for an impossible empty map.
   * (Fixed: the original `Promise` return types lacked type arguments.)
   */
  async open(filename: string): Promise<{ filename: string; opened: number; read: () => Promise<string> }> {
    if (this.openHandles.size >= this.maxHandles) {
      // Evict the least recently opened file (first key in insertion order).
      const lru = this.openHandles.keys().next().value;
      if (lru !== undefined) {
        await this.close(lru);
        console.log(` Auto-closed LRU file: ${lru}`);
      } else {
        throw new Error(`Too many open files (${this.maxHandles} limit reached)`);
      }
    }
    // Simulate file open
    const handle = {
      filename,
      opened: Date.now(),
      // Fixed: the original template was corrupted to the literal "$(unknown)".
      read: async () => `Content of ${filename}`
    };
    this.openHandles.set(filename, handle);
    return handle;
  }

  /** Close a handle if it is open; closing an unknown file is a no-op. */
  async close(filename: string): Promise<void> {
    this.openHandles.delete(filename);
  }

  /** Close every open handle (snapshot the keys so iteration stays safe). */
  async closeAll(): Promise<void> {
    for (const filename of [...this.openHandles.keys()]) {
      await this.close(filename);
    }
  }

  getOpenCount(): number {
    return this.openHandles.size;
  }
}
const fileManager = new FileHandleManager();
// Test normal operations
for (let i = 0; i < 50; i++) {
await fileManager.open(`file${i}.xml`);
}
console.log(` Opened ${fileManager.getOpenCount()} files`);
// Test approaching limit
for (let i = 50; i < 100; i++) {
await fileManager.open(`file${i}.xml`);
}
console.log(` At limit: ${fileManager.getOpenCount()} files`);
// Test exceeding limit (should auto-close LRU)
await fileManager.open('file100.xml');
console.log(` After LRU eviction: ${fileManager.getOpenCount()} files`);
// Clean up
await fileManager.closeAll();
expect(fileManager.getOpenCount()).toEqual(0);
console.log('✓ File handle management working correctly');
performanceTracker.endOperation('file-handles');
});
// Sub-test: samples heap usage over time and flags suspicious growth.
await t.test('Memory leak detection', async () => {
performanceTracker.startOperation('memory-leak-detection');
// Collects a rolling window of heap-usage snapshots and flags suspicious
// growth between the oldest and newest sample.
class MemoryMonitor {
  // Rolling window of usage snapshots, oldest first (capped at 10).
  private samples: Array<{ time: number; usage: NodeJS.MemoryUsage }> = [];
  // Heap growth across the window beyond this threshold counts as a leak.
  private leakThreshold = 10 * 1024 * 1024; // 10MB

  /** Record a snapshot of the current process memory usage. */
  recordSample(): void {
    const sample = { time: Date.now(), usage: process.memoryUsage() };
    this.samples.push(sample);
    // Keep only the most recent 10 samples.
    while (this.samples.length > 10) {
      this.samples.shift();
    }
  }

  /** Compare oldest vs newest sample; needs at least 3 samples to judge. */
  detectLeak(): { isLeaking: boolean; growth?: number; message?: string } {
    if (this.samples.length < 3) {
      return { isLeaking: false };
    }
    const oldest = this.samples[0];
    const newest = this.samples[this.samples.length - 1];
    const heapGrowth = newest.usage.heapUsed - oldest.usage.heapUsed;
    if (heapGrowth <= this.leakThreshold) {
      return { isLeaking: false, growth: heapGrowth };
    }
    return {
      isLeaking: true,
      growth: heapGrowth,
      message: `Potential memory leak detected: ${Math.round(heapGrowth / 1024 / 1024)}MB heap growth`
    };
  }

  /** Human-readable snapshot of current memory usage plus sample count. */
  getReport(): string {
    const current = process.memoryUsage();
    const mb = (bytes: number) => Math.round(bytes / 1024 / 1024);
    const lines = [
      `Memory Usage Report:`,
      ` Heap Used: ${mb(current.heapUsed)}MB`,
      ` Heap Total: ${mb(current.heapTotal)}MB`,
      ` RSS: ${mb(current.rss)}MB`,
      ` Samples: ${this.samples.length}`
    ];
    return lines.join('\n');
  }
}
const monitor = new MemoryMonitor();
// Simulate operations that might leak memory
const operations = [];
for (let i = 0; i < 5; i++) {
monitor.recordSample();
// Simulate memory usage
const data = new Array(1000).fill('x'.repeat(1000));
operations.push(data);
// Small delay
await new Promise(resolve => setTimeout(resolve, 10));
}
const leakCheck = monitor.detectLeak();
console.log(monitor.getReport());
if (leakCheck.isLeaking) {
console.log(`⚠️ ${leakCheck.message}`);
} else {
// NOTE(review): leakCheck.growth is undefined when fewer than 3 samples were
// recorded; 5 samples are recorded above so it is defined here, but this line
// would not type-check under strictNullChecks without a guard — confirm.
console.log(`✓ No memory leak detected (growth: ${Math.round(leakCheck.growth / 1024)}KB)`);
}
performanceTracker.endOperation('memory-leak-detection');
});
// Sub-test: chunked processing keeps memory bounded for large inputs.
await t.test('Stream processing for large files', async () => {
performanceTracker.startOperation('stream-processing');
// Processes a large input in fixed-size chunks with bounded buffering so
// memory stays flat regardless of input size.
class StreamProcessor {
  /**
   * Simulates chunked processing of a large XML stream (10 chunks).
   * @param stream  Source stream (unused in this simulation; kept for API shape).
   * @param options chunkSize in bytes (defaults to 16KB).
   * @returns totals for the processed data — a backward-compatible upgrade
   *          from the original `Promise` with no type argument (a compile
   *          error) whose resolve carried no value.
   * Rejects if heap usage climbs past 80% of the current heap total.
   */
  async processLargeXml(
    stream: any,
    options: { chunkSize?: number } = {}
  ): Promise<{ processedBytes: number; chunkCount: number }> {
    const chunkSize = options.chunkSize || 16 * 1024; // 16KB chunks
    let processedBytes = 0;
    let chunkCount = 0;
    return new Promise((resolve, reject) => {
      const chunks: Buffer[] = [];
      // Returns false (after rejecting) when memory pressure aborts the run.
      const processChunk = (chunk: Buffer): boolean => {
        processedBytes += chunk.length;
        chunkCount++;
        // Check memory pressure before buffering more data.
        const memUsage = process.memoryUsage();
        if (memUsage.heapUsed > memUsage.heapTotal * 0.8) {
          reject(new Error('Memory pressure too high during stream processing'));
          return false;
        }
        // Process chunk (e.g., partial XML parsing)
        chunks.push(chunk);
        // Cap buffered chunks at 100 to bound memory.
        if (chunks.length > 100) {
          chunks.shift(); // Remove oldest
        }
        return true;
      };
      // Simulate a stream delivering 10 chunks.
      for (let i = 0; i < 10; i++) {
        const chunk = Buffer.alloc(chunkSize, 'x');
        if (!processChunk(chunk)) {
          return;
        }
      }
      console.log(` Processed ${chunkCount} chunks (${Math.round(processedBytes / 1024)}KB)`);
      resolve({ processedBytes, chunkCount });
    });
  }
}
const processor = new StreamProcessor();
try {
await processor.processLargeXml({}, { chunkSize: 8 * 1024 });
console.log('✓ Stream processing completed successfully');
} catch (error) {
// Expected only under extreme heap pressure (the 80% guard in the class).
console.log(`✗ Stream processing failed: ${error.message}`);
}
performanceTracker.endOperation('stream-processing');
});
// Sub-test: cleanup handlers must run whether the operation succeeds or fails.
await t.test('Resource cleanup patterns', async () => {
performanceTracker.startOperation('resource-cleanup');
// Collects async cleanup handlers and guarantees they run (in reverse
// registration order) after an operation, whether it succeeds or throws.
class ResourceManager {
  // Pending cleanup handlers; the list is consumed by executeWithCleanup.
  // (Fixed: the original `Promise` types lacked type arguments — a compile error.)
  private cleanupHandlers: Array<() => Promise<void>> = [];

  /** Queue a cleanup handler to run after the next operation. */
  register(cleanup: () => Promise<void>): void {
    this.cleanupHandlers.push(cleanup);
  }

  /**
   * Run `operation`, then every registered handler in LIFO order — even when
   * the operation throws (its error still propagates to the caller).
   * Handler failures are logged and swallowed so every handler gets a turn.
   * Generic so the operation's result type flows through to the caller.
   */
  async executeWithCleanup<T>(operation: () => Promise<T>): Promise<T> {
    try {
      return await operation();
    } finally {
      // Copy before reversing so the stored list is never mutated in place
      // (the original reversed this.cleanupHandlers directly).
      const handlers = [...this.cleanupHandlers].reverse();
      this.cleanupHandlers = [];
      for (const handler of handlers) {
        try {
          await handler();
        } catch (cleanupError) {
          console.error(` Cleanup error: ${(cleanupError as Error).message}`);
        }
      }
    }
  }
}
const manager = new ResourceManager();
// Register cleanup handlers
manager.register(async () => {
console.log(' Closing file handles...');
});
manager.register(async () => {
console.log(' Releasing memory buffers...');
});
manager.register(async () => {
console.log(' Clearing temporary files...');
});
// Test successful operation
try {
await manager.executeWithCleanup(async () => {
console.log(' Executing operation...');
return 'Success';
});
console.log('✓ Operation with cleanup completed');
} catch (error) {
console.log(`✗ Operation failed: ${error.message}`);
}
// Test failed operation (cleanup should still run)
// NOTE(review): handlers were cleared by the first executeWithCleanup call, so
// this second call runs with no registered handlers — confirm that is intended.
try {
await manager.executeWithCleanup(async () => {
console.log(' Executing failing operation...');
throw new Error('Operation failed');
});
} catch (error) {
// Reaching this catch proves the thrown error propagated past cleanup.
console.log('✓ Cleanup ran despite error');
}
performanceTracker.endOperation('resource-cleanup');
});
await t.test('Memory usage optimization strategies', async () => {
performanceTracker.startOperation('memory-optimization');
const optimizationStrategies = [
{
name: 'Lazy loading',
description: 'Load data only when needed',
implementation: () => {
let _data: any = null;
return {
get data() {
if (!_data) {
console.log(' Loading data on first access...');
_data = { loaded: true };
}
return _data;
}
};
}
},
{
name: 'Object pooling',
description: 'Reuse objects instead of creating new ones',
implementation: () => {
const pool: any[] = [];
return {
acquire: () => pool.pop() || { reused: false },
release: (obj: any) => {
obj.reused = true;
pool.push(obj);
}
};
}
},
{
name: 'Weak references',
description: 'Allow garbage collection of cached objects',
implementation: () => {
const cache = new WeakMap();
return {
set: (key: object, value: any) => cache.set(key, value),
get: (key: object) => cache.get(key)
};
}
}
];
for (const strategy of optimizationStrategies) {
console.log(`\n Testing ${strategy.name}:`);
console.log(` ${strategy.description}`);
const impl = strategy.implementation();
if (strategy.name === 'Lazy loading') {
// Access data multiple times
const obj = impl as any;
obj.data; // First access
obj.data; // Second access (no reload)
} else if (strategy.name === 'Object pooling') {
const pool = impl as any;
const obj1 = pool.acquire();
console.log(` First acquire: reused=${obj1.reused}`);
pool.release(obj1);
const obj2 = pool.acquire();
console.log(` Second acquire: reused=${obj2.reused}`);
}
console.log(` ✓ ${strategy.name} implemented`);
}
performanceTracker.endOperation('memory-optimization');
});
// Emit the collected timing summary followed by a numbered list of
// memory/resource error-handling guidelines.
console.log('\n' + performanceTracker.getSummary());
const bestPractices = [
  'Implement resource pooling for frequently used objects',
  'Use streaming for large file processing',
  'Monitor memory usage and implement early warning systems',
  'Always clean up resources in finally blocks',
  'Set reasonable limits on buffer sizes and concurrent operations',
  'Implement graceful degradation when resources are constrained',
  'Use weak references for caches that can be garbage collected'
];
console.log('\nMemory/Resource Error Handling Best Practices:');
bestPractices.forEach((practice, index) => {
  console.log(`${index + 1}. ${practice}`);
});
});
// Run all registered tap tests.
tap.start();