Author: Juergen Kunz
Date: 2025-06-09 16:37:43 +00:00
parent 4c847fd3d7
commit fc09af9afd
10 changed files with 456 additions and 12 deletions

@@ -0,0 +1,150 @@
import { tap, expect } from '@git.zone/tstest/tapbundle';
import { SmartProxy, createHttpRoute } from '../ts/index.js';
import * as http from 'http';
tap.test('should not have memory leaks in long-running operations', async (tools) => {
  // Get initial memory usage
  const getMemoryUsage = () => {
    if (global.gc) {
      global.gc();
    }
    const usage = process.memoryUsage();
    return {
      heapUsed: Math.round(usage.heapUsed / 1024 / 1024), // MB
      external: Math.round(usage.external / 1024 / 1024), // MB
      rss: Math.round(usage.rss / 1024 / 1024) // MB
    };
  };

  // Create a target server
  const targetServer = http.createServer((req, res) => {
    res.writeHead(200, { 'Content-Type': 'text/plain' });
    res.end('OK');
  });
  await new Promise<void>((resolve) => targetServer.listen(3100, resolve));

  // Create the proxy - use a non-privileged port
  const routes = [
    createHttpRoute(['test1.local', 'test2.local', 'test3.local'], { host: 'localhost', port: 3100 }),
  ];
  // Update the route to use port 8080
  routes[0].match.ports = 8080;

  const proxy = new SmartProxy({
    ports: [8080], // Use a non-privileged port
    routes: routes
  });
  await proxy.start();

  console.log('Starting memory leak test...');
  const initialMemory = getMemoryUsage();
  console.log('Initial memory:', initialMemory);

  // Helper to make a single request with the given Host header
  const makeRequest = (domain: string): Promise<void> => {
    return new Promise((resolve, reject) => {
      const req = http.request({
        hostname: 'localhost',
        port: 8080,
        path: '/',
        method: 'GET',
        headers: {
          'Host': domain
        }
      }, (res) => {
        res.on('data', () => {});
        res.on('end', resolve);
      });
      req.on('error', reject);
      req.end();
    });
  };

  // Test 1: Many requests to the same routes
  console.log('Test 1: Making 1000 requests to same routes...');
  for (let i = 0; i < 1000; i++) {
    await makeRequest(`test${(i % 3) + 1}.local`);
    if (i % 100 === 0) {
      console.log(`  Progress: ${i}/1000`);
    }
  }
  const afterSameRoutesMemory = getMemoryUsage();
  console.log('Memory after same routes:', afterSameRoutesMemory);

  // Test 2: Many requests to different routes (tests routeContextCache)
  console.log('Test 2: Making 1000 requests to different routes...');
  for (let i = 0; i < 1000; i++) {
    // Use a unique domain per request to test cache growth
    await makeRequest(`test${i}.local`);
    if (i % 100 === 0) {
      console.log(`  Progress: ${i}/1000`);
    }
  }
  const afterDifferentRoutesMemory = getMemoryUsage();
  console.log('Memory after different routes:', afterDifferentRoutesMemory);

  // Test 3: Check metrics collector memory
  console.log('Test 3: Checking metrics collector...');
  const stats = proxy.getStats();
  console.log(`Active connections: ${stats.getActiveConnections()}`);
  console.log(`Total connections: ${stats.getTotalConnections()}`);
  console.log(`RPS: ${stats.getRequestsPerSecond()}`);

  // Test 4: Many rapid connections (tests the requestTimestamps array)
  console.log('Test 4: Making 10000 rapid requests...');
  const rapidRequests: Promise<void>[] = [];
  for (let i = 0; i < 10000; i++) {
    rapidRequests.push(makeRequest('test1.local'));
    if (i % 1000 === 0) {
      // Wait for the current batch to complete before queuing more
      await Promise.all(rapidRequests);
      rapidRequests.length = 0;
      console.log(`  Progress: ${i}/10000`);
    }
  }
  await Promise.all(rapidRequests);
  const afterRapidMemory = getMemoryUsage();
  console.log('Memory after rapid requests:', afterRapidMemory);

  // Let things settle, then force garbage collection and check final memory
  await new Promise(resolve => setTimeout(resolve, 1000));
  const finalMemory = getMemoryUsage();
  console.log('Final memory:', finalMemory);

  // Memory leak checks
  const memoryGrowth = finalMemory.heapUsed - initialMemory.heapUsed;
  console.log(`Total memory growth: ${memoryGrowth} MB`);

  // Allow some growth, but flag anything excessive (more than 50 MB for this test)
  expect(memoryGrowth).toBeLessThan(50);

  // Check specific potential leaks
  // 1. The route context cache should not grow unbounded
  const routeHandler = proxy.routeConnectionHandler as any;
  if (routeHandler.routeContextCache) {
    console.log(`Route context cache size: ${routeHandler.routeContextCache.size}`);
    // Should not hold 1000 entries from the different-routes test
    expect(routeHandler.routeContextCache.size).toBeLessThan(100);
  }

  // 2. The metrics collector should clean up old timestamps
  const metricsCollector = (proxy.getStats() as any);
  if (metricsCollector.requestTimestamps) {
    console.log(`Request timestamps array length: ${metricsCollector.requestTimestamps.length}`);
    // Should not exceed 10000 (the cleanup threshold)
    expect(metricsCollector.requestTimestamps.length).toBeLessThanOrEqual(10000);
  }

  // Cleanup
  await proxy.stop();
  await new Promise<void>((resolve) => targetServer.close(resolve));

  console.log('Memory leak test completed successfully');
});
// Run with: node --expose-gc test.memory-leak-check.node.ts
tap.start();
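
The checks above assume two things about the stats object: that requestTimestamps is capped rather than growing with every request, and that getRequestsPerSecond() only counts a recent window. The following is a minimal sketch of that behaviour, using the field and method names the tests poke at; the cap, window length, and internals of the real MetricsCollector may differ.

// Sketch only: capped timestamp buffer plus a one-minute RPS window, as the tests assume.
class RpsTracker {
  private requestTimestamps: number[] = [];
  private readonly MAX_TIMESTAMPS = 5000;  // hard cap asserted by the companion tests
  private readonly RPS_WINDOW_MS = 60_000; // assumed one-minute window

  recordRequest(): void {
    this.requestTimestamps.push(Date.now());
    if (this.requestTimestamps.length > this.MAX_TIMESTAMPS) {
      // Drop entries outside the window first, then truncate to the cap if still too long
      const cutoff = Date.now() - this.RPS_WINDOW_MS;
      this.requestTimestamps = this.requestTimestamps.filter((t) => t > cutoff);
      if (this.requestTimestamps.length > this.MAX_TIMESTAMPS) {
        this.requestTimestamps = this.requestTimestamps.slice(-this.MAX_TIMESTAMPS);
      }
    }
  }

  getRequestsPerSecond(): number {
    // Only timestamps inside the window contribute to the rate
    const cutoff = Date.now() - this.RPS_WINDOW_MS;
    const recent = this.requestTimestamps.filter((t) => t > cutoff).length;
    return recent / (this.RPS_WINDOW_MS / 1000);
  }
}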

@@ -0,0 +1,58 @@
import { tap, expect } from '@git.zone/tstest/tapbundle';
import { SmartProxy, createHttpRoute } from '../ts/index.js';
import * as http from 'http';
tap.test('memory leak fixes verification', async () => {
  // Test 1: MetricsCollector requestTimestamps cleanup
  console.log('\n=== Test 1: MetricsCollector requestTimestamps cleanup ===');

  const proxy = new SmartProxy({
    ports: [8081],
    routes: [
      createHttpRoute('test.local', { host: 'localhost', port: 3200 }),
    ]
  });
  // Override the route port
  proxy.settings.routes[0].match.ports = 8081;

  await proxy.start();

  const metricsCollector = (proxy.getStats() as any);

  // Check the initial state
  console.log('Initial timestamps:', metricsCollector.requestTimestamps.length);

  // Simulate many requests to exercise the cleanup
  for (let i = 0; i < 6000; i++) {
    metricsCollector.recordRequest();
  }

  // Should be trimmed down to MAX_TIMESTAMPS (5000)
  console.log('After 6000 requests:', metricsCollector.requestTimestamps.length);
  expect(metricsCollector.requestTimestamps.length).toBeLessThanOrEqual(5000);

  await proxy.stop();

  // Test 2: Verify intervals are cleaned up
  console.log('\n=== Test 2: Verify cleanup methods exist ===');

  // Check that RequestHandler has a destroy method
  const { RequestHandler } = await import('../ts/proxies/http-proxy/request-handler.js');
  const requestHandler = new RequestHandler({}, null as any);
  expect(typeof requestHandler.destroy).toEqual('function');
  console.log('✓ RequestHandler has destroy method');

  // Check that FunctionCache has a destroy method
  const { FunctionCache } = await import('../ts/proxies/http-proxy/function-cache.js');
  const functionCache = new FunctionCache({ debug: () => {}, info: () => {} } as any);
  expect(typeof functionCache.destroy).toEqual('function');
  console.log('✓ FunctionCache has destroy method');

  // Cleanup
  requestHandler.destroy();
  functionCache.destroy();

  console.log('\n✅ All memory leak fixes verified!');
});
tap.start();
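
The verification above only proves that destroy() exists on RequestHandler and FunctionCache; the leak it guards against is a setInterval handle that keeps running (and keeps its closure alive) after the handler or cache is discarded. Below is a hedged sketch of that teardown pattern with assumed field and method names; the unit tests further down also expect the interval field to be nulled after destroy().

// Sketch only: the clear-and-null interval teardown that destroy() is expected to perform.
class PeriodicCleaner {
  private cleanupInterval: NodeJS.Timeout | null = null;

  start(): void {
    // Assumed periodic housekeeping (e.g. expiring cached entries or rate-limit buckets)
    this.cleanupInterval = setInterval(() => this.removeExpiredEntries(), 60_000);
    // unref() keeps this timer from holding the process open on its own
    this.cleanupInterval.unref();
  }

  private removeExpiredEntries(): void {
    // ...housekeeping work goes here...
  }

  destroy(): void {
    if (this.cleanupInterval) {
      clearInterval(this.cleanupInterval);
      this.cleanupInterval = null; // nulled so later checks can confirm the timer is gone
    }
  }
}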

@@ -0,0 +1,131 @@
import { tap, expect } from '@git.zone/tstest/tapbundle';
tap.test('memory leak fixes - unit tests', async () => {
  console.log('\n=== Testing MetricsCollector memory management ===');

  // Import and test MetricsCollector directly
  const { MetricsCollector } = await import('../ts/proxies/smart-proxy/metrics-collector.js');

  // Create a mock SmartProxy with the minimal required properties
  const mockProxy = {
    connectionManager: {
      getConnectionCount: () => 0,
      getConnections: () => new Map(),
      getTerminationStats: () => ({ incoming: {} })
    },
    routeConnectionHandler: {
      newConnectionSubject: {
        subscribe: () => ({ unsubscribe: () => {} })
      }
    },
    settings: {}
  };

  const collector = new MetricsCollector(mockProxy as any);
  collector.start();

  // Test timestamp cleanup
  console.log('Testing requestTimestamps cleanup...');

  // Add 6000 timestamps
  for (let i = 0; i < 6000; i++) {
    collector.recordRequest();
  }

  // Access the private property for testing
  let timestamps = (collector as any).requestTimestamps;
  console.log(`Timestamps after 6000 requests: ${timestamps.length}`);

  // Force one more request to trigger the cleanup
  collector.recordRequest();
  timestamps = (collector as any).requestTimestamps;
  console.log(`Timestamps after cleanup trigger: ${timestamps.length}`);

  // Check the RPS window: all timestamps are within one minute, so none will be cleaned by age
  const now = Date.now();
  const oldestTimestamp = Math.min(...timestamps);
  const windowAge = now - oldestTimestamp;
  console.log(`Window age: ${windowAge}ms (should be < 60000ms for all to be kept)`);

  // Even though no timestamps age out of the RPS window, the array size should still be capped
  console.log(`MAX_TIMESTAMPS: ${(collector as any).MAX_TIMESTAMPS}`);

  // The rapid-fire loop keeps everything inside the window, so also test with older timestamps
  console.log('\nTesting with mixed old/new timestamps...');
  (collector as any).requestTimestamps = [];

  // Add timestamps that are older than the window
  const oldTime = now - 70000; // 70 seconds ago
  for (let i = 0; i < 3000; i++) {
    (collector as any).requestTimestamps.push(oldTime);
  }
  // Add new timestamps to exceed the limit
  for (let i = 0; i < 3000; i++) {
    collector.recordRequest();
  }

  timestamps = (collector as any).requestTimestamps;
  console.log(`After mixed timestamps: ${timestamps.length} (old ones should be cleaned)`);

  // Old timestamps should be cleaned once the array exceeds MAX_TIMESTAMPS
  expect(timestamps.length).toBeLessThanOrEqual(5000);

  // Stop the collector
  collector.stop();

  console.log('\n=== Testing FunctionCache cleanup ===');
  const { FunctionCache } = await import('../ts/proxies/http-proxy/function-cache.js');

  const mockLogger = {
    debug: () => {},
    info: () => {},
    warn: () => {},
    error: () => {}
  };

  const cache = new FunctionCache(mockLogger as any);

  // Check that the cleanup interval was set
  expect((cache as any).cleanupInterval).toBeTruthy();

  // Test the destroy method
  cache.destroy();

  // The cleanup interval should now be cleared
  expect((cache as any).cleanupInterval).toBeNull();
  console.log('✓ FunctionCache properly cleans up interval');

  console.log('\n=== Testing RequestHandler cleanup ===');
  const { RequestHandler } = await import('../ts/proxies/http-proxy/request-handler.js');

  const mockConnectionPool = {
    getConnection: () => null,
    releaseConnection: () => {}
  };

  const handler = new RequestHandler(
    { logLevel: 'error' },
    mockConnectionPool as any
  );

  // Check that the cleanup interval was set
  expect((handler as any).rateLimitCleanupInterval).toBeTruthy();

  // Test the destroy method
  handler.destroy();

  // The cleanup interval should now be cleared
  expect((handler as any).rateLimitCleanupInterval).toBeNull();
  console.log('✓ RequestHandler properly cleans up interval');

  console.log('\n✅ All memory leak fixes verified!');
});
tap.start();
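
Besides timestamps and intervals, the first test also asserts that routeContextCache stays small (fewer than 100 entries after 1000 distinct hostnames). Purely as an illustration of that kind of bound, here is a size-limited cache sketch; the key type, value type, eviction policy, and limit are assumptions, not SmartProxy's actual implementation.

// Illustrative only: a size-bounded cache in the spirit of the routeContextCache check.
class BoundedRouteContextCache<V> {
  private readonly entries = new Map<string, V>();
  constructor(private readonly maxEntries = 64) {}

  get size(): number {
    return this.entries.size;
  }

  set(key: string, value: V): void {
    // Re-insert to refresh recency, then evict the oldest entry once over the bound
    this.entries.delete(key);
    this.entries.set(key, value);
    if (this.entries.size > this.maxEntries) {
      const oldestKey = this.entries.keys().next().value as string;
      this.entries.delete(oldestKey);
    }
  }

  get(key: string): V | undefined {
    return this.entries.get(key);
  }
}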