feat(metrics): add real-time throughput sampling and byte-counting metrics
test/test.throughput.ts (636 lines, Normal file)
@@ -0,0 +1,636 @@
import { expect, tap } from '@git.zone/tstest/tapbundle';
import { SmartProxy } from '../ts/index.js';
import type { IRouteConfig } from '../ts/index.js';
import * as net from 'net';
import * as http from 'http';
import * as tls from 'tls';
import * as https from 'https';
import * as fs from 'fs';
import * as path from 'path';
import { fileURLToPath } from 'url';

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

// ────────────────────────────────────────────────────────────────────────────
// Port assignments (unique to avoid conflicts with other tests)
// ────────────────────────────────────────────────────────────────────────────
const TCP_ECHO_PORT = 47500;
const HTTP_ECHO_PORT = 47501;
const TLS_ECHO_PORT = 47502;
const PROXY_TCP_PORT = 47510;
const PROXY_HTTP_PORT = 47511;
const PROXY_TLS_PASS_PORT = 47512;
const PROXY_TLS_TERM_PORT = 47513;
const PROXY_SOCKET_PORT = 47514;
const PROXY_MULTI_A_PORT = 47515;
const PROXY_MULTI_B_PORT = 47516;
const PROXY_TP_HTTP_PORT = 47517;

// ────────────────────────────────────────────────────────────────────────────
// Test certificates
// ────────────────────────────────────────────────────────────────────────────
const CERT_PEM = fs.readFileSync(path.join(__dirname, '..', 'assets', 'certs', 'cert.pem'), 'utf8');
const KEY_PEM = fs.readFileSync(path.join(__dirname, '..', 'assets', 'certs', 'key.pem'), 'utf8');

// ────────────────────────────────────────────────────────────────────────────
// Backend servers
// ────────────────────────────────────────────────────────────────────────────
let tcpEchoServer: net.Server;
let httpEchoServer: http.Server;
let tlsEchoServer: tls.Server;

// Helper: force-poll the metrics adapter
async function pollMetrics(proxy: SmartProxy): Promise<void> {
  await (proxy as any).metricsAdapter.poll();
}

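// Editor's note: pollMetrics() reaches into a private field. The tests assume that
// SmartProxy keeps an internal `metricsAdapter` whose poll() pulls fresh counters on
// demand instead of waiting for the next sampleIntervalMs tick; the `as any` cast is
// what makes that assumption explicit rather than part of the public API.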
// ════════════════════════════════════════════════════════════════════════════
// Setup: backend servers
// ════════════════════════════════════════════════════════════════════════════
tap.test('setup - TCP echo server', async () => {
  tcpEchoServer = net.createServer((socket) => {
    socket.on('data', (data) => socket.write(data));
    socket.on('error', () => {});
  });
  await new Promise<void>((resolve) => {
    tcpEchoServer.listen(TCP_ECHO_PORT, () => {
      console.log(`TCP echo server on port ${TCP_ECHO_PORT}`);
      resolve();
    });
  });
});

tap.test('setup - HTTP echo server', async () => {
  httpEchoServer = http.createServer((req, res) => {
    let body = '';
    req.on('data', (chunk) => (body += chunk));
    req.on('end', () => {
      res.writeHead(200, { 'Content-Type': 'text/plain' });
      res.end(`echo:${body}`);
    });
  });
  await new Promise<void>((resolve) => {
    httpEchoServer.listen(HTTP_ECHO_PORT, () => {
      console.log(`HTTP echo server on port ${HTTP_ECHO_PORT}`);
      resolve();
    });
  });
});

tap.test('setup - TLS echo server', async () => {
  tlsEchoServer = tls.createServer(
    { cert: CERT_PEM, key: KEY_PEM },
    (socket) => {
      socket.on('data', (data) => socket.write(data));
      socket.on('error', () => {});
    },
  );
  await new Promise<void>((resolve) => {
    tlsEchoServer.listen(TLS_ECHO_PORT, () => {
      console.log(`TLS echo server on port ${TLS_ECHO_PORT}`);
      resolve();
    });
  });
});

// ════════════════════════════════════════════════════════════════════════════
// Group 1: TCP Forward (plain TCP passthrough — no domain, no TLS)
// ════════════════════════════════════════════════════════════════════════════
tap.test('TCP forward - real-time byte tracking', async (tools) => {
  const proxy = new SmartProxy({
    routes: [
      {
        id: 'tcp-forward',
        name: 'tcp-forward',
        match: { ports: PROXY_TCP_PORT },
        action: {
          type: 'forward',
          targets: [{ host: 'localhost', port: TCP_ECHO_PORT }],
        },
      },
    ],
    metrics: { enabled: true, sampleIntervalMs: 100, retentionSeconds: 60 },
  });
  await proxy.start();

  // Connect and send data
  const client = new net.Socket();
  await new Promise<void>((resolve, reject) => {
    client.connect(PROXY_TCP_PORT, 'localhost', () => resolve());
    client.on('error', reject);
  });

  let received = 0;
  client.on('data', (data) => (received += data.length));

  // Send 10 KB in chunks over 1 second
  const chunk = Buffer.alloc(1024, 'A');
  for (let i = 0; i < 10; i++) {
    client.write(chunk);
    await tools.delayFor(100);
  }

  // Wait for echo data and sampling to accumulate
  await tools.delayFor(500);

  // === Key assertion: metrics visible WHILE the connection is still open ===
  // Before this change, TCP bytes were only reported after connection close.
  // Now bytes are reported per-chunk in real-time.
  await pollMetrics(proxy);

  const mDuring = proxy.getMetrics();
  const bytesInDuring = mDuring.totals.bytesIn();
  const bytesOutDuring = mDuring.totals.bytesOut();
  console.log(`TCP forward (during) — bytesIn: ${bytesInDuring}, bytesOut: ${bytesOutDuring}`);
  expect(bytesInDuring).toBeGreaterThan(0);
  expect(bytesOutDuring).toBeGreaterThan(0);

  // Check that throughput is non-zero during active TCP traffic
  const tpDuring = mDuring.throughput.recent();
  console.log(`TCP forward (during) — recent throughput: in=${tpDuring.in}, out=${tpDuring.out}`);
  expect(tpDuring.in + tpDuring.out).toBeGreaterThan(0);

  // Close connection
  client.destroy();
  await tools.delayFor(500);

  // Final check
  await pollMetrics(proxy);
  const m = proxy.getMetrics();
  const bytesIn = m.totals.bytesIn();
  const bytesOut = m.totals.bytesOut();
  console.log(`TCP forward (final) — bytesIn: ${bytesIn}, bytesOut: ${bytesOut}`);
  expect(bytesIn).toBeGreaterThanOrEqual(bytesInDuring);
  expect(bytesOut).toBeGreaterThanOrEqual(bytesOutDuring);

  // Check per-route tracking
  const byRoute = m.throughput.byRoute();
  console.log('TCP forward — throughput byRoute:', Array.from(byRoute.entries()));

  await proxy.stop();
  await tools.delayFor(200);
});

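// ─── Editor's sketch (not part of the committed test, left unreferenced) ──────
// The getMetrics() surface exercised above (totals.bytesIn()/bytesOut(),
// throughput.instant()) could also back a simple live monitor. Units and the
// exact semantics of `in`/`out` are assumptions here; treat this as a sketch only.
function watchThroughputSketch(proxy: SmartProxy): NodeJS.Timeout {
  return setInterval(() => {
    const metrics = proxy.getMetrics();
    const instant = metrics.throughput.instant();
    console.log(
      `live throughput in=${instant.in} out=${instant.out} | ` +
        `totals in=${metrics.totals.bytesIn()} out=${metrics.totals.bytesOut()}`,
    );
  }, 1000);
}
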
// ════════════════════════════════════════════════════════════════════════════
// Group 2: HTTP Forward (plain HTTP proxy)
// ════════════════════════════════════════════════════════════════════════════
tap.test('HTTP forward - byte totals tracking', async (tools) => {
  const proxy = new SmartProxy({
    routes: [
      {
        id: 'http-forward',
        name: 'http-forward',
        match: { ports: PROXY_HTTP_PORT },
        action: {
          type: 'forward',
          targets: [{ host: 'localhost', port: HTTP_ECHO_PORT }],
        },
      },
    ],
    metrics: { enabled: true, sampleIntervalMs: 100, retentionSeconds: 60 },
  });
  await proxy.start();
  await tools.delayFor(300);

  // Send 10 HTTP requests with 1 KB body each
  for (let i = 0; i < 10; i++) {
    const body = 'X'.repeat(1024);
    await new Promise<void>((resolve, reject) => {
      const req = http.request(
        {
          hostname: 'localhost',
          port: PROXY_HTTP_PORT,
          path: '/echo',
          method: 'POST',
          headers: { 'Content-Type': 'text/plain', 'Content-Length': String(body.length) },
        },
        (res) => {
          let data = '';
          res.on('data', (chunk) => (data += chunk));
          res.on('end', () => resolve());
        },
      );
      req.on('error', reject);
      req.setTimeout(5000, () => {
        req.destroy();
        reject(new Error('HTTP request timeout'));
      });
      req.end(body);
    });
  }

  // Wait for sampling + poll
  await tools.delayFor(500);
  await pollMetrics(proxy);

  const m = proxy.getMetrics();
  const bytesIn = m.totals.bytesIn();
  const bytesOut = m.totals.bytesOut();
  console.log(`HTTP forward — bytesIn: ${bytesIn}, bytesOut: ${bytesOut}`);

  // Both directions should have bytes (CountingBody tracks request + response)
  expect(bytesIn).toBeGreaterThan(0);
  expect(bytesOut).toBeGreaterThan(0);

  await proxy.stop();
  await tools.delayFor(200);
});

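// Editor's note: "CountingBody" above presumably refers to a body wrapper in the
// proxy core that counts request and response payload bytes as they stream. It is
// not clear from this test whether headers are included in those counts, which is
// why the assertions only require non-zero totals rather than exact sizes
// (10 requests x 1 KB would otherwise be the natural expectation).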
// ════════════════════════════════════════════════════════════════════════════
// Group 3: TLS Passthrough (SNI-based, Rust passes encrypted data through)
// ════════════════════════════════════════════════════════════════════════════
tap.test('TLS passthrough - byte totals tracking', async (tools) => {
  const proxy = new SmartProxy({
    routes: [
      {
        id: 'tls-passthrough',
        name: 'tls-passthrough',
        match: { ports: PROXY_TLS_PASS_PORT, domains: 'localhost' },
        action: {
          type: 'forward',
          tls: { mode: 'passthrough' },
          targets: [{ host: 'localhost', port: TLS_ECHO_PORT }],
        },
      },
    ],
    metrics: { enabled: true, sampleIntervalMs: 100, retentionSeconds: 60 },
  });
  await proxy.start();
  await tools.delayFor(300);

  // Connect via TLS through the proxy (SNI: localhost)
  const tlsClient = tls.connect(
    {
      host: 'localhost',
      port: PROXY_TLS_PASS_PORT,
      servername: 'localhost',
      rejectUnauthorized: false,
    },
  );

  await new Promise<void>((resolve, reject) => {
    tlsClient.on('secureConnect', () => resolve());
    tlsClient.on('error', reject);
  });

  // Send some data
  const data = Buffer.alloc(2048, 'B');
  tlsClient.write(data);

  // Wait for echo
  let received = 0;
  tlsClient.on('data', (chunk) => (received += chunk.length));
  await tools.delayFor(1000);

  console.log(`TLS passthrough — received ${received} bytes back`);
  expect(received).toBeGreaterThan(0);

  tlsClient.destroy();
  await tools.delayFor(500);

  await pollMetrics(proxy);

  const m = proxy.getMetrics();
  const bytesIn = m.totals.bytesIn();
  const bytesOut = m.totals.bytesOut();
  console.log(`TLS passthrough — bytesIn: ${bytesIn}, bytesOut: ${bytesOut}`);

  // TLS passthrough tracks encrypted bytes flowing through
  expect(bytesIn).toBeGreaterThan(0);
  expect(bytesOut).toBeGreaterThan(0);

  await proxy.stop();
  await tools.delayFor(200);
});

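// Editor's note: in passthrough mode the proxy never decrypts the stream, so the
// totals above are most likely raw TLS bytes on the wire (handshake records plus
// the encrypted echo payload). That is presumably why the test only asserts
// non-zero counts instead of comparing against the 2048-byte plaintext buffer.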
// ════════════════════════════════════════════════════════════════════════════
// Group 4: TLS Terminate + HTTP (Rust terminates TLS, forwards to HTTP backend)
// ════════════════════════════════════════════════════════════════════════════
tap.test('TLS terminate + HTTP forward - byte totals tracking', async (tools) => {
  const proxy = new SmartProxy({
    routes: [
      {
        id: 'tls-terminate',
        name: 'tls-terminate',
        match: { ports: PROXY_TLS_TERM_PORT, domains: 'localhost' },
        action: {
          type: 'forward',
          tls: {
            mode: 'terminate',
            certificate: {
              cert: CERT_PEM,
              key: KEY_PEM,
            },
          },
          targets: [{ host: 'localhost', port: HTTP_ECHO_PORT }],
        },
      },
    ],
    metrics: { enabled: true, sampleIntervalMs: 100, retentionSeconds: 60 },
    disableDefaultCert: true,
  });
  await proxy.start();
  await tools.delayFor(300);

  // Send HTTPS request through the proxy
  const body = 'Z'.repeat(2048);
  await new Promise<void>((resolve, reject) => {
    const req = https.request(
      {
        hostname: 'localhost',
        port: PROXY_TLS_TERM_PORT,
        path: '/echo',
        method: 'POST',
        headers: { 'Content-Type': 'text/plain', 'Content-Length': String(body.length) },
        rejectUnauthorized: false,
      },
      (res) => {
        let data = '';
        res.on('data', (chunk) => (data += chunk));
        res.on('end', () => {
          console.log(`TLS terminate — response: ${data.slice(0, 50)}...`);
          resolve();
        });
      },
    );
    req.on('error', reject);
    req.setTimeout(5000, () => {
      req.destroy();
      reject(new Error('HTTPS request timeout'));
    });
    req.end(body);
  });

  await tools.delayFor(500);
  await pollMetrics(proxy);

  const m = proxy.getMetrics();
  const bytesIn = m.totals.bytesIn();
  const bytesOut = m.totals.bytesOut();
  console.log(`TLS terminate — bytesIn: ${bytesIn}, bytesOut: ${bytesOut}`);

  // TLS terminate: request body (bytesIn) and response body (bytesOut) via CountingBody
  expect(bytesIn).toBeGreaterThan(0);
  expect(bytesOut).toBeGreaterThan(0);

  await proxy.stop();
  await tools.delayFor(200);
});

// ════════════════════════════════════════════════════════════════════════════
// Group 5: Socket Handler (JS callback handling)
// ════════════════════════════════════════════════════════════════════════════
tap.test('Socket handler - byte totals tracking', async (tools) => {
  const proxy = new SmartProxy({
    routes: [
      {
        id: 'socket-handler',
        name: 'socket-handler',
        match: { ports: PROXY_SOCKET_PORT },
        action: {
          type: 'socket-handler',
          socketHandler: (socket, _context) => {
            socket.on('data', (data) => socket.write(data)); // echo
            socket.on('error', () => {});
          },
        },
      },
    ],
    metrics: { enabled: true, sampleIntervalMs: 100, retentionSeconds: 60 },
  });
  await proxy.start();
  await tools.delayFor(300);

  // Connect and send data
  const client = new net.Socket();
  await new Promise<void>((resolve, reject) => {
    client.connect(PROXY_SOCKET_PORT, 'localhost', () => resolve());
    client.on('error', reject);
  });

  const data = Buffer.alloc(4096, 'C');
  client.write(data);

  let received = 0;
  client.on('data', (chunk) => (received += chunk.length));
  await tools.delayFor(500);

  console.log(`Socket handler — received ${received} bytes back`);

  client.destroy();
  await tools.delayFor(500);

  await pollMetrics(proxy);

  const m = proxy.getMetrics();
  const bytesIn = m.totals.bytesIn();
  const bytesOut = m.totals.bytesOut();
  console.log(`Socket handler — bytesIn: ${bytesIn}, bytesOut: ${bytesOut}`);

  // Socket handler relay now records bytes after copy_bidirectional completes
  expect(bytesIn).toBeGreaterThan(0);
  expect(bytesOut).toBeGreaterThan(0);

  await proxy.stop();
  await tools.delayFor(200);
});

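// Editor's note: the copy_bidirectional reference above suggests the socket-handler
// relay runs in the Rust core (a tokio-style bidirectional copy) and only reports
// its byte counts once the relay finishes. That is presumably why this test
// destroys the client and waits before polling, in contrast to the per-chunk
// real-time reporting asserted in Group 1.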
// ════════════════════════════════════════════════════════════════════════════
// Group 6: Multi-route throughput isolation
// ════════════════════════════════════════════════════════════════════════════
tap.test('Multi-route throughput isolation', async (tools) => {
  const proxy = new SmartProxy({
    routes: [
      {
        id: 'route-alpha',
        name: 'route-alpha',
        match: { ports: PROXY_MULTI_A_PORT },
        action: {
          type: 'forward',
          targets: [{ host: 'localhost', port: TCP_ECHO_PORT }],
        },
      },
      {
        id: 'route-beta',
        name: 'route-beta',
        match: { ports: PROXY_MULTI_B_PORT },
        action: {
          type: 'forward',
          targets: [{ host: 'localhost', port: TCP_ECHO_PORT }],
        },
      },
    ],
    metrics: { enabled: true, sampleIntervalMs: 100, retentionSeconds: 60 },
  });
  await proxy.start();
  await tools.delayFor(300);

  // Send different amounts to each route
  // Route alpha: 8 KB
  const clientA = new net.Socket();
  await new Promise<void>((resolve, reject) => {
    clientA.connect(PROXY_MULTI_A_PORT, 'localhost', () => resolve());
    clientA.on('error', reject);
  });
  clientA.on('data', () => {}); // drain
  for (let i = 0; i < 8; i++) {
    clientA.write(Buffer.alloc(1024, 'A'));
    await tools.delayFor(50);
  }

  // Route beta: 2 KB
  const clientB = new net.Socket();
  await new Promise<void>((resolve, reject) => {
    clientB.connect(PROXY_MULTI_B_PORT, 'localhost', () => resolve());
    clientB.on('error', reject);
  });
  clientB.on('data', () => {}); // drain
  for (let i = 0; i < 2; i++) {
    clientB.write(Buffer.alloc(1024, 'B'));
    await tools.delayFor(50);
  }

  await tools.delayFor(500);

  // Close both
  clientA.destroy();
  clientB.destroy();
  await tools.delayFor(500);

  await pollMetrics(proxy);

  const m = proxy.getMetrics();

  // Check per-route throughput exists for both
  const byRoute = m.throughput.byRoute();
  console.log('Multi-route — throughput byRoute:', Array.from(byRoute.entries()));

  // Check per-route connection counts
  const connByRoute = m.connections.byRoute();
  console.log('Multi-route — connections byRoute:', Array.from(connByRoute.entries()));

  // Both routes should have tracked data
  const totalIn = m.totals.bytesIn();
  const totalOut = m.totals.bytesOut();
  console.log(`Multi-route — total bytesIn: ${totalIn}, bytesOut: ${totalOut}`);

  expect(totalIn).toBeGreaterThan(0);
  expect(totalOut).toBeGreaterThan(0);

  await proxy.stop();
  await tools.delayFor(200);
});

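// Editor's note: despite the test name, per-route isolation is only logged here
// (throughput.byRoute() and connections.byRoute() entries); the hard assertions
// cover aggregate totals only. Asserting that route-alpha reports roughly 4x the
// bytes of route-beta would make the isolation claim explicit, at the cost of
// some timing sensitivity.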
// ════════════════════════════════════════════════════════════════════════════
// Group 7: Throughput sampling over time (HTTP-based for real-time tracking)
//
// Uses the HTTP proxy path, where CountingBody reports bytes incrementally
// as each request/response body completes. This allows the sampling task
// to capture non-zero throughput during active traffic.
// ════════════════════════════════════════════════════════════════════════════
tap.test('Throughput sampling - values appear during active HTTP traffic', async (tools) => {
  const proxy = new SmartProxy({
    routes: [
      {
        id: 'sampling-test',
        name: 'sampling-test',
        match: { ports: PROXY_TP_HTTP_PORT },
        action: {
          type: 'forward',
          targets: [{ host: 'localhost', port: HTTP_ECHO_PORT }],
        },
      },
    ],
    metrics: { enabled: true, sampleIntervalMs: 100, retentionSeconds: 60 },
  });
  await proxy.start();
  await tools.delayFor(300);

  // Send HTTP requests continuously for ~2 seconds
  let sending = true;
  let requestCount = 0;
  const sendLoop = (async () => {
    while (sending) {
      const body = 'D'.repeat(5120); // 5 KB per request
      try {
        await new Promise<void>((resolve, reject) => {
          const req = http.request(
            {
              hostname: 'localhost',
              port: PROXY_TP_HTTP_PORT,
              path: '/echo',
              method: 'POST',
              headers: { 'Content-Type': 'text/plain', 'Content-Length': String(body.length) },
            },
            (res) => {
              res.on('data', () => {});
              res.on('end', () => resolve());
            },
          );
          req.on('error', reject);
          req.setTimeout(3000, () => {
            req.destroy();
            reject(new Error('timeout'));
          });
          req.end(body);
        });
        requestCount++;
      } catch {
        // Ignore errors during shutdown
        break;
      }
    }
  })();

  // After 1.5 seconds of active traffic, check throughput
  await tools.delayFor(1500);

  await pollMetrics(proxy);

  const m = proxy.getMetrics();
  const tp = m.throughput.instant();
  const totalIn = m.totals.bytesIn();
  const totalOut = m.totals.bytesOut();
  console.log(`Sampling test — after 1.5s of traffic: instant in=${tp.in}, out=${tp.out}`);
  console.log(`Sampling test — totals: bytesIn=${totalIn}, bytesOut=${totalOut}, requests=${requestCount}`);

  // Totals should definitely be non-zero after 1.5s of HTTP requests
  expect(totalIn + totalOut).toBeGreaterThan(0);

  // Instant throughput should be non-zero during active traffic. The sampling
  // interval is 100ms, so roughly 15 samples have been taken by now, each
  // capturing bytes from completed HTTP request/response bodies. Because
  // instant() can occasionally read 0 if a sample boundary falls between
  // requests, the assertion below checks the recent window instead.
  const tpRecent = m.throughput.recent();
  console.log(`Sampling test — recent throughput: in=${tpRecent.in}, out=${tpRecent.out}`);
  expect(tpRecent.in + tpRecent.out).toBeGreaterThan(0);

  // Stop sending
  sending = false;
  await sendLoop;

  // After traffic stops, wait for metrics to settle
  await tools.delayFor(500);
  await pollMetrics(proxy);

  const mAfter = proxy.getMetrics();
  const tpAfter = mAfter.throughput.instant();
  console.log(`Sampling test — after traffic stops: instant in=${tpAfter.in}, out=${tpAfter.out}`);
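  // Editor's note: tpAfter is only logged, not asserted. Once traffic stops the
  // instant rate is expected to fall back toward zero, but how quickly depends on
  // the 100ms sampling window, so pinning it down with an assertion would be flaky.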

  await proxy.stop();
  await tools.delayFor(200);
});

// ════════════════════════════════════════════════════════════════════════════
// Cleanup
// ════════════════════════════════════════════════════════════════════════════
tap.test('cleanup - close backend servers', async () => {
  await new Promise<void>((resolve) => tcpEchoServer.close(() => resolve()));
  await new Promise<void>((resolve) => httpEchoServer.close(() => resolve()));
  await new Promise<void>((resolve) => tlsEchoServer.close(() => resolve()));
  console.log('All backend servers closed');
});

export default tap.start();