import * as plugins from './smartnetwork.plugins.js';
import { getLogger } from './logging.js';
import { NetworkError, TimeoutError } from './errors.js';
import * as stats from './helpers/stats.js';
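
/**
 * Options for a Cloudflare speed test run.
 * parallelStreams: how many concurrent measurement streams to run (defaults to 1).
 * duration: optional test duration in seconds; when set, download and upload
 * chunks are measured repeatedly until the time elapses instead of using the
 * fixed segment sizes.
 */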
export interface SpeedOptions {
  parallelStreams?: number;
  duration?: number;
}
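
/**
 * Speed test against Cloudflare's speed.cloudflare.com endpoints: measures
 * latency, download and upload throughput, and reports the serving data center.
 *
 * @example
 * // minimal usage sketch; the option values are illustrative
 * const cloudflareSpeed = new CloudflareSpeed({ parallelStreams: 2 });
 * const result = await cloudflareSpeed.speedTest();
 * console.log(result.downloadSpeed, result.uploadSpeed);
 */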
export class CloudflareSpeed {
  private opts: SpeedOptions;

  constructor(opts?: SpeedOptions) {
    this.opts = opts || {};
  }
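
  /**
   * Runs the complete test: latency, server location lookup, trace data, then
   * download and upload measurements (fixed segments or duration-based,
   * optionally across parallel streams) and returns the combined result.
   */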
  public async speedTest() {
    const latency = await this.measureLatency();

    const serverLocations = await this.fetchServerLocations();
    const cgiData = await this.fetchCfCdnCgiTrace();

    // speed tests: either fixed segments or duration-based mode
    const parallel = this.opts.parallelStreams ?? 1;
    const measureDownloadParallel = (bytes: number, iterations: number) => {
      if (parallel <= 1) {
        return this.measureDownload(bytes, iterations);
      }
      return Promise.all(
        Array(parallel)
          .fill(null)
          .map(() => this.measureDownload(bytes, iterations)),
      ).then((arrays) => arrays.flat());
    };

    let downloadTests: number[];
    if (this.opts.duration && this.opts.duration > 0) {
      // duration-based download: run for specified seconds
      downloadTests = [];
      const durMs = this.opts.duration * 1000;
      const startMs = Date.now();
      // use medium chunk size for download
      const chunkBytes = 25001000;
      while (Date.now() - startMs < durMs) {
        const speeds = await measureDownloadParallel(chunkBytes, 1);
        downloadTests.push(...speeds);
      }
      if (downloadTests.length === 0) downloadTests = [0];
    } else {
      // fixed download segments
      const t1 = await measureDownloadParallel(101000, 10);
      const t2 = await measureDownloadParallel(1001000, 8);
      const t3 = await measureDownloadParallel(10001000, 6);
      const t4 = await measureDownloadParallel(25001000, 4);
      const t5 = await measureDownloadParallel(100001000, 1);
      downloadTests = [...t1, ...t2, ...t3, ...t4, ...t5];
    }
    const speedDownload = stats.quartile(downloadTests, 0.9).toFixed(2);

    // let's test the upload speed with configurable parallel streams
    const measureUploadParallel = (bytes: number, iterations: number) => {
      if (parallel <= 1) {
        return this.measureUpload(bytes, iterations);
      }
      return Promise.all(
        Array(parallel)
          .fill(null)
          .map(() => this.measureUpload(bytes, iterations)),
      ).then((arrays) => arrays.flat());
    };

    let uploadTests: number[];
    if (this.opts.duration && this.opts.duration > 0) {
      // duration-based upload: run for specified seconds
      uploadTests = [];
      const durMsUp = this.opts.duration * 1000;
      const startMsUp = Date.now();
      const chunkBytesUp = 1001000;
      while (Date.now() - startMsUp < durMsUp) {
        const speeds = await measureUploadParallel(chunkBytesUp, 1);
        uploadTests.push(...speeds);
      }
      if (uploadTests.length === 0) uploadTests = [0];
    } else {
      const u1 = await measureUploadParallel(11000, 10);
      const u2 = await measureUploadParallel(101000, 10);
      const u3 = await measureUploadParallel(1001000, 8);
      uploadTests = [...u1, ...u2, ...u3];
    }
    const speedUpload = stats.quartile(uploadTests, 0.9).toFixed(2);

    return {
      ...latency,
      ip: cgiData.ip,
      serverLocation: {
        shortId: cgiData.colo,
        name: serverLocations[cgiData.colo],
        availableLocations: serverLocations,
      },
      downloadSpeed: speedDownload,
      uploadSpeed: speedUpload,
    };
  }
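
  /**
   * Samples latency 20 times using small 1000-byte downloads (TTFB minus the
   * server-reported processing time) and returns min/max/average/median and jitter.
   */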
  public async measureLatency() {
    const measurements: number[] = [];

    for (let i = 0; i < 20; i += 1) {
      await this.download(1000).then(
        (response) => {
          // TTFB - Server processing time
          measurements.push(response[4] - response[0] - response[6]);
        },
        (error) => {
          getLogger().error('Error measuring latency:', error);
        },
      );
    }

    return {
      maxTime: Math.max(...measurements),
      minTime: Math.min(...measurements),
      averageTime: stats.average(measurements),
      medianTime: stats.median(measurements),
      jitter: stats.jitter(measurements),
    };
  }
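
  /**
   * Downloads a chunk of `bytes` bytes `iterations` times and returns the
   * measured speeds in megabits per second.
   */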
  public async measureDownload(bytes: number, iterations: number) {
    const measurements: number[] = [];

    for (let i = 0; i < iterations; i += 1) {
      await this.download(bytes).then(
        async (response) => {
          const transferTime = response[5] - response[4];
          measurements.push(await this.measureSpeed(bytes, transferTime));
        },
        (error) => {
          getLogger().error('Error measuring download chunk:', error);
        },
      );
    }

    return measurements;
  }
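
  /**
   * Uploads a chunk of `bytes` bytes `iterations` times and returns the
   * measured speeds in megabits per second.
   */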
  public async measureUpload(bytes: number, iterations: number) {
    const measurements: number[] = [];

    for (let i = 0; i < iterations; i += 1) {
      await this.upload(bytes).then(
        async (response) => {
          const transferTime = response[6];
          measurements.push(await this.measureSpeed(bytes, transferTime));
        },
        (error) => {
          getLogger().error('Error measuring upload chunk:', error);
        },
      );
    }

    return measurements;
  }
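
  /**
   * Converts a transfer of `bytes` bytes over `duration` milliseconds into
   * megabits per second.
   */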
  public async measureSpeed(bytes: number, duration: number) {
    return (bytes * 8) / (duration / 1000) / 1e6;
  }
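
  /**
   * Fetches Cloudflare's location list and maps IATA colo codes to city names.
   */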
  public async fetchServerLocations(): Promise<{ [key: string]: string }> {
    const res = JSON.parse(
      await this.get('speed.cloudflare.com', '/locations'),
    ) as Array<{ iata: string; city: string }>;
    return res.reduce(
      (data: Record<string, string>, optionsArg) => {
        data[optionsArg.iata] = optionsArg.city;
        return data;
      },
      {} as Record<string, string>,
    );
  }
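
  /**
   * Performs a plain HTTPS GET and resolves with the response body as a string.
   */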
  public async get(hostname: string, path: string): Promise<string> {
    return new Promise((resolve, reject) => {
      const req = plugins.https.request(
        {
          hostname,
          path,
          method: 'GET',
          // disable connection pooling to avoid listener accumulation
          agent: false,
        },
        (res) => {
          const body: Array<Buffer> = [];
          res.on('data', (chunk) => {
            body.push(chunk);
          });
          res.on('end', () => {
            try {
              resolve(Buffer.concat(body).toString());
            } catch (e) {
              reject(e);
            }
          });
        },
      );

      // attach the error handler on the request itself so connection failures
      // (where the response callback never fires) still reject the promise
      req.on('error', (err: Error & { code?: string }) => {
        reject(new NetworkError(err.message, err.code));
      });

      req.end();
    });
  }
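
  /**
   * Requests `bytes` bytes from Cloudflare's download endpoint and returns the
   * timing marks produced by request().
   */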
  public async download(bytes: number) {
    const options = {
      hostname: 'speed.cloudflare.com',
      path: `/__down?bytes=${bytes}`,
      method: 'GET',
    };

    return this.request(options);
  }
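
  /**
   * POSTs a zero-filled payload of `bytes` bytes to Cloudflare's upload
   * endpoint and returns the timing marks produced by request().
   */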
  public async upload(bytes: number) {
    const data = '0'.repeat(bytes);
    const options = {
      hostname: 'speed.cloudflare.com',
      path: '/__up',
      method: 'POST',
      headers: {
        'Content-Length': Buffer.byteLength(data),
      },
    };

    return this.request(options, data);
  }
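
  /**
   * Issues an HTTPS request and resolves with performance marks:
   * [started, dnsLookup, tcpHandshake, sslHandshake, ttfb, ended, serverTiming],
   * where serverTiming is the server-reported processing time taken from the
   * server-timing response header.
   */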
  public async request(options: plugins.https.RequestOptions, data = ''): Promise<number[]> {
    let started: number;
    let dnsLookup: number;
    let tcpHandshake: number;
    let sslHandshake: number;
    let ttfb: number;
    let ended: number;

    return new Promise((resolve, reject) => {
      started = plugins.perfHooks.performance.now();
      // disable connection pooling to avoid listener accumulation across requests
      const reqOptions = { ...options, agent: false };
      const req = plugins.https.request(reqOptions, (res) => {
        res.once('readable', () => {
          ttfb = plugins.perfHooks.performance.now();
        });
        // drain the body so the 'end' event fires
        res.on('data', () => {});
        res.on('end', () => {
          ended = plugins.perfHooks.performance.now();
          resolve([
            started,
            dnsLookup,
            tcpHandshake,
            sslHandshake,
            ttfb,
            ended,
            // server-reported processing time, parsed from the server-timing header
            parseFloat((res.headers['server-timing'] as string).slice(22)),
          ]);
        });
      });

      // Listen for timing events once per new socket
      req.once('socket', (socket) => {
        socket.once('lookup', () => {
          dnsLookup = plugins.perfHooks.performance.now();
        });
        socket.once('connect', () => {
          tcpHandshake = plugins.perfHooks.performance.now();
        });
        socket.once('secureConnect', () => {
          sslHandshake = plugins.perfHooks.performance.now();
        });
      });

      req.on('error', (error: Error & { code?: string }) => {
        reject(new NetworkError(error.message, error.code));
      });

      req.write(data);
      req.end();
    });
  }

  /**
   * Fetch Cloudflare's trace endpoint and parse key=value lines to a record.
   */
  public async fetchCfCdnCgiTrace(): Promise<Record<string, string>> {
    const parseCfCdnCgiTrace = (text: string) =>
      text
        .split('\n')
        .map((i) => {
          const parts = i.split('=');
          return [parts[0], parts[1]];
        })
        .reduce((data: Record<string, string>, [k, v]) => {
          if (v === undefined) return data;
          data[k] = v;
          return data;
        }, {} as Record<string, string>);

    return this.get('speed.cloudflare.com', '/cdn-cgi/trace').then(parseCfCdnCgiTrace);
  }
}