feat(upstream): Add upstream proxy/cache subsystem and integrate per-protocol upstreams
This commit is contained in:
521
ts/upstream/classes.baseupstream.ts
Normal file
521
ts/upstream/classes.baseupstream.ts
Normal file
@@ -0,0 +1,521 @@
|
||||
import * as plugins from '../plugins.js';
|
||||
import type {
|
||||
IUpstreamRegistryConfig,
|
||||
IUpstreamAuthConfig,
|
||||
IUpstreamCacheConfig,
|
||||
IUpstreamResilienceConfig,
|
||||
IUpstreamResult,
|
||||
IUpstreamFetchContext,
|
||||
IProtocolUpstreamConfig,
|
||||
IUpstreamScopeRule,
|
||||
TCircuitState,
|
||||
} from './interfaces.upstream.js';
|
||||
import {
|
||||
DEFAULT_CACHE_CONFIG,
|
||||
DEFAULT_RESILIENCE_CONFIG,
|
||||
} from './interfaces.upstream.js';
|
||||
import { CircuitBreaker, CircuitOpenError, withCircuitBreaker } from './classes.circuitbreaker.js';
|
||||
import { UpstreamCache } from './classes.upstreamcache.js';
|
||||
|
||||
/**
 * Base class for protocol-specific upstream implementations.
 *
 * Provides:
 * - Multi-upstream routing with priority
 * - Scope-based filtering (glob patterns)
 * - Authentication handling
 * - Circuit breaker per upstream
 * - Caching with TTL
 * - Retry with exponential backoff
 * - 429 rate limit handling
 *
 * Subclasses supply `protocolName` and may override URL/header building for
 * protocol-specific behavior.
 */
export abstract class BaseUpstream {
  /** Protocol name for logging */
  protected abstract readonly protocolName: string;

  /** Upstream configuration */
  protected readonly config: IProtocolUpstreamConfig;

  /** Resolved cache configuration (defaults merged with per-protocol overrides) */
  protected readonly cacheConfig: IUpstreamCacheConfig;

  /** Resolved resilience configuration (defaults merged with per-protocol overrides) */
  protected readonly resilienceConfig: IUpstreamResilienceConfig;

  /** Circuit breakers per upstream, keyed by upstream id */
  protected readonly circuitBreakers: Map<string, CircuitBreaker> = new Map();

  /** Upstream cache */
  protected readonly cache: UpstreamCache;

  /** Logger instance */
  protected readonly logger: plugins.smartlog.Smartlog;

  /**
   * @param config  Protocol-level upstream configuration; per-upstream
   *                cache/resilience settings override the protocol defaults.
   * @param logger  Optional shared logger; a default Smartlog is created when omitted.
   */
  constructor(config: IProtocolUpstreamConfig, logger?: plugins.smartlog.Smartlog) {
    this.config = config;
    this.cacheConfig = { ...DEFAULT_CACHE_CONFIG, ...config.cache };
    this.resilienceConfig = { ...DEFAULT_RESILIENCE_CONFIG, ...config.resilience };
    this.cache = new UpstreamCache(this.cacheConfig);
    this.logger = logger || new plugins.smartlog.Smartlog({
      logContext: {
        company: 'smartregistry',
        companyunit: 'upstream',
        environment: 'production',
        runtime: 'node',
      }
    });

    // Initialize circuit breakers for each upstream
    for (const upstream of config.upstreams) {
      const upstreamResilience = { ...this.resilienceConfig, ...upstream.resilience };
      this.circuitBreakers.set(upstream.id, new CircuitBreaker(upstream.id, upstreamResilience));
    }
  }

  /**
   * Check if upstream is enabled.
   */
  public isEnabled(): boolean {
    return this.config.enabled;
  }

  /**
   * Get all configured upstreams.
   */
  public getUpstreams(): IUpstreamRegistryConfig[] {
    return this.config.upstreams;
  }

  /**
   * Get circuit breaker state for an upstream.
   * Returns null when the upstream id is unknown.
   */
  public getCircuitState(upstreamId: string): TCircuitState | null {
    const breaker = this.circuitBreakers.get(upstreamId);
    return breaker ? breaker.getState() : null;
  }

  /**
   * Get cache statistics (delegates to UpstreamCache.getStats()).
   */
  public getCacheStats() {
    return this.cache.getStats();
  }

  /**
   * Fetch a resource from upstreams.
   * Tries upstreams in priority order, respecting circuit breakers and scope rules.
   *
   * Resolution order: fresh cache hit → negative (404) cache hit →
   * stale cache hit (served immediately with background revalidation) →
   * live fetch from each applicable upstream in turn.
   *
   * @returns The first result obtained, or null when disabled / no upstream applies
   *          / all upstreams fail.
   */
  public async fetch(context: IUpstreamFetchContext): Promise<IUpstreamResult | null> {
    if (!this.config.enabled) {
      return null;
    }

    // Check cache first
    const cached = this.cache.get(context);
    if (cached && !cached.stale) {
      return {
        success: true,
        status: 200,
        headers: cached.headers,
        body: cached.data,
        upstreamId: cached.upstreamId,
        fromCache: true,
        latencyMs: 0,
      };
    }

    // Check for negative cache (recent 404)
    if (this.cache.hasNegative(context)) {
      return {
        success: false,
        status: 404,
        headers: {},
        upstreamId: 'cache',
        fromCache: true,
        latencyMs: 0,
      };
    }

    // Get applicable upstreams sorted by priority
    const applicableUpstreams = this.getApplicableUpstreams(context.resource);

    if (applicableUpstreams.length === 0) {
      return null;
    }

    // If we have stale cache, return it immediately and revalidate in background
    if (cached?.stale && this.cacheConfig.staleWhileRevalidate) {
      // Fire and forget revalidation. The returned promise is intentionally not
      // awaited; revalidateInBackground catches its own errors, so this cannot
      // surface as an unhandled rejection.
      this.revalidateInBackground(context, applicableUpstreams);
      return {
        success: true,
        status: 200,
        headers: cached.headers,
        body: cached.data,
        upstreamId: cached.upstreamId,
        fromCache: true,
        latencyMs: 0,
      };
    }

    // Try each upstream in order
    let lastError: Error | null = null;

    for (const upstream of applicableUpstreams) {
      const breaker = this.circuitBreakers.get(upstream.id);
      if (!breaker) continue;

      try {
        const result = await withCircuitBreaker(
          breaker,
          () => this.fetchFromUpstream(upstream, context),
        );

        // Cache successful responses
        if (result.success && result.body) {
          this.cache.set(
            context,
            Buffer.isBuffer(result.body) ? result.body : Buffer.from(JSON.stringify(result.body)),
            result.headers['content-type'] || 'application/octet-stream',
            result.headers,
            upstream.id,
          );
        }

        // Cache 404 responses
        if (result.status === 404) {
          this.cache.setNegative(context, upstream.id);
        }

        // First upstream to answer (success OR failure status) wins;
        // remaining upstreams are only tried when this one throws.
        return result;
      } catch (error) {
        if (error instanceof CircuitOpenError) {
          this.logger.log('debug', `Circuit open for upstream ${upstream.id}, trying next`);
        } else {
          this.logger.log('warn', `Upstream ${upstream.id} failed: ${(error as Error).message}`);
        }
        lastError = error as Error;
        // Continue to next upstream
      }
    }

    // All upstreams failed
    if (lastError) {
      this.logger.log('error', `All upstreams failed for ${context.resource}: ${lastError.message}`);
    }

    return null;
  }

  /**
   * Invalidate cache for a resource pattern.
   * @returns Number of cache entries removed.
   */
  public invalidateCache(pattern: RegExp): number {
    return this.cache.invalidatePattern(pattern);
  }

  /**
   * Clear all cache entries.
   */
  public clearCache(): void {
    this.cache.clear();
  }

  /**
   * Stop the upstream (cleanup resources).
   */
  public stop(): void {
    this.cache.stop();
  }

  /**
   * Get upstreams that apply to a resource, sorted by priority.
   * Filters out disabled upstreams, upstreams whose circuit is open,
   * and upstreams whose scope rules do not match the resource.
   * Lower `priority` values sort first.
   */
  protected getApplicableUpstreams(resource: string): IUpstreamRegistryConfig[] {
    return this.config.upstreams
      .filter(upstream => {
        if (!upstream.enabled) return false;

        // Check circuit breaker
        const breaker = this.circuitBreakers.get(upstream.id);
        if (breaker && !breaker.canRequest()) return false;

        // Check scope rules
        return this.matchesScopeRules(resource, upstream.scopeRules);
      })
      .sort((a, b) => a.priority - b.priority);
  }

  /**
   * Check if a resource matches scope rules.
   * Empty rules = match all.
   * Rules are evaluated in order; the LAST rule whose pattern matches
   * determines the outcome (include → true, exclude → false).
   */
  protected matchesScopeRules(resource: string, rules?: IUpstreamScopeRule[]): boolean {
    if (!rules || rules.length === 0) {
      return true;
    }

    // Process rules in order
    // Start with default exclude (nothing matches)
    let matched = false;

    for (const rule of rules) {
      const isMatch = plugins.minimatch(resource, rule.pattern);
      if (isMatch) {
        matched = rule.action === 'include';
      }
    }

    return matched;
  }

  /**
   * Fetch from a specific upstream with retry logic.
   * Retries up to `maxRetries` times with exponential backoff + jitter,
   * except for non-retryable errors (4xx other than 429).
   *
   * @throws The last error encountered when all attempts fail.
   */
  protected async fetchFromUpstream(
    upstream: IUpstreamRegistryConfig,
    context: IUpstreamFetchContext,
  ): Promise<IUpstreamResult> {
    const upstreamResilience = { ...this.resilienceConfig, ...upstream.resilience };
    // latencyMs covers ALL attempts including backoff sleeps, not just the
    // final successful request.
    const startTime = Date.now();

    let lastError: Error | null = null;

    for (let attempt = 0; attempt <= upstreamResilience.maxRetries; attempt++) {
      try {
        const result = await this.executeRequest(upstream, context, upstreamResilience.timeoutMs);
        return {
          ...result,
          upstreamId: upstream.id,
          fromCache: false,
          latencyMs: Date.now() - startTime,
        };
      } catch (error) {
        lastError = error as Error;

        // Don't retry on 4xx errors (except 429)
        if (this.isNonRetryableError(error)) {
          break;
        }

        // Calculate delay with exponential backoff and jitter
        if (attempt < upstreamResilience.maxRetries) {
          const delay = this.calculateBackoffDelay(
            attempt,
            upstreamResilience.retryDelayMs,
            upstreamResilience.retryMaxDelayMs,
          );
          await this.sleep(delay);
        }
      }
    }

    throw lastError || new Error('Request failed');
  }

  /**
   * Execute a single HTTP request to an upstream.
   * Returns the partial result; caller adds upstreamId/fromCache/latencyMs.
   */
  protected async executeRequest(
    upstream: IUpstreamRegistryConfig,
    context: IUpstreamFetchContext,
    timeoutMs: number,
  ): Promise<Omit<IUpstreamResult, 'upstreamId' | 'fromCache' | 'latencyMs'>> {
    // Build the full URL
    const url = this.buildUpstreamUrl(upstream, context);

    // Build headers with auth
    const headers = this.buildHeaders(upstream, context);

    // Make the request using SmartRequest
    const request = plugins.smartrequest.SmartRequest.create()
      .url(url)
      .method(context.method as any)
      .headers(headers)
      .timeout(timeoutMs)
      .handle429Backoff({ maxRetries: 3, fallbackDelay: 1000, maxWaitTime: 30000 });

    // Add query params if present
    // NOTE(review): the builder returned by .query() is discarded — this assumes
    // SmartRequest mutates in place rather than returning a new builder; verify
    // against the smartrequest API.
    if (Object.keys(context.query).length > 0) {
      request.query(context.query);
    }

    let response: plugins.smartrequest.ICoreResponse;

    switch (context.method.toUpperCase()) {
      case 'GET':
        response = await request.get();
        break;
      case 'HEAD':
        // SmartRequest doesn't have head(), use options
        response = await request.method('HEAD').get();
        break;
      default:
        // NOTE(review): any other verb (POST/PUT/...) is issued via .get() here —
        // presumably upstream fetches are read-only; confirm this is intended.
        response = await request.get();
    }

    // Parse response — normalize header names to lowercase, first value wins
    // for multi-valued headers.
    const responseHeaders: Record<string, string> = {};
    for (const [key, value] of Object.entries(response.headers)) {
      responseHeaders[key.toLowerCase()] = Array.isArray(value) ? value[0] : value;
    }

    let body: Buffer | any;
    const contentType = responseHeaders['content-type'] || '';

    // Body is only read for OK responses; error responses return body undefined.
    if (response.ok) {
      if (contentType.includes('application/json')) {
        body = await response.json();
      } else {
        const arrayBuffer = await response.arrayBuffer();
        body = Buffer.from(arrayBuffer);
      }
    }

    return {
      success: response.ok,
      status: response.status,
      headers: responseHeaders,
      body,
    };
  }

  /**
   * Build the full URL for an upstream request.
   * Subclasses can override for protocol-specific URL building.
   */
  protected buildUpstreamUrl(upstream: IUpstreamRegistryConfig, context: IUpstreamFetchContext): string {
    // Remove leading slash if URL already has trailing slash
    let path = context.path;
    if (upstream.url.endsWith('/') && path.startsWith('/')) {
      path = path.slice(1);
    }
    return `${upstream.url}${path}`;
  }

  /**
   * Build headers including authentication.
   * Starts from the incoming request headers, drops `host`, and layers
   * upstream auth on top.
   */
  protected buildHeaders(
    upstream: IUpstreamRegistryConfig,
    context: IUpstreamFetchContext,
  ): Record<string, string> {
    const headers: Record<string, string> = { ...context.headers };

    // Remove host header (will be set by HTTP client)
    delete headers['host'];

    // Add authentication
    this.addAuthHeaders(headers, upstream.auth);

    return headers;
  }

  /**
   * Add authentication headers based on auth config.
   * Mutates `headers` in place. Missing credentials for a given auth type
   * silently result in no auth header being set.
   */
  protected addAuthHeaders(headers: Record<string, string>, auth: IUpstreamAuthConfig): void {
    switch (auth.type) {
      case 'basic':
        if (auth.username && auth.password) {
          const credentials = Buffer.from(`${auth.username}:${auth.password}`).toString('base64');
          headers['authorization'] = `Basic ${credentials}`;
        }
        break;
      case 'bearer':
        if (auth.token) {
          headers['authorization'] = `Bearer ${auth.token}`;
        }
        break;
      case 'api-key':
        if (auth.token) {
          const headerName = auth.headerName || 'authorization';
          headers[headerName.toLowerCase()] = auth.token;
        }
        break;
      case 'none':
      default:
        // No authentication
        break;
    }
  }

  /**
   * Check if an error should not be retried.
   * True for errors carrying a 4xx `status` other than 429.
   */
  protected isNonRetryableError(error: unknown): boolean {
    // Check for HTTP status errors
    if (error && typeof error === 'object' && 'status' in error) {
      const status = (error as { status: number }).status;
      // Don't retry 4xx errors except 429 (rate limited)
      if (status >= 400 && status < 500 && status !== 429) {
        return true;
      }
    }
    return false;
  }

  /**
   * Calculate backoff delay with exponential backoff and jitter.
   *
   * @param attempt      Zero-based attempt index.
   * @param baseDelayMs  Delay for the first retry.
   * @param maxDelayMs   Upper bound before jitter is applied.
   * @returns Delay in whole milliseconds (cap ± up to 25% jitter).
   */
  protected calculateBackoffDelay(
    attempt: number,
    baseDelayMs: number,
    maxDelayMs: number,
  ): number {
    // Exponential backoff: delay = base * 2^attempt
    const exponentialDelay = baseDelayMs * Math.pow(2, attempt);

    // Cap at max delay
    const cappedDelay = Math.min(exponentialDelay, maxDelayMs);

    // Add jitter (±25%)
    const jitter = cappedDelay * 0.25 * (Math.random() * 2 - 1);

    return Math.floor(cappedDelay + jitter);
  }

  /**
   * Sleep for a specified duration.
   */
  protected sleep(ms: number): Promise<void> {
    return new Promise(resolve => setTimeout(resolve, ms));
  }

  /**
   * Revalidate cache in background.
   * Tries each upstream in order until one succeeds, then refreshes the cache
   * entry and stops. All errors are swallowed (logged at debug level) so the
   * fire-and-forget caller never sees a rejection.
   */
  protected async revalidateInBackground(
    context: IUpstreamFetchContext,
    upstreams: IUpstreamRegistryConfig[],
  ): Promise<void> {
    try {
      for (const upstream of upstreams) {
        const breaker = this.circuitBreakers.get(upstream.id);
        if (!breaker || !breaker.canRequest()) continue;

        try {
          const result = await withCircuitBreaker(
            breaker,
            () => this.fetchFromUpstream(upstream, context),
          );

          if (result.success && result.body) {
            this.cache.set(
              context,
              Buffer.isBuffer(result.body) ? result.body : Buffer.from(JSON.stringify(result.body)),
              result.headers['content-type'] || 'application/octet-stream',
              result.headers,
              upstream.id,
            );
            return; // Successfully revalidated
          }
        } catch {
          // Continue to next upstream
        }
      }
    } catch (error) {
      this.logger.log('debug', `Background revalidation failed: ${(error as Error).message}`);
    }
  }
}
|
||||
238
ts/upstream/classes.circuitbreaker.ts
Normal file
238
ts/upstream/classes.circuitbreaker.ts
Normal file
@@ -0,0 +1,238 @@
|
||||
import type { TCircuitState, IUpstreamResilienceConfig } from './interfaces.upstream.js';
|
||||
import { DEFAULT_RESILIENCE_CONFIG } from './interfaces.upstream.js';
|
||||
|
||||
/**
|
||||
* Circuit breaker implementation for upstream resilience.
|
||||
*
|
||||
* States:
|
||||
* - CLOSED: Normal operation, requests pass through
|
||||
* - OPEN: Circuit is tripped, requests fail fast
|
||||
* - HALF_OPEN: Testing if upstream has recovered
|
||||
*
|
||||
* Transitions:
|
||||
* - CLOSED → OPEN: When failure count exceeds threshold
|
||||
* - OPEN → HALF_OPEN: After reset timeout expires
|
||||
* - HALF_OPEN → CLOSED: On successful request
|
||||
* - HALF_OPEN → OPEN: On failed request
|
||||
*/
|
||||
export class CircuitBreaker {
|
||||
/** Unique identifier for logging and metrics */
|
||||
public readonly id: string;
|
||||
|
||||
/** Current circuit state */
|
||||
private state: TCircuitState = 'CLOSED';
|
||||
|
||||
/** Count of consecutive failures */
|
||||
private failureCount: number = 0;
|
||||
|
||||
/** Timestamp when circuit was opened */
|
||||
private openedAt: number = 0;
|
||||
|
||||
/** Number of successful requests in half-open state */
|
||||
private halfOpenSuccesses: number = 0;
|
||||
|
||||
/** Configuration */
|
||||
private readonly config: IUpstreamResilienceConfig;
|
||||
|
||||
/** Number of successes required to close circuit from half-open */
|
||||
private readonly halfOpenThreshold: number = 2;
|
||||
|
||||
constructor(id: string, config?: Partial<IUpstreamResilienceConfig>) {
|
||||
this.id = id;
|
||||
this.config = { ...DEFAULT_RESILIENCE_CONFIG, ...config };
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current circuit state.
|
||||
*/
|
||||
public getState(): TCircuitState {
|
||||
// Check if we should transition from OPEN to HALF_OPEN
|
||||
if (this.state === 'OPEN') {
|
||||
const elapsed = Date.now() - this.openedAt;
|
||||
if (elapsed >= this.config.circuitBreakerResetMs) {
|
||||
this.transitionTo('HALF_OPEN');
|
||||
}
|
||||
}
|
||||
return this.state;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if circuit allows requests.
|
||||
* Returns true if requests should be allowed.
|
||||
*/
|
||||
public canRequest(): boolean {
|
||||
const currentState = this.getState();
|
||||
return currentState !== 'OPEN';
|
||||
}
|
||||
|
||||
/**
|
||||
* Record a successful request.
|
||||
* May transition circuit from HALF_OPEN to CLOSED.
|
||||
*/
|
||||
public recordSuccess(): void {
|
||||
if (this.state === 'HALF_OPEN') {
|
||||
this.halfOpenSuccesses++;
|
||||
if (this.halfOpenSuccesses >= this.halfOpenThreshold) {
|
||||
this.transitionTo('CLOSED');
|
||||
}
|
||||
} else if (this.state === 'CLOSED') {
|
||||
// Reset failure count on success
|
||||
this.failureCount = 0;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Record a failed request.
|
||||
* May transition circuit from CLOSED/HALF_OPEN to OPEN.
|
||||
*/
|
||||
public recordFailure(): void {
|
||||
if (this.state === 'HALF_OPEN') {
|
||||
// Any failure in half-open immediately opens circuit
|
||||
this.transitionTo('OPEN');
|
||||
} else if (this.state === 'CLOSED') {
|
||||
this.failureCount++;
|
||||
if (this.failureCount >= this.config.circuitBreakerThreshold) {
|
||||
this.transitionTo('OPEN');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Force circuit to open state.
|
||||
* Useful for manual intervention or external health checks.
|
||||
*/
|
||||
public forceOpen(): void {
|
||||
this.transitionTo('OPEN');
|
||||
}
|
||||
|
||||
/**
|
||||
* Force circuit to closed state.
|
||||
* Useful for manual intervention after fixing upstream issues.
|
||||
*/
|
||||
public forceClose(): void {
|
||||
this.transitionTo('CLOSED');
|
||||
}
|
||||
|
||||
/**
|
||||
* Reset circuit to initial state.
|
||||
*/
|
||||
public reset(): void {
|
||||
this.state = 'CLOSED';
|
||||
this.failureCount = 0;
|
||||
this.openedAt = 0;
|
||||
this.halfOpenSuccesses = 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get circuit metrics for monitoring.
|
||||
*/
|
||||
public getMetrics(): ICircuitBreakerMetrics {
|
||||
return {
|
||||
id: this.id,
|
||||
state: this.getState(),
|
||||
failureCount: this.failureCount,
|
||||
openedAt: this.openedAt > 0 ? new Date(this.openedAt) : null,
|
||||
timeUntilHalfOpen: this.state === 'OPEN'
|
||||
? Math.max(0, this.config.circuitBreakerResetMs - (Date.now() - this.openedAt))
|
||||
: 0,
|
||||
halfOpenSuccesses: this.halfOpenSuccesses,
|
||||
threshold: this.config.circuitBreakerThreshold,
|
||||
resetMs: this.config.circuitBreakerResetMs,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Transition to a new state with proper cleanup.
|
||||
*/
|
||||
private transitionTo(newState: TCircuitState): void {
|
||||
const previousState = this.state;
|
||||
this.state = newState;
|
||||
|
||||
switch (newState) {
|
||||
case 'OPEN':
|
||||
this.openedAt = Date.now();
|
||||
this.halfOpenSuccesses = 0;
|
||||
break;
|
||||
case 'HALF_OPEN':
|
||||
this.halfOpenSuccesses = 0;
|
||||
break;
|
||||
case 'CLOSED':
|
||||
this.failureCount = 0;
|
||||
this.openedAt = 0;
|
||||
this.halfOpenSuccesses = 0;
|
||||
break;
|
||||
}
|
||||
|
||||
// Log state transition (useful for debugging and monitoring)
|
||||
// In production, this would emit events or metrics
|
||||
if (previousState !== newState) {
|
||||
// State changed - could emit event here
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Metrics for circuit breaker monitoring.
 * Snapshot returned by {@link CircuitBreaker.getMetrics}.
 */
export interface ICircuitBreakerMetrics {
  /** Circuit breaker identifier */
  id: string;
  /** Current state */
  state: TCircuitState;
  /** Number of consecutive failures */
  failureCount: number;
  /** When circuit was opened (null if never opened) */
  openedAt: Date | null;
  /** Milliseconds until circuit transitions to half-open (0 if not open) */
  timeUntilHalfOpen: number;
  /** Number of successes in half-open state */
  halfOpenSuccesses: number;
  /** Failure threshold for opening circuit */
  threshold: number;
  /** Reset timeout in milliseconds */
  resetMs: number;
}
|
||||
|
||||
/**
|
||||
* Execute a function with circuit breaker protection.
|
||||
*
|
||||
* @param breaker The circuit breaker to use
|
||||
* @param fn The async function to execute
|
||||
* @param fallback Optional fallback function when circuit is open
|
||||
* @returns The result of fn or fallback
|
||||
* @throws CircuitOpenError if circuit is open and no fallback provided
|
||||
*/
|
||||
export async function withCircuitBreaker<T>(
|
||||
breaker: CircuitBreaker,
|
||||
fn: () => Promise<T>,
|
||||
fallback?: () => Promise<T>,
|
||||
): Promise<T> {
|
||||
if (!breaker.canRequest()) {
|
||||
if (fallback) {
|
||||
return fallback();
|
||||
}
|
||||
throw new CircuitOpenError(breaker.id);
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await fn();
|
||||
breaker.recordSuccess();
|
||||
return result;
|
||||
} catch (error) {
|
||||
breaker.recordFailure();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Error thrown when circuit is open and no fallback is provided.
|
||||
*/
|
||||
export class CircuitOpenError extends Error {
|
||||
public readonly circuitId: string;
|
||||
|
||||
constructor(circuitId: string) {
|
||||
super(`Circuit breaker '${circuitId}' is open`);
|
||||
this.name = 'CircuitOpenError';
|
||||
this.circuitId = circuitId;
|
||||
}
|
||||
}
|
||||
423
ts/upstream/classes.upstreamcache.ts
Normal file
423
ts/upstream/classes.upstreamcache.ts
Normal file
@@ -0,0 +1,423 @@
|
||||
import type {
|
||||
ICacheEntry,
|
||||
IUpstreamCacheConfig,
|
||||
IUpstreamFetchContext,
|
||||
} from './interfaces.upstream.js';
|
||||
import { DEFAULT_CACHE_CONFIG } from './interfaces.upstream.js';
|
||||
|
||||
/**
|
||||
* In-memory cache for upstream responses.
|
||||
*
|
||||
* Features:
|
||||
* - TTL-based expiration
|
||||
* - Stale-while-revalidate support
|
||||
* - Negative caching (404s)
|
||||
* - Content-type aware caching
|
||||
* - ETag support for conditional requests
|
||||
*
|
||||
* Note: This is an in-memory implementation. For production with persistence,
|
||||
* extend this class to use RegistryStorage for S3-backed caching.
|
||||
*/
|
||||
export class UpstreamCache {
|
||||
/** Cache storage */
|
||||
private readonly cache: Map<string, ICacheEntry> = new Map();
|
||||
|
||||
/** Configuration */
|
||||
private readonly config: IUpstreamCacheConfig;
|
||||
|
||||
/** Maximum cache entries (prevents memory bloat) */
|
||||
private readonly maxEntries: number;
|
||||
|
||||
/** Cleanup interval handle */
|
||||
private cleanupInterval: ReturnType<typeof setInterval> | null = null;
|
||||
|
||||
/**
 * @param config     Optional overrides merged over DEFAULT_CACHE_CONFIG.
 * @param maxEntries Hard cap on cache entries; oldest entries are evicted
 *                   once the cap is reached.
 */
constructor(config?: Partial<IUpstreamCacheConfig>, maxEntries: number = 10000) {
  this.config = { ...DEFAULT_CACHE_CONFIG, ...config };
  this.maxEntries = maxEntries;

  // Start periodic cleanup if caching is enabled
  // (interval handle is held in this.cleanupInterval and cleared by stop()).
  if (this.config.enabled) {
    this.startCleanup();
  }
}
|
||||
|
||||
/**
 * Check if caching is enabled.
 */
public isEnabled(): boolean {
  return this.config.enabled;
}
|
||||
|
||||
/**
 * Get cached entry for a request context.
 * Returns null if not found or expired (unless stale-while-revalidate).
 *
 * Side effect: an expired entry within the stale grace window is mutated
 * in place (entry.stale = true) and returned once; because `!entry.stale`
 * then fails, the NEXT get() for the same key deletes the entry unless it
 * has been refreshed in the meantime.
 */
public get(context: IUpstreamFetchContext): ICacheEntry | null {
  if (!this.config.enabled) {
    return null;
  }

  const key = this.buildCacheKey(context);
  const entry = this.cache.get(key);

  if (!entry) {
    return null;
  }

  const now = new Date();

  // Check if entry is expired
  if (entry.expiresAt && entry.expiresAt < now) {
    // Check if we can serve stale content
    if (this.config.staleWhileRevalidate && !entry.stale) {
      const staleAge = (now.getTime() - entry.expiresAt.getTime()) / 1000;
      if (staleAge <= this.config.staleMaxAgeSeconds) {
        // Mark as stale and return
        entry.stale = true;
        return entry;
      }
    }
    // Entry is too old, remove it
    this.cache.delete(key);
    return null;
  }

  return entry;
}
|
||||
|
||||
/**
 * Store a response in the cache.
 *
 * @param context    Request context the entry is keyed on.
 * @param data       Response body bytes.
 * @param contentType Response content type (also used for TTL heuristics).
 * @param headers    Lowercased response headers; cache-control/etag are honored.
 * @param upstreamId Id of the upstream that produced the response.
 * @param options    Optional explicit TTL / etag overrides.
 */
public set(
  context: IUpstreamFetchContext,
  data: Buffer,
  contentType: string,
  headers: Record<string, string>,
  upstreamId: string,
  options?: ICacheSetOptions,
): void {
  if (!this.config.enabled) {
    return;
  }

  // Enforce max entries limit
  if (this.cache.size >= this.maxEntries) {
    this.evictOldest();
  }

  const key = this.buildCacheKey(context);
  const now = new Date();

  // Determine TTL based on content type; explicit option wins.
  const ttlSeconds = options?.ttlSeconds ?? this.determineTtl(context, contentType, headers);

  const entry: ICacheEntry = {
    data,
    contentType,
    headers,
    cachedAt: now,
    // ttlSeconds <= 0 means "no expiry": expiresAt stays undefined.
    expiresAt: ttlSeconds > 0 ? new Date(now.getTime() + ttlSeconds * 1000) : undefined,
    etag: headers['etag'] || options?.etag,
    upstreamId,
    stale: false,
  };

  this.cache.set(key, entry);
}
|
||||
|
||||
/**
|
||||
* Store a negative cache entry (404 response).
|
||||
*/
|
||||
public setNegative(context: IUpstreamFetchContext, upstreamId: string): void {
|
||||
if (!this.config.enabled || this.config.negativeCacheTtlSeconds <= 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
const key = this.buildCacheKey(context);
|
||||
const now = new Date();
|
||||
|
||||
const entry: ICacheEntry = {
|
||||
data: Buffer.from(''),
|
||||
contentType: 'application/octet-stream',
|
||||
headers: {},
|
||||
cachedAt: now,
|
||||
expiresAt: new Date(now.getTime() + this.config.negativeCacheTtlSeconds * 1000),
|
||||
upstreamId,
|
||||
stale: false,
|
||||
};
|
||||
|
||||
this.cache.set(key, entry);
|
||||
}
|
||||
|
||||
/**
 * Check if there's a negative cache entry for this context.
 *
 * NOTE(review): negative entries are detected purely by an empty body, so a
 * genuinely cached 200 response with a zero-length body is indistinguishable
 * from a cached 404 here — consider an explicit `negative` flag on ICacheEntry.
 */
public hasNegative(context: IUpstreamFetchContext): boolean {
  const entry = this.get(context);
  return entry !== null && entry.data.length === 0;
}
|
||||
|
||||
/**
|
||||
* Invalidate a specific cache entry.
|
||||
*/
|
||||
public invalidate(context: IUpstreamFetchContext): boolean {
|
||||
const key = this.buildCacheKey(context);
|
||||
return this.cache.delete(key);
|
||||
}
|
||||
|
||||
/**
|
||||
* Invalidate all entries matching a pattern.
|
||||
* Useful for invalidating all versions of a package.
|
||||
*/
|
||||
public invalidatePattern(pattern: RegExp): number {
|
||||
let count = 0;
|
||||
for (const key of this.cache.keys()) {
|
||||
if (pattern.test(key)) {
|
||||
this.cache.delete(key);
|
||||
count++;
|
||||
}
|
||||
}
|
||||
return count;
|
||||
}
|
||||
|
||||
/**
|
||||
* Invalidate all entries from a specific upstream.
|
||||
*/
|
||||
public invalidateUpstream(upstreamId: string): number {
|
||||
let count = 0;
|
||||
for (const [key, entry] of this.cache.entries()) {
|
||||
if (entry.upstreamId === upstreamId) {
|
||||
this.cache.delete(key);
|
||||
count++;
|
||||
}
|
||||
}
|
||||
return count;
|
||||
}
|
||||
|
||||
/**
 * Clear all cache entries (positive and negative).
 */
public clear(): void {
  this.cache.clear();
}
|
||||
|
||||
/**
|
||||
* Get cache statistics.
|
||||
*/
|
||||
public getStats(): ICacheStats {
|
||||
let freshCount = 0;
|
||||
let staleCount = 0;
|
||||
let negativeCount = 0;
|
||||
let totalSize = 0;
|
||||
const now = new Date();
|
||||
|
||||
for (const entry of this.cache.values()) {
|
||||
totalSize += entry.data.length;
|
||||
|
||||
if (entry.data.length === 0) {
|
||||
negativeCount++;
|
||||
} else if (entry.stale || (entry.expiresAt && entry.expiresAt < now)) {
|
||||
staleCount++;
|
||||
} else {
|
||||
freshCount++;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
totalEntries: this.cache.size,
|
||||
freshEntries: freshCount,
|
||||
staleEntries: staleCount,
|
||||
negativeEntries: negativeCount,
|
||||
totalSizeBytes: totalSize,
|
||||
maxEntries: this.maxEntries,
|
||||
enabled: this.config.enabled,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop the cache and cleanup.
|
||||
*/
|
||||
public stop(): void {
|
||||
if (this.cleanupInterval) {
|
||||
clearInterval(this.cleanupInterval);
|
||||
this.cleanupInterval = null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Build a unique cache key for a request context.
|
||||
*/
|
||||
private buildCacheKey(context: IUpstreamFetchContext): string {
|
||||
// Include method, protocol, path, and sorted query params
|
||||
const queryString = Object.keys(context.query)
|
||||
.sort()
|
||||
.map(k => `${k}=${context.query[k]}`)
|
||||
.join('&');
|
||||
|
||||
return `${context.protocol}:${context.method}:${context.path}${queryString ? '?' + queryString : ''}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine TTL based on content characteristics.
|
||||
*/
|
||||
private determineTtl(
|
||||
context: IUpstreamFetchContext,
|
||||
contentType: string,
|
||||
headers: Record<string, string>,
|
||||
): number {
|
||||
// Check for Cache-Control header
|
||||
const cacheControl = headers['cache-control'];
|
||||
if (cacheControl) {
|
||||
const maxAgeMatch = cacheControl.match(/max-age=(\d+)/);
|
||||
if (maxAgeMatch) {
|
||||
return parseInt(maxAgeMatch[1], 10);
|
||||
}
|
||||
if (cacheControl.includes('no-store') || cacheControl.includes('no-cache')) {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
// Check if content is immutable (content-addressable)
|
||||
if (this.isImmutableContent(context, contentType)) {
|
||||
return this.config.immutableTtlSeconds;
|
||||
}
|
||||
|
||||
// Default TTL for mutable content
|
||||
return this.config.defaultTtlSeconds;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if content is immutable (content-addressable).
|
||||
*/
|
||||
private isImmutableContent(context: IUpstreamFetchContext, contentType: string): boolean {
|
||||
// OCI blobs with digest are immutable
|
||||
if (context.protocol === 'oci' && context.resourceType === 'blob') {
|
||||
return true;
|
||||
}
|
||||
|
||||
// NPM tarballs are immutable (versioned)
|
||||
if (context.protocol === 'npm' && context.resourceType === 'tarball') {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Maven artifacts with version are immutable
|
||||
if (context.protocol === 'maven' && context.resourceType === 'artifact') {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Cargo crate files are immutable
|
||||
if (context.protocol === 'cargo' && context.resourceType === 'crate') {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Composer dist files are immutable
|
||||
if (context.protocol === 'composer' && context.resourceType === 'dist') {
|
||||
return true;
|
||||
}
|
||||
|
||||
// PyPI package files are immutable
|
||||
if (context.protocol === 'pypi' && context.resourceType === 'package') {
|
||||
return true;
|
||||
}
|
||||
|
||||
// RubyGems .gem files are immutable
|
||||
if (context.protocol === 'rubygems' && context.resourceType === 'gem') {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Evict oldest entries to make room for new ones.
|
||||
*/
|
||||
private evictOldest(): void {
|
||||
// Evict 10% of max entries
|
||||
const evictCount = Math.ceil(this.maxEntries * 0.1);
|
||||
let evicted = 0;
|
||||
|
||||
// First, try to evict stale entries
|
||||
const now = new Date();
|
||||
for (const [key, entry] of this.cache.entries()) {
|
||||
if (evicted >= evictCount) break;
|
||||
if (entry.stale || (entry.expiresAt && entry.expiresAt < now)) {
|
||||
this.cache.delete(key);
|
||||
evicted++;
|
||||
}
|
||||
}
|
||||
|
||||
// If not enough evicted, evict oldest by cachedAt
|
||||
if (evicted < evictCount) {
|
||||
const entries = Array.from(this.cache.entries())
|
||||
.sort((a, b) => a[1].cachedAt.getTime() - b[1].cachedAt.getTime());
|
||||
|
||||
for (const [key] of entries) {
|
||||
if (evicted >= evictCount) break;
|
||||
this.cache.delete(key);
|
||||
evicted++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Start periodic cleanup of expired entries.
|
||||
*/
|
||||
private startCleanup(): void {
|
||||
// Run cleanup every minute
|
||||
this.cleanupInterval = setInterval(() => {
|
||||
this.cleanup();
|
||||
}, 60000);
|
||||
|
||||
// Don't keep the process alive just for cleanup
|
||||
if (this.cleanupInterval.unref) {
|
||||
this.cleanupInterval.unref();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove all expired entries.
|
||||
*/
|
||||
private cleanup(): void {
|
||||
const now = new Date();
|
||||
const staleDeadline = new Date(now.getTime() - this.config.staleMaxAgeSeconds * 1000);
|
||||
|
||||
for (const [key, entry] of this.cache.entries()) {
|
||||
if (entry.expiresAt) {
|
||||
// Remove if past stale deadline
|
||||
if (entry.expiresAt < staleDeadline) {
|
||||
this.cache.delete(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Options for cache set operation.
 */
export interface ICacheSetOptions {
  /** Override TTL in seconds (takes precedence over the cache's computed TTL) */
  ttlSeconds?: number;
  /** ETag stored with the entry, for later conditional (If-None-Match) requests */
  etag?: string;
}
|
||||
|
||||
/**
 * Cache statistics, as returned by UpstreamCache.getStats().
 */
export interface ICacheStats {
  /** Total number of cached entries */
  totalEntries: number;
  /** Number of fresh (non-expired) entries */
  freshEntries: number;
  /** Number of stale entries (expired but still usable) */
  staleEntries: number;
  /** Number of negative cache entries (empty bodies caching upstream misses) */
  negativeEntries: number;
  /** Total size of cached data in bytes */
  totalSizeBytes: number;
  /** Maximum allowed entries before eviction kicks in */
  maxEntries: number;
  /** Whether caching is enabled */
  enabled: boolean;
}
|
||||
11
ts/upstream/index.ts
Normal file
11
ts/upstream/index.ts
Normal file
@@ -0,0 +1,11 @@
|
||||
// Barrel module: public entry point of the upstream proxy/cache subsystem.

// Interfaces and types
export * from './interfaces.upstream.js';

// Classes
export { CircuitBreaker, CircuitOpenError, withCircuitBreaker } from './classes.circuitbreaker.js';
export type { ICircuitBreakerMetrics } from './classes.circuitbreaker.js';

export { UpstreamCache } from './classes.upstreamcache.js';
export type { ICacheSetOptions, ICacheStats } from './classes.upstreamcache.js';

export { BaseUpstream } from './classes.baseupstream.js';
|
||||
195
ts/upstream/interfaces.upstream.ts
Normal file
195
ts/upstream/interfaces.upstream.ts
Normal file
@@ -0,0 +1,195 @@
|
||||
import type { TRegistryProtocol } from '../core/interfaces.core.js';
|
||||
|
||||
/**
 * Scope rule for routing requests to specific upstreams.
 * Uses glob patterns for flexible matching.
 */
export interface IUpstreamScopeRule {
  /** Glob pattern matched against the resource name (e.g., "@company/*", "com.example.*", "library/*") */
  pattern: string;
  /** Whether matching resources should be included or excluded from this upstream */
  action: 'include' | 'exclude';
}
|
||||
|
||||
/**
 * Authentication configuration for an upstream registry.
 * Supports multiple auth strategies; fields are relevant per `type`.
 */
export interface IUpstreamAuthConfig {
  /** Authentication type */
  type: 'none' | 'basic' | 'bearer' | 'api-key';
  /** Username for basic auth */
  username?: string;
  /** Password for basic auth */
  password?: string;
  /** Token for bearer or api-key auth */
  token?: string;
  /** Custom header name for api-key auth (default: 'Authorization') */
  headerName?: string;
}
|
||||
|
||||
/**
 * Cache configuration for upstream content.
 * Defaults for every field live in DEFAULT_CACHE_CONFIG.
 */
export interface IUpstreamCacheConfig {
  /** Whether caching is enabled */
  enabled: boolean;
  /** Default TTL in seconds for mutable content (default: 300 = 5 min) */
  defaultTtlSeconds: number;
  /** TTL in seconds for immutable/content-addressable content (default: 2592000 = 30 days) */
  immutableTtlSeconds: number;
  /** Whether to serve stale content while revalidating in background */
  staleWhileRevalidate: boolean;
  /** Maximum age in seconds an expired entry stays usable as stale (default: 3600 = 1 hour) */
  staleMaxAgeSeconds: number;
  /** TTL in seconds for negative cache entries (404s) (default: 60 = 1 min) */
  negativeCacheTtlSeconds: number;
}
|
||||
|
||||
/**
 * Resilience configuration for upstream requests.
 * Defaults for every field live in DEFAULT_RESILIENCE_CONFIG.
 */
export interface IUpstreamResilienceConfig {
  /** Request timeout in milliseconds (default: 30000) */
  timeoutMs: number;
  /** Maximum number of retry attempts (default: 3) */
  maxRetries: number;
  /** Initial retry delay in milliseconds, before backoff (default: 1000) */
  retryDelayMs: number;
  /** Maximum retry delay in milliseconds, capping the backoff (default: 30000) */
  retryMaxDelayMs: number;
  /** Number of failures before circuit breaker opens (default: 5) */
  circuitBreakerThreshold: number;
  /** Time in milliseconds before an open circuit breaker attempts reset (default: 30000) */
  circuitBreakerResetMs: number;
}
|
||||
|
||||
/**
 * Configuration for a single upstream registry.
 */
export interface IUpstreamRegistryConfig {
  /** Unique identifier for this upstream */
  id: string;
  /** Human-readable name */
  name: string;
  /** Base URL of the upstream registry (e.g., "https://registry.npmjs.org") */
  url: string;
  /** Priority for routing (lower = higher priority, 1 = first) */
  priority: number;
  /** Whether this upstream is enabled */
  enabled: boolean;
  /** Scope rules for routing (empty or omitted = match all) */
  scopeRules?: IUpstreamScopeRule[];
  /** Authentication configuration */
  auth: IUpstreamAuthConfig;
  /** Per-upstream cache configuration overrides (merged over protocol-level config) */
  cache?: Partial<IUpstreamCacheConfig>;
  /** Per-upstream resilience configuration overrides (merged over protocol-level config) */
  resilience?: Partial<IUpstreamResilienceConfig>;
}
|
||||
|
||||
/**
 * Protocol-level upstream configuration.
 * Configures upstream behavior for a specific protocol (npm, oci, etc.)
 */
export interface IProtocolUpstreamConfig {
  /** Whether upstream is enabled for this protocol */
  enabled: boolean;
  /** List of upstream registries, ordered by priority */
  upstreams: IUpstreamRegistryConfig[];
  /** Protocol-level cache configuration defaults (overridable per upstream) */
  cache?: Partial<IUpstreamCacheConfig>;
  /** Protocol-level resilience configuration defaults (overridable per upstream) */
  resilience?: Partial<IUpstreamResilienceConfig>;
}
|
||||
|
||||
/**
 * Result of an upstream fetch operation.
 */
export interface IUpstreamResult {
  /** Whether the fetch was successful (2xx status) */
  success: boolean;
  /** HTTP status code */
  status: number;
  /** Response headers */
  headers: Record<string, string>;
  /** Response body (Buffer for binary, object for JSON); absent for bodyless responses */
  body?: Buffer | any;
  /** ID of the upstream that served the request */
  upstreamId: string;
  /** Whether the response was served from cache */
  fromCache: boolean;
  /** Request latency in milliseconds */
  latencyMs: number;
}
|
||||
|
||||
/**
 * Circuit breaker state:
 * - 'CLOSED': requests flow through normally
 * - 'OPEN': requests are rejected without contacting the upstream
 * - 'HALF_OPEN': limited probe traffic is allowed to test recovery
 */
export type TCircuitState = 'CLOSED' | 'OPEN' | 'HALF_OPEN';
|
||||
|
||||
/**
 * Context for an upstream fetch request.
 */
export interface IUpstreamFetchContext {
  /** Protocol type */
  protocol: TRegistryProtocol;
  /** Resource identifier (package name, artifact name, etc.) */
  resource: string;
  /** Type of resource being fetched (packument, tarball, manifest, blob, etc.); also drives immutable-content detection */
  resourceType: string;
  /** Original request path */
  path: string;
  /** HTTP method */
  method: string;
  /** Request headers */
  headers: Record<string, string>;
  /** Query parameters */
  query: Record<string, string>;
}
|
||||
|
||||
/**
 * Cache entry stored in the upstream cache.
 */
export interface ICacheEntry {
  /** Cached data (empty Buffer indicates a negative cache entry) */
  data: Buffer;
  /** Content type of the cached data */
  contentType: string;
  /** Original response headers */
  headers: Record<string, string>;
  /** When the entry was cached (used for oldest-first eviction) */
  cachedAt: Date;
  /** When the entry expires; entries without an expiry are never aged out */
  expiresAt?: Date;
  /** ETag for conditional requests */
  etag?: string;
  /** ID of the upstream that provided the data */
  upstreamId: string;
  /** Whether the entry is stale but still usable */
  stale?: boolean;
}
|
||||
|
||||
/**
 * Default cache configuration values.
 * Matches the per-field defaults documented on IUpstreamCacheConfig.
 */
export const DEFAULT_CACHE_CONFIG: IUpstreamCacheConfig = {
  enabled: true,
  defaultTtlSeconds: 300, // 5 minutes
  immutableTtlSeconds: 2592000, // 30 days
  staleWhileRevalidate: true,
  staleMaxAgeSeconds: 3600, // 1 hour
  negativeCacheTtlSeconds: 60, // 1 minute
};
|
||||
|
||||
/**
 * Default resilience configuration values.
 * Matches the per-field defaults documented on IUpstreamResilienceConfig.
 */
export const DEFAULT_RESILIENCE_CONFIG: IUpstreamResilienceConfig = {
  timeoutMs: 30000, // 30 s request timeout
  maxRetries: 3,
  retryDelayMs: 1000, // initial backoff delay
  retryMaxDelayMs: 30000, // backoff cap
  circuitBreakerThreshold: 5, // failures before opening
  circuitBreakerResetMs: 30000, // open -> half-open after 30 s
};
|
||||
Reference in New Issue
Block a user