BREAKING CHANGE(core): Refactor to v3: introduce modular core/domain architecture, plugin system, observability and strict TypeScript configuration; remove legacy classes
This commit is contained in:
136
ts/domain/logging/enrichers.ts
Normal file
136
ts/domain/logging/enrichers.ts
Normal file
@@ -0,0 +1,136 @@
|
||||
/**
|
||||
* Common log enrichers
|
||||
*/
|
||||
|
||||
import type { LogEntry, LogEnricher } from './types.js';
|
||||
import { hostname } from 'os';
|
||||
|
||||
/**
|
||||
* Add hostname to log entry
|
||||
*/
|
||||
export const addHostInfo: LogEnricher = (entry: LogEntry): LogEntry => {
|
||||
return {
|
||||
...entry,
|
||||
host: hostname(),
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Add environment from NODE_ENV
|
||||
*/
|
||||
export const addEnvironment: LogEnricher = (entry: LogEntry): LogEntry => {
|
||||
return {
|
||||
...entry,
|
||||
environment: process.env.NODE_ENV || 'development',
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Add service info from environment variables
|
||||
*/
|
||||
export const addServiceInfo: LogEnricher = (entry: LogEntry): LogEntry => {
|
||||
return {
|
||||
...entry,
|
||||
service: entry.service || process.env.SERVICE_NAME,
|
||||
version: entry.version || process.env.SERVICE_VERSION,
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Add process info (PID, memory, uptime)
|
||||
*/
|
||||
export const addProcessInfo: LogEnricher = (entry: LogEntry): LogEntry => {
|
||||
const memoryUsage = process.memoryUsage();
|
||||
|
||||
return {
|
||||
...entry,
|
||||
metadata: {
|
||||
...entry.metadata,
|
||||
process: {
|
||||
pid: process.pid,
|
||||
uptime: process.uptime(),
|
||||
memory: {
|
||||
heapUsed: memoryUsage.heapUsed,
|
||||
heapTotal: memoryUsage.heapTotal,
|
||||
external: memoryUsage.external,
|
||||
rss: memoryUsage.rss,
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Add timestamp if not present
|
||||
*/
|
||||
export const addTimestamp: LogEnricher = (entry: LogEntry): LogEntry => {
|
||||
return {
|
||||
...entry,
|
||||
timestamp: entry.timestamp || new Date().toISOString(),
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Sanitize sensitive data from log entry
|
||||
*/
|
||||
export const sanitizeSensitiveData = (
|
||||
patterns: Array<{ path: string; replacement?: string }>
|
||||
): LogEnricher => {
|
||||
return (entry: LogEntry): LogEntry => {
|
||||
const sanitized = { ...entry };
|
||||
|
||||
for (const { path, replacement = '[REDACTED]' } of patterns) {
|
||||
const parts = path.split('.');
|
||||
let current: any = sanitized;
|
||||
|
||||
for (let i = 0; i < parts.length - 1; i++) {
|
||||
if (current === null || current === undefined) break;
|
||||
current = current[parts[i] as string];
|
||||
}
|
||||
|
||||
if (current && parts.length > 0) {
|
||||
const lastPart = parts[parts.length - 1];
|
||||
if (lastPart && current[lastPart] !== undefined) {
|
||||
current[lastPart] = replacement;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return sanitized;
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Add custom tags based on log content
|
||||
*/
|
||||
export const addDynamicTags = (
|
||||
taggers: Array<{ condition: (entry: LogEntry) => boolean; tag: string }>
|
||||
): LogEnricher => {
|
||||
return (entry: LogEntry): LogEntry => {
|
||||
const tags = new Set(entry.tags || []);
|
||||
|
||||
for (const { condition, tag } of taggers) {
|
||||
if (condition(entry)) {
|
||||
tags.add(tag);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
...entry,
|
||||
tags: Array.from(tags),
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Chain multiple enrichers
|
||||
*/
|
||||
export const chainEnrichers = (...enrichers: LogEnricher[]): LogEnricher => {
|
||||
return async (entry: LogEntry): Promise<LogEntry> => {
|
||||
let enriched = entry;
|
||||
for (const enricher of enrichers) {
|
||||
enriched = await enricher(enriched);
|
||||
}
|
||||
return enriched;
|
||||
};
|
||||
};
|
||||
33
ts/domain/logging/index.ts
Normal file
33
ts/domain/logging/index.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
/**
|
||||
* Logging Domain Module
|
||||
*
|
||||
* Enterprise logging with structured log ingestion
|
||||
*/
|
||||
|
||||
// Main classes
|
||||
export { LogDestination, createLogDestination } from './log-destination.js';
|
||||
|
||||
// Enrichers
|
||||
export {
|
||||
addHostInfo,
|
||||
addEnvironment,
|
||||
addServiceInfo,
|
||||
addProcessInfo,
|
||||
addTimestamp,
|
||||
sanitizeSensitiveData,
|
||||
addDynamicTags,
|
||||
chainEnrichers,
|
||||
} from './enrichers.js';
|
||||
|
||||
// Types
|
||||
export type {
|
||||
LogEntry,
|
||||
LogEnricher,
|
||||
SamplingStrategy,
|
||||
SamplingConfig,
|
||||
ILMPolicyConfig,
|
||||
MetricExtraction,
|
||||
LogDestinationConfig,
|
||||
LogBatchResult,
|
||||
LogDestinationStats,
|
||||
} from './types.js';
|
||||
569
ts/domain/logging/log-destination.ts
Normal file
569
ts/domain/logging/log-destination.ts
Normal file
@@ -0,0 +1,569 @@
|
||||
import type {
|
||||
LogEntry,
|
||||
LogDestinationConfig,
|
||||
LogBatchResult,
|
||||
LogDestinationStats,
|
||||
SamplingConfig,
|
||||
ILMPolicyConfig,
|
||||
MetricExtraction,
|
||||
} from './types.js';
|
||||
import { ElasticsearchConnectionManager } from '../../core/connection/connection-manager.js';
|
||||
import { defaultLogger } from '../../core/observability/logger.js';
|
||||
import { defaultMetrics } from '../../core/observability/metrics.js';
|
||||
import { defaultTracing } from '../../core/observability/tracing.js';
|
||||
|
||||
/**
|
||||
* Enterprise-grade log destination for Elasticsearch
|
||||
*
|
||||
* Features:
|
||||
* - Batched bulk indexing with configurable batch size
|
||||
* - Automatic flushing at intervals
|
||||
* - Log enrichment pipeline
|
||||
* - Sampling strategies (all, errors-only, percentage, rate-limit)
|
||||
* - ILM (Index Lifecycle Management) integration
|
||||
* - Metric extraction from logs
|
||||
* - Auto index template creation
|
||||
* - Queue overflow protection
|
||||
* - Full observability integration
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const logDest = new LogDestination({
|
||||
* indexPattern: 'logs-myapp-{now/d}',
|
||||
* batchSize: 100,
|
||||
* flushIntervalMs: 5000,
|
||||
* sampling: {
|
||||
* strategy: 'percentage',
|
||||
* percentage: 10,
|
||||
* alwaysSampleErrors: true
|
||||
* },
|
||||
* enrichers: [addHostInfo, addEnvironment],
|
||||
* ilm: {
|
||||
* name: 'logs-policy',
|
||||
* hotDuration: '7d',
|
||||
* deleteDuration: '30d'
|
||||
* }
|
||||
* });
|
||||
*
|
||||
* await logDest.initialize();
|
||||
* await logDest.send({
|
||||
* timestamp: new Date().toISOString(),
|
||||
* level: 'INFO',
|
||||
* message: 'User logged in',
|
||||
* metadata: { userId: '123' }
|
||||
* });
|
||||
* ```
|
||||
*/
|
||||
export class LogDestination {
|
||||
private config: Required<LogDestinationConfig>;
|
||||
private queue: LogEntry[] = [];
|
||||
private flushTimer?: NodeJS.Timeout;
|
||||
private stats: LogDestinationStats = {
|
||||
totalLogs: 0,
|
||||
totalSuccessful: 0,
|
||||
totalFailed: 0,
|
||||
totalSampled: 0,
|
||||
totalDropped: 0,
|
||||
queueSize: 0,
|
||||
avgBatchDurationMs: 0,
|
||||
};
|
||||
private batchDurations: number[] = [];
|
||||
private lastRateLimitReset = Date.now();
|
||||
private rateLimitCounter = 0;
|
||||
private initialized = false;
|
||||
|
||||
constructor(config: LogDestinationConfig) {
|
||||
this.config = {
|
||||
indexPattern: config.indexPattern,
|
||||
batchSize: config.batchSize ?? 100,
|
||||
flushIntervalMs: config.flushIntervalMs ?? 5000,
|
||||
maxQueueSize: config.maxQueueSize ?? 10000,
|
||||
enrichers: config.enrichers ?? [],
|
||||
sampling: config.sampling ?? { strategy: 'all', alwaysSampleErrors: true },
|
||||
ilm: config.ilm,
|
||||
metrics: config.metrics ?? [],
|
||||
autoCreateTemplate: config.autoCreateTemplate ?? true,
|
||||
templateSettings: config.templateSettings ?? {
|
||||
numberOfShards: 1,
|
||||
numberOfReplicas: 1,
|
||||
refreshInterval: '5s',
|
||||
codec: 'best_compression',
|
||||
},
|
||||
templateMappings: config.templateMappings ?? {},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new log destination
|
||||
*/
|
||||
static create(config: LogDestinationConfig): LogDestination {
|
||||
return new LogDestination(config);
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize the log destination (create template, ILM policy)
|
||||
*/
|
||||
async initialize(): Promise<void> {
|
||||
if (this.initialized) {
|
||||
return;
|
||||
}
|
||||
|
||||
const span = defaultTracing.createSpan('logDestination.initialize');
|
||||
|
||||
try {
|
||||
// Create ILM policy if configured
|
||||
if (this.config.ilm) {
|
||||
await this.createILMPolicy(this.config.ilm);
|
||||
}
|
||||
|
||||
// Create index template if enabled
|
||||
if (this.config.autoCreateTemplate) {
|
||||
await this.createIndexTemplate();
|
||||
}
|
||||
|
||||
// Start flush timer
|
||||
this.startFlushTimer();
|
||||
|
||||
this.initialized = true;
|
||||
defaultLogger.info('Log destination initialized', {
|
||||
indexPattern: this.config.indexPattern,
|
||||
batchSize: this.config.batchSize,
|
||||
flushIntervalMs: this.config.flushIntervalMs,
|
||||
});
|
||||
|
||||
span.end();
|
||||
} catch (error) {
|
||||
defaultLogger.error('Failed to initialize log destination', {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
});
|
||||
span.recordException(error as Error);
|
||||
span.end();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Send a log entry
|
||||
*/
|
||||
async send(entry: LogEntry): Promise<void> {
|
||||
this.stats.totalLogs++;
|
||||
|
||||
// Apply sampling
|
||||
if (!this.shouldSample(entry)) {
|
||||
this.stats.totalSampled++;
|
||||
return;
|
||||
}
|
||||
|
||||
// Apply enrichers
|
||||
let enrichedEntry = entry;
|
||||
for (const enricher of this.config.enrichers) {
|
||||
enrichedEntry = await enricher(enrichedEntry);
|
||||
}
|
||||
|
||||
// Extract metrics if configured
|
||||
if (this.config.metrics.length > 0) {
|
||||
this.extractMetrics(enrichedEntry);
|
||||
}
|
||||
|
||||
// Check queue size
|
||||
if (this.queue.length >= this.config.maxQueueSize) {
|
||||
this.stats.totalDropped++;
|
||||
defaultLogger.warn('Log queue overflow, dropping log', {
|
||||
queueSize: this.queue.length,
|
||||
maxQueueSize: this.config.maxQueueSize,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Add to queue
|
||||
this.queue.push(enrichedEntry);
|
||||
this.stats.queueSize = this.queue.length;
|
||||
|
||||
// Flush if batch size reached
|
||||
if (this.queue.length >= this.config.batchSize) {
|
||||
await this.flush();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Send multiple log entries
|
||||
*/
|
||||
async sendBatch(entries: LogEntry[]): Promise<void> {
|
||||
for (const entry of entries) {
|
||||
await this.send(entry);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Flush pending logs immediately
|
||||
*/
|
||||
async flush(): Promise<LogBatchResult | null> {
|
||||
if (this.queue.length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const span = defaultTracing.createSpan('logDestination.flush', {
|
||||
'batch.size': this.queue.length,
|
||||
});
|
||||
|
||||
const startTime = Date.now();
|
||||
const batch = this.queue.splice(0, this.config.batchSize);
|
||||
this.stats.queueSize = this.queue.length;
|
||||
|
||||
try {
|
||||
const client = ElasticsearchConnectionManager.getInstance().getClient();
|
||||
|
||||
// Build bulk operations
|
||||
const operations = batch.flatMap((entry) => [
|
||||
{ index: { _index: this.resolveIndexName() } },
|
||||
entry,
|
||||
]);
|
||||
|
||||
// Execute bulk request
|
||||
const result = await client.bulk({ operations });
|
||||
|
||||
const durationMs = Date.now() - startTime;
|
||||
this.batchDurations.push(durationMs);
|
||||
if (this.batchDurations.length > 100) {
|
||||
this.batchDurations.shift();
|
||||
}
|
||||
this.stats.avgBatchDurationMs =
|
||||
this.batchDurations.reduce((a, b) => a + b, 0) / this.batchDurations.length;
|
||||
this.stats.lastFlushAt = new Date();
|
||||
|
||||
// Process results
|
||||
const errors: Array<{ log: LogEntry; error: string }> = [];
|
||||
let successful = 0;
|
||||
let failed = 0;
|
||||
|
||||
if (result.items) {
|
||||
result.items.forEach((item, index) => {
|
||||
const operation = item.index || item.create || item.update;
|
||||
if (operation && operation.error) {
|
||||
failed++;
|
||||
errors.push({
|
||||
log: batch[index] as LogEntry,
|
||||
error: JSON.stringify(operation.error),
|
||||
});
|
||||
} else {
|
||||
successful++;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
this.stats.totalSuccessful += successful;
|
||||
this.stats.totalFailed += failed;
|
||||
|
||||
// Record metrics
|
||||
defaultMetrics.requestsTotal.inc({ operation: 'log_flush', result: 'success' });
|
||||
defaultMetrics.requestDuration.observe({ operation: 'log_flush' }, durationMs);
|
||||
|
||||
if (failed > 0) {
|
||||
defaultLogger.warn('Some logs failed to index', {
|
||||
successful,
|
||||
failed,
|
||||
errors: errors.slice(0, 5), // Log first 5 errors
|
||||
});
|
||||
}
|
||||
|
||||
span.setAttributes({
|
||||
'batch.successful': successful,
|
||||
'batch.failed': failed,
|
||||
'batch.duration_ms': durationMs,
|
||||
});
|
||||
span.end();
|
||||
|
||||
return {
|
||||
successful,
|
||||
failed,
|
||||
total: batch.length,
|
||||
errors: errors.length > 0 ? errors : undefined,
|
||||
durationMs,
|
||||
};
|
||||
} catch (error) {
|
||||
this.stats.totalFailed += batch.length;
|
||||
defaultMetrics.requestErrors.inc({ operation: 'log_flush' });
|
||||
|
||||
defaultLogger.error('Failed to flush logs', {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
batchSize: batch.length,
|
||||
});
|
||||
|
||||
span.recordException(error as Error);
|
||||
span.end();
|
||||
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get destination statistics
|
||||
*/
|
||||
getStats(): LogDestinationStats {
|
||||
return { ...this.stats };
|
||||
}
|
||||
|
||||
/**
|
||||
* Destroy the destination (flush pending logs and stop timer)
|
||||
*/
|
||||
async destroy(): Promise<void> {
|
||||
if (this.flushTimer) {
|
||||
clearInterval(this.flushTimer);
|
||||
}
|
||||
|
||||
// Flush remaining logs
|
||||
if (this.queue.length > 0) {
|
||||
await this.flush();
|
||||
}
|
||||
|
||||
this.initialized = false;
|
||||
defaultLogger.info('Log destination destroyed', {
|
||||
stats: this.stats,
|
||||
});
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Private Methods
|
||||
// ============================================================================
|
||||
|
||||
private startFlushTimer(): void {
|
||||
this.flushTimer = setInterval(async () => {
|
||||
if (this.queue.length > 0) {
|
||||
try {
|
||||
await this.flush();
|
||||
} catch (error) {
|
||||
defaultLogger.error('Flush timer error', {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
});
|
||||
}
|
||||
}
|
||||
}, this.config.flushIntervalMs);
|
||||
}
|
||||
|
||||
private shouldSample(entry: LogEntry): boolean {
|
||||
const sampling = this.config.sampling;
|
||||
|
||||
// Always sample errors if configured
|
||||
if (sampling.alwaysSampleErrors && entry.level === 'ERROR') {
|
||||
return true;
|
||||
}
|
||||
|
||||
switch (sampling.strategy) {
|
||||
case 'all':
|
||||
return true;
|
||||
|
||||
case 'errors-only':
|
||||
return entry.level === 'ERROR';
|
||||
|
||||
case 'percentage':
|
||||
return Math.random() * 100 < (sampling.percentage ?? 100);
|
||||
|
||||
case 'rate-limit': {
|
||||
const now = Date.now();
|
||||
if (now - this.lastRateLimitReset >= 1000) {
|
||||
this.lastRateLimitReset = now;
|
||||
this.rateLimitCounter = 0;
|
||||
}
|
||||
this.rateLimitCounter++;
|
||||
return this.rateLimitCounter <= (sampling.maxLogsPerSecond ?? 100);
|
||||
}
|
||||
|
||||
default:
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
private resolveIndexName(): string {
|
||||
// Support date math in index pattern
|
||||
const pattern = this.config.indexPattern;
|
||||
|
||||
// Simple date math support for {now/d}
|
||||
if (pattern.includes('{now/d}')) {
|
||||
const date = new Date().toISOString().split('T')[0];
|
||||
return pattern.replace('{now/d}', date);
|
||||
}
|
||||
|
||||
// Support {now/M} for month
|
||||
if (pattern.includes('{now/M}')) {
|
||||
const date = new Date();
|
||||
const month = `${date.getFullYear()}.${String(date.getMonth() + 1).padStart(2, '0')}`;
|
||||
return pattern.replace('{now/M}', month);
|
||||
}
|
||||
|
||||
return pattern;
|
||||
}
|
||||
|
||||
private extractMetrics(entry: LogEntry): void {
|
||||
for (const metric of this.config.metrics) {
|
||||
const value = this.getNestedValue(entry, metric.field);
|
||||
if (value === undefined) continue;
|
||||
|
||||
const labels: Record<string, string> = {};
|
||||
if (metric.labels) {
|
||||
for (const labelField of metric.labels) {
|
||||
const labelValue = this.getNestedValue(entry, labelField);
|
||||
if (labelValue !== undefined) {
|
||||
labels[labelField] = String(labelValue);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
switch (metric.type) {
|
||||
case 'counter':
|
||||
defaultMetrics.requestsTotal.inc({ ...labels, metric: metric.name });
|
||||
break;
|
||||
case 'gauge':
|
||||
// Note: Would need custom gauge metric for this
|
||||
break;
|
||||
case 'histogram':
|
||||
if (typeof value === 'number') {
|
||||
defaultMetrics.requestDuration.observe({ ...labels, metric: metric.name }, value);
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private getNestedValue(obj: unknown, path: string): unknown {
|
||||
const parts = path.split('.');
|
||||
let current = obj;
|
||||
|
||||
for (const part of parts) {
|
||||
if (current === null || current === undefined || typeof current !== 'object') {
|
||||
return undefined;
|
||||
}
|
||||
current = (current as Record<string, unknown>)[part];
|
||||
}
|
||||
|
||||
return current;
|
||||
}
|
||||
|
||||
private async createILMPolicy(ilm: ILMPolicyConfig): Promise<void> {
|
||||
const client = ElasticsearchConnectionManager.getInstance().getClient();
|
||||
|
||||
const policy = {
|
||||
policy: {
|
||||
phases: {
|
||||
...(ilm.hotDuration && {
|
||||
hot: {
|
||||
actions: {
|
||||
...(ilm.rollover && { rollover: ilm.rollover }),
|
||||
},
|
||||
},
|
||||
}),
|
||||
...(ilm.warmDuration && {
|
||||
warm: {
|
||||
min_age: ilm.warmDuration,
|
||||
actions: {
|
||||
shrink: { number_of_shards: 1 },
|
||||
forcemerge: { max_num_segments: 1 },
|
||||
},
|
||||
},
|
||||
}),
|
||||
...(ilm.coldDuration && {
|
||||
cold: {
|
||||
min_age: ilm.coldDuration,
|
||||
actions: {
|
||||
freeze: {},
|
||||
},
|
||||
},
|
||||
}),
|
||||
...(ilm.deleteDuration && {
|
||||
delete: {
|
||||
min_age: ilm.deleteDuration,
|
||||
actions: {
|
||||
delete: {},
|
||||
},
|
||||
},
|
||||
}),
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
try {
|
||||
await client.ilm.putLifecycle({
|
||||
name: ilm.name,
|
||||
...policy,
|
||||
});
|
||||
defaultLogger.info('ILM policy created', { policy: ilm.name });
|
||||
} catch (error) {
|
||||
defaultLogger.warn('Failed to create ILM policy (may already exist)', {
|
||||
policy: ilm.name,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
private async createIndexTemplate(): Promise<void> {
|
||||
const client = ElasticsearchConnectionManager.getInstance().getClient();
|
||||
|
||||
const templateName = `logs-${this.config.indexPattern.split('-')[1] || 'default'}-template`;
|
||||
const indexPattern = this.config.indexPattern.replace(/\{.*?\}/g, '*');
|
||||
|
||||
const template = {
|
||||
index_patterns: [indexPattern],
|
||||
template: {
|
||||
settings: {
|
||||
number_of_shards: this.config.templateSettings.numberOfShards,
|
||||
number_of_replicas: this.config.templateSettings.numberOfReplicas,
|
||||
refresh_interval: this.config.templateSettings.refreshInterval,
|
||||
codec: this.config.templateSettings.codec,
|
||||
...(this.config.ilm && {
|
||||
'index.lifecycle.name': this.config.ilm.name,
|
||||
'index.lifecycle.rollover_alias': indexPattern,
|
||||
}),
|
||||
},
|
||||
mappings: {
|
||||
properties: {
|
||||
timestamp: { type: 'date' },
|
||||
level: { type: 'keyword' },
|
||||
message: { type: 'text' },
|
||||
correlationId: { type: 'keyword' },
|
||||
service: { type: 'keyword' },
|
||||
version: { type: 'keyword' },
|
||||
host: { type: 'keyword' },
|
||||
environment: { type: 'keyword' },
|
||||
tags: { type: 'keyword' },
|
||||
metadata: { type: 'object', enabled: false },
|
||||
error: {
|
||||
properties: {
|
||||
name: { type: 'keyword' },
|
||||
message: { type: 'text' },
|
||||
stack: { type: 'text' },
|
||||
code: { type: 'keyword' },
|
||||
},
|
||||
},
|
||||
metrics: {
|
||||
properties: {
|
||||
duration: { type: 'long' },
|
||||
memory: { type: 'long' },
|
||||
cpu: { type: 'float' },
|
||||
},
|
||||
},
|
||||
...this.config.templateMappings,
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
try {
|
||||
await client.indices.putIndexTemplate({
|
||||
name: templateName,
|
||||
...template,
|
||||
});
|
||||
defaultLogger.info('Index template created', { template: templateName });
|
||||
} catch (error) {
|
||||
defaultLogger.warn('Failed to create index template (may already exist)', {
|
||||
template: templateName,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new log destination
|
||||
*/
|
||||
export function createLogDestination(config: LogDestinationConfig): LogDestination {
|
||||
return new LogDestination(config);
|
||||
}
|
||||
221
ts/domain/logging/types.ts
Normal file
221
ts/domain/logging/types.ts
Normal file
@@ -0,0 +1,221 @@
|
||||
/**
|
||||
* Logging domain types for structured log ingestion into Elasticsearch
|
||||
*/
|
||||
|
||||
import type { LogLevel } from '../../core/observability/logger.js';
|
||||
|
||||
/**
 * Log entry structure.
 *
 * The minimal required shape is { timestamp, level, message }; all other
 * fields are optional and typically filled in by enrichers
 * (addHostInfo, addEnvironment, addServiceInfo, etc.).
 */
export interface LogEntry {
  /** ISO timestamp (addTimestamp fills this in when missing) */
  timestamp: string;

  /** Log level (ERROR entries get special treatment by sampling) */
  level: LogLevel;

  /** Log message */
  message: string;

  /** Optional correlation ID for request tracing */
  correlationId?: string;

  /** Service name */
  service?: string;

  /** Service version */
  version?: string;

  /** Hostname or container ID */
  host?: string;

  /** Environment (production, staging, development) */
  environment?: string;

  /**
   * Additional structured data. Note: the default index template maps
   * this as object with enabled: false, i.e. stored but not indexed.
   */
  metadata?: Record<string, unknown>;

  /** Error details if log is error level */
  error?: {
    name: string;
    message: string;
    stack?: string;
    code?: string;
  };

  /** Performance metrics */
  metrics?: {
    duration?: number;
    memory?: number;
    cpu?: number;
  };

  /** Tags for categorization */
  tags?: string[];
}
|
||||
|
||||
/**
 * Log enrichment function.
 *
 * Takes a log entry and returns a (possibly new) entry, synchronously or
 * asynchronously. LogDestination.send applies configured enrichers in
 * order, awaiting each result, before the entry is queued.
 */
export type LogEnricher = (entry: LogEntry) => LogEntry | Promise<LogEntry>;
|
||||
|
||||
/**
 * Log sampling strategy:
 * - 'all': index every entry
 * - 'errors-only': index only ERROR-level entries
 * - 'percentage': index a random percentage of entries
 * - 'rate-limit': index at most N entries per second
 */
export type SamplingStrategy = 'all' | 'errors-only' | 'percentage' | 'rate-limit';

/**
 * Sampling configuration.
 */
export interface SamplingConfig {
  /** Sampling strategy */
  strategy: SamplingStrategy;

  /** For percentage strategy: 0-100 (treated as 100 when omitted) */
  percentage?: number;

  /** For rate-limit strategy: logs per second (treated as 100 when omitted) */
  maxLogsPerSecond?: number;

  /** Always sample ERROR-level entries regardless of strategy */
  alwaysSampleErrors?: boolean;
}
|
||||
|
||||
/**
 * ILM (Index Lifecycle Management) policy configuration.
 *
 * Each phase is optional; only the phases with a duration set are
 * included in the generated policy. Durations use Elasticsearch time
 * syntax (e.g. "7d", "30d").
 */
export interface ILMPolicyConfig {
  /** Policy name */
  name: string;

  /** Hot phase: how long to keep in hot tier */
  hotDuration?: string; // e.g., "7d"

  /** Warm phase: move to warm tier after */
  warmDuration?: string; // e.g., "30d"

  /** Cold phase: move to cold tier after */
  coldDuration?: string; // e.g., "90d"

  /** Delete phase: delete after */
  deleteDuration?: string; // e.g., "365d"

  /** Rollover settings (only applied in the hot phase) */
  rollover?: {
    maxSize?: string; // e.g., "50gb"
    maxAge?: string; // e.g., "1d"
    maxDocs?: number;
  };
}
|
||||
|
||||
/**
 * Metric extraction pattern: pull a numeric or labelled value out of
 * each log entry and record it as an application metric.
 *
 * Note: 'gauge' is accepted by the type but currently not emitted by
 * LogDestination.extractMetrics (no gauge instrument is wired up).
 */
export interface MetricExtraction {
  /** Metric name */
  name: string;

  /** Field path to extract (dot notation, e.g. "metrics.duration") */
  field: string;

  /** Metric type */
  type: 'counter' | 'gauge' | 'histogram';

  /** Optional labels to extract (each is a dot-notation field path) */
  labels?: string[];
}
|
||||
|
||||
/**
 * Log destination configuration.
 *
 * All optional settings are defaulted by the LogDestination constructor;
 * the defaults noted below reflect that constructor.
 */
export interface LogDestinationConfig {
  /** Index name pattern (supports {now/d} and {now/M} date math) */
  indexPattern: string;

  /** Batch size for bulk operations (default: 100) */
  batchSize?: number;

  /** Flush interval in milliseconds (default: 5000) */
  flushIntervalMs?: number;

  /** Maximum queue size before dropping logs (default: 10000) */
  maxQueueSize?: number;

  /** Enrichers to apply, in order (default: none) */
  enrichers?: LogEnricher[];

  /** Sampling configuration (default: { strategy: 'all', alwaysSampleErrors: true }) */
  sampling?: SamplingConfig;

  /** ILM policy (no default; omitted means no lifecycle management) */
  ilm?: ILMPolicyConfig;

  /** Metric extractions (default: none) */
  metrics?: MetricExtraction[];

  /** Auto-create index template (default: true) */
  autoCreateTemplate?: boolean;

  /** Custom index template settings (default: 1 shard, 1 replica, 5s refresh, best_compression) */
  templateSettings?: {
    numberOfShards?: number;
    numberOfReplicas?: number;
    refreshInterval?: string;
    codec?: 'default' | 'best_compression';
  };

  /** Custom index mappings merged over the built-in defaults */
  templateMappings?: Record<string, unknown>;
}
|
||||
|
||||
/**
 * Batch result for log ingestion (returned by LogDestination.flush).
 * Bulk indexing is partial-success: a batch may have both successful
 * and failed items.
 */
export interface LogBatchResult {
  /** Number of successfully indexed logs */
  successful: number;

  /** Number of failed logs */
  failed: number;

  /** Total logs in batch */
  total: number;

  /** Errors encountered (omitted when there were none) */
  errors?: Array<{
    log: LogEntry;
    error: string;
  }>;

  /** Time taken in milliseconds */
  durationMs: number;
}

/**
 * Log destination statistics (cumulative over the destination's lifetime,
 * except queueSize / avgBatchDurationMs / lastFlushAt which are current).
 */
export interface LogDestinationStats {
  /** Total logs sent (including sampled-out and dropped) */
  totalLogs: number;

  /** Total logs successfully indexed */
  totalSuccessful: number;

  /** Total logs failed */
  totalFailed: number;

  /** Total logs sampled out */
  totalSampled: number;

  /** Total logs dropped due to queue overflow */
  totalDropped: number;

  /** Current queue size */
  queueSize: number;

  /** Average batch duration over the most recent flushes */
  avgBatchDurationMs: number;

  /** Last flush timestamp (unset until the first flush) */
  lastFlushAt?: Date;
}
|
||||
Reference in New Issue
Block a user