/**
 * Comprehensive Logging API Example
 * (elasticsearch/ts/examples/logging/logging-example.ts)
 *
 * Demonstrates enterprise-grade structured log ingestion: enrichment,
 * sensitive-data sanitization, sampling strategies, dynamic tagging, ILM,
 * metric extraction from logs, batching, and statistics/monitoring.
 */
import {
  createConfig,
  ElasticsearchConnectionManager,
  LogLevel,
  LogDestination,
  addHostInfo,
  addEnvironment,
  addServiceInfo,
  addTimestamp,
  sanitizeSensitiveData,
  addDynamicTags,
  chainEnrichers,
} from '../../index.js';
import type { LogEntry } from '../../index.js';
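// A minimal convenience facade (illustrative sketch only, not part of the library API):
// an application might hide LogDestination.send() behind small helpers so call sites
// don't assemble LogEntry objects by hand. `logError` is a hypothetical name; it assumes
// the entry shape demonstrated in the steps below and uses only the send() call shown there.
async function logError(dest: LogDestination, message: string, error: Error): Promise<void> {
  const entry: LogEntry = {
    timestamp: new Date().toISOString(),
    level: 'ERROR',
    message,
    error: { name: error.name, message: error.message, stack: error.stack },
  };
  await dest.send(entry);
}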
async function main() {
  console.log('=== Logging API Example ===\n');

  // ============================================================================
  // Step 1: Configuration
  // ============================================================================
  console.log('Step 1: Configuring Elasticsearch connection...');
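  // Note: .fromEnv() presumably reads connection settings from environment variables
  // first; the explicit calls below provide local-development fallbacks (localhost:9200,
  // elastic/changeme) when those variables are not set.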
  const config = createConfig()
    .fromEnv()
    .nodes(process.env.ELASTICSEARCH_URL || 'http://localhost:9200')
    .basicAuth(
      process.env.ELASTICSEARCH_USERNAME || 'elastic',
      process.env.ELASTICSEARCH_PASSWORD || 'changeme'
    )
    .timeout(30000)
    .retries(3)
    .logLevel(LogLevel.INFO)
    .enableMetrics(true)
    .enableTracing(true, { serviceName: 'logging-example', serviceVersion: '1.0.0' })
    .build();

  // ============================================================================
  // Step 2: Initialize Connection
  // ============================================================================
  console.log('Step 2: Initializing connection manager...');
  const connectionManager = ElasticsearchConnectionManager.getInstance(config);
  await connectionManager.initialize();
  console.log('✓ Connection manager initialized\n');
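  // ElasticsearchConnectionManager.getInstance() returns a singleton, so the log
  // destinations created in the steps below presumably share this one connection pool.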
  // ============================================================================
  // Step 3: Basic Log Destination
  // ============================================================================
  console.log('Step 3: Creating basic log destination...');
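  // Batching: entries are buffered and presumably written in bulk once batchSize (50)
  // logs accumulate or flushIntervalMs (3 s) elapses, whichever comes first;
  // autoCreateTemplate presumably provisions an index template for the daily indices.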
  const basicLogDest = new LogDestination({
    indexPattern: 'logs-example-basic-{now/d}',
    batchSize: 50,
    flushIntervalMs: 3000,
    autoCreateTemplate: true,
  });
  await basicLogDest.initialize();
  console.log('✓ Basic log destination initialized');

  // Send basic logs
  await basicLogDest.send({
    timestamp: new Date().toISOString(),
    level: 'INFO',
    message: 'Application started',
    service: 'example-app',
    version: '1.0.0',
  });
  await basicLogDest.send({
    timestamp: new Date().toISOString(),
    level: 'WARN',
    message: 'High memory usage detected',
    metadata: {
      memoryUsage: '85%',
      threshold: '80%',
    },
  });
  await basicLogDest.send({
    timestamp: new Date().toISOString(),
    level: 'ERROR',
    message: 'Failed to connect to database',
    error: {
      name: 'ConnectionError',
      message: 'ECONNREFUSED',
      stack: 'Error: ECONNREFUSED\n at ...',
      code: 'ECONNREFUSED',
    },
  });
  await basicLogDest.flush();
  console.log('✓ Basic logs sent (3 logs)');
  console.log(' Stats:', basicLogDest.getStats());
  console.log();

  // ============================================================================
  // Step 4: Log Destination with Enrichers
  // ============================================================================
  console.log('Step 4: Creating log destination with enrichers...');
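  // Enrichers presumably run in array order on every entry before it is indexed: the
  // timestamp/host/environment/service fields are added first, then the listed
  // metadata paths are redacted by sanitizeSensitiveData.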
  const enrichedLogDest = new LogDestination({
    indexPattern: 'logs-example-enriched-{now/d}',
    batchSize: 100,
    flushIntervalMs: 5000,
    enrichers: [
      addTimestamp,
      addHostInfo,
      addEnvironment,
      addServiceInfo,
      sanitizeSensitiveData([
        { path: 'metadata.password' },
        { path: 'metadata.apiKey' },
        { path: 'metadata.token' },
      ]),
    ],
  });
  await enrichedLogDest.initialize();
  console.log('✓ Enriched log destination initialized');

  // Send logs that will be enriched
  await enrichedLogDest.send({
    timestamp: new Date().toISOString(),
    level: 'INFO',
    message: 'User authenticated',
    metadata: {
      userId: 'user-123',
      username: 'john.doe',
      password: 'supersecret', // Will be sanitized
      ipAddress: '192.168.1.100',
    },
  });
  await enrichedLogDest.send({
    timestamp: new Date().toISOString(),
    level: 'INFO',
    message: 'API request processed',
    metadata: {
      method: 'POST',
      path: '/api/users',
      statusCode: 201,
      apiKey: 'sk-1234567890', // Will be sanitized
      duration: 45,
    },
  });
  await enrichedLogDest.flush();
  console.log('✓ Enriched logs sent (2 logs)');
  console.log(' Logs enriched with: timestamp, host, environment, service info');
  console.log(' Sensitive data sanitized: password, apiKey');
  console.log();

  // ============================================================================
  // Step 5: Sampling Strategies
  // ============================================================================
  console.log('Step 5: Demonstrating sampling strategies...');

  // 5.1: Errors-only sampling
  console.log('5.1: Errors-only sampling');
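  // 'errors-only' sampling: non-error entries are dropped before indexing and are
  // counted in the destination's totalSampled statistic (printed below).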
  const errorsOnlyDest = new LogDestination({
    indexPattern: 'logs-example-errors-{now/d}',
    batchSize: 50,
    flushIntervalMs: 3000,
    sampling: {
      strategy: 'errors-only',
    },
  });
  await errorsOnlyDest.initialize();

  // Send mixed logs
  await errorsOnlyDest.send({
    timestamp: new Date().toISOString(),
    level: 'INFO',
    message: 'This will be sampled out',
  });
  await errorsOnlyDest.send({
    timestamp: new Date().toISOString(),
    level: 'ERROR',
    message: 'This error will be kept',
    error: { name: 'Error', message: 'Something went wrong' },
  });
  await errorsOnlyDest.flush();
  console.log(' Sent 2 logs (1 INFO, 1 ERROR)');
  const errorsStats = errorsOnlyDest.getStats();
  console.log(` Indexed: ${errorsStats.totalSuccessful}, Sampled out: ${errorsStats.totalSampled}`);
  console.log();

  // 5.2: Percentage sampling
  console.log('5.2: Percentage sampling (10%)');
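  // Percentage sampling: roughly 10% of entries are indexed; alwaysSampleErrors makes
  // ERROR entries bypass the sampler entirely, as the error log below demonstrates.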
  const percentageDest = new LogDestination({
    indexPattern: 'logs-example-percentage-{now/d}',
    batchSize: 50,
    flushIntervalMs: 3000,
    sampling: {
      strategy: 'percentage',
      percentage: 10,
      alwaysSampleErrors: true,
    },
  });
  await percentageDest.initialize();

  // Send many logs
  for (let i = 0; i < 100; i++) {
    await percentageDest.send({
      timestamp: new Date().toISOString(),
      level: 'INFO',
      message: `Log ${i}`,
    });
  }

  // Send an error (should always be kept)
  await percentageDest.send({
    timestamp: new Date().toISOString(),
    level: 'ERROR',
    message: 'Error log (always kept)',
  });
  await percentageDest.flush();
  const percentageStats = percentageDest.getStats();
  console.log(` Sent 101 logs (100 INFO, 1 ERROR)`);
  console.log(` Indexed: ${percentageStats.totalSuccessful} (~10% of INFO + 1 ERROR)`);
  console.log(` Sampled out: ${percentageStats.totalSampled}`);
  console.log();

  // ============================================================================
  // Step 6: Dynamic Tags
  // ============================================================================
  console.log('Step 6: Adding dynamic tags based on content...');
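  // Each tagging rule's condition runs against every entry; matching rules presumably
  // append their tag to the document (e.g. a tags field) before it is indexed.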
  const taggedDest = new LogDestination({
    indexPattern: 'logs-example-tagged-{now/d}',
    batchSize: 50,
    flushIntervalMs: 3000,
    enrichers: [
      addDynamicTags([
        {
          condition: (entry) => entry.level === 'ERROR',
          tag: 'alert',
        },
        {
          condition: (entry) =>
            entry.message.toLowerCase().includes('payment') ||
            entry.message.toLowerCase().includes('transaction'),
          tag: 'financial',
        },
        {
          condition: (entry) =>
            entry.metadata?.statusCode !== undefined && (entry.metadata.statusCode as number) >= 500,
          tag: 'server-error',
        },
      ]),
    ],
  });
  await taggedDest.initialize();
  await taggedDest.send({
    timestamp: new Date().toISOString(),
    level: 'INFO',
    message: 'Payment processed successfully',
    metadata: { amount: 99.99, currency: 'USD', statusCode: 200 },
  });
  await taggedDest.send({
    timestamp: new Date().toISOString(),
    level: 'ERROR',
    message: 'Transaction failed',
    metadata: { statusCode: 500 },
    error: { name: 'PaymentError', message: 'Card declined' },
  });
  await taggedDest.flush();
  console.log('✓ Tagged logs sent');
  console.log(' First log tagged: [financial]');
  console.log(' Second log tagged: [alert, financial, server-error]');
  console.log();

  // ============================================================================
  // Step 7: ILM (Index Lifecycle Management)
  // ============================================================================
  console.log('Step 7: Creating log destination with ILM policy...');
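  // The ILM policy below is presumably created (or updated) in Elasticsearch during
  // initialize() and attached to this destination's indices: 7 days in the hot phase,
  // 30 days warm, deletion after 90 days, with rollover at 50 GB or 1 day.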
  const ilmDest = new LogDestination({
    indexPattern: 'logs-example-ilm-{now/d}',
    batchSize: 50,
    flushIntervalMs: 3000,
    ilm: {
      name: 'logs-example-policy',
      hotDuration: '7d',
      warmDuration: '30d',
      deleteDuration: '90d',
      rollover: {
        maxSize: '50gb',
        maxAge: '1d',
      },
    },
  });
  await ilmDest.initialize();
  console.log('✓ ILM policy created');
  console.log(' Hot: 7 days → Warm: 30 days → Delete: 90 days');
  console.log(' Rollover: 50GB or 1 day');
  console.log();

  // ============================================================================
  // Step 8: Metric Extraction
  // ============================================================================
  console.log('Step 8: Extracting metrics from logs...');
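  // Metric extraction: each definition presumably reads the dotted field path from
  // matching entries and records it as a histogram or counter, labelled with the
  // values found at the listed metadata/error paths.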
  const metricsDest = new LogDestination({
    indexPattern: 'logs-example-metrics-{now/d}',
    batchSize: 50,
    flushIntervalMs: 3000,
    metrics: [
      {
        name: 'api_response_time',
        field: 'metrics.duration',
        type: 'histogram',
        labels: ['metadata.method', 'metadata.statusCode'],
      },
      {
        name: 'errors_total',
        field: 'level',
        type: 'counter',
        labels: ['error.name'],
      },
    ],
  });
  await metricsDest.initialize();

  // Send logs with metrics
  await metricsDest.send({
    timestamp: new Date().toISOString(),
    level: 'INFO',
    message: 'API request completed',
    metadata: {
      method: 'GET',
      path: '/api/users',
      statusCode: 200,
    },
    metrics: {
      duration: 42,
    },
  });
  await metricsDest.send({
    timestamp: new Date().toISOString(),
    level: 'ERROR',
    message: 'API request failed',
    metadata: {
      method: 'POST',
      statusCode: 500,
    },
    error: {
      name: 'ValidationError',
      message: 'Invalid input',
    },
    metrics: {
      duration: 15,
    },
  });
  await metricsDest.flush();
  console.log('✓ Logs with metrics sent');
  console.log(' Metrics extracted: api_response_time, errors_total');
  console.log();

  // ============================================================================
  // Step 9: Chained Enrichers
  // ============================================================================
  console.log('Step 9: Using chained enrichers...');
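  // chainEnrichers composes several enrichers into a single enricher that applies them
  // left to right, so an entire enrichment pipeline can be registered as one entry.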
  const chainedDest = new LogDestination({
    indexPattern: 'logs-example-chained-{now/d}',
    batchSize: 50,
    flushIntervalMs: 3000,
    enrichers: [
      chainEnrichers(
        addTimestamp,
        addHostInfo,
        addEnvironment,
        addServiceInfo,
        sanitizeSensitiveData([{ path: 'metadata.secret' }]),
        addDynamicTags([
          {
            condition: (entry) => entry.level === 'ERROR',
            tag: 'needs-attention',
          },
        ])
      ),
    ],
  });
  await chainedDest.initialize();
  await chainedDest.send({
    timestamp: new Date().toISOString(),
    level: 'ERROR',
    message: 'Critical error occurred',
    metadata: {
      secret: 'should-be-redacted',
      component: 'auth-service',
    },
  });
  await chainedDest.flush();
  console.log('✓ Log sent through enrichment chain');
  console.log(' Applied: timestamp → host → env → service → sanitize → tags');
  console.log();

  // ============================================================================
  // Step 10: Statistics and Monitoring
  // ============================================================================
  console.log('Step 10: Reviewing statistics...\n');
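  // getStats() returns per-destination counters (presumably cumulative since
  // initialize()): successes, failures, sampled-out entries, queue-overflow drops,
  // the current queue size, and batch timing.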
  const stats = enrichedLogDest.getStats();
  console.log('Enriched Log Destination Stats:');
  console.log(` Total logs: ${stats.totalLogs}`);
  console.log(` Successfully indexed: ${stats.totalSuccessful}`);
  console.log(` Failed: ${stats.totalFailed}`);
  console.log(` Sampled out: ${stats.totalSampled}`);
  console.log(` Dropped (queue overflow): ${stats.totalDropped}`);
  console.log(` Current queue size: ${stats.queueSize}`);
  console.log(` Avg batch duration: ${stats.avgBatchDurationMs.toFixed(2)}ms`);
  if (stats.lastFlushAt) {
    console.log(` Last flush: ${stats.lastFlushAt.toISOString()}`);
  }
  console.log();

  // ============================================================================
  // Step 11: Cleanup
  // ============================================================================
  console.log('Step 11: Cleanup...');
  await basicLogDest.destroy();
  await enrichedLogDest.destroy();
  await errorsOnlyDest.destroy();
  await percentageDest.destroy();
  await taggedDest.destroy();
  await ilmDest.destroy();
  await metricsDest.destroy();
  await chainedDest.destroy();
  console.log('✓ All log destinations destroyed (flushed and closed)');
  await connectionManager.destroy();
  console.log('✓ Connection closed\n');

  console.log('=== Logging API Example Complete ===');
  console.log('\nKey Features Demonstrated:');
  console.log(' ✓ Basic structured logging');
  console.log(' ✓ Log enrichment (host, environment, service info)');
  console.log(' ✓ Sensitive data sanitization');
  console.log(' ✓ Sampling strategies (errors-only, percentage)');
  console.log(' ✓ Dynamic tagging based on content');
  console.log(' ✓ ILM (Index Lifecycle Management)');
  console.log(' ✓ Metric extraction from logs');
  console.log(' ✓ Chained enrichers');
  console.log(' ✓ Batching and auto-flushing');
  console.log(' ✓ Statistics and monitoring');
}

// Run the example
main().catch((error) => {
  console.error('Example failed:', error);
  process.exit(1);
});