BREAKING CHANGE(core): Refactor to v3: introduce modular core/domain architecture, plugin system, observability and strict TypeScript configuration; remove legacy classes

This commit is contained in:
2025-11-29 18:32:00 +00:00
parent 53673e37cb
commit 7e89b6ebf5
68 changed files with 17020 additions and 720 deletions

View File

@@ -0,0 +1,329 @@
/**
* Complete Example - Enterprise Elasticsearch Client
*
* This example demonstrates:
* - Configuration with environment variables
* - Connection management with health checks
* - Document operations with sessions
* - Snapshot functionality
* - Error handling and observability
*/
import {
createConfig,
ElasticsearchConnectionManager,
LogLevel,
defaultLogger,
defaultMetricsCollector,
} from '../../core/index.js';
import { DocumentManager } from '../../domain/documents/index.js';
// ============================================================================
// Type Definitions
// ============================================================================
/**
 * Shape of a catalog document stored in the 'products' index.
 */
interface Product {
  name: string;
  description: string;
  // Unit price; rendered with a '$' prefix in the console output below.
  price: number;
  // Free-form category name; aggregated per-category in the snapshot step.
  category: string;
  inStock: boolean;
  tags: string[];
  createdAt: Date;
  updatedAt: Date;
}
/**
 * Aggregated analytics computed over all products during the snapshot step.
 */
interface ProductSnapshot {
  // Number of product documents seen by the snapshot iterator.
  totalProducts: number;
  // Mean price across all products (0 when the index is empty).
  averagePrice: number;
  // Product count keyed by category name.
  categoryCounts: Record<string, number>;
  // Number of products with inStock === false.
  outOfStockCount: number;
}
// ============================================================================
// Main Example
// ============================================================================
/**
 * Runs the complete enterprise-client walkthrough end to end:
 * configuration, connection management, document CRUD, session-based
 * batching, iteration, snapshot analytics, health/metrics, error
 * handling and cleanup.
 *
 * Any unexpected error propagates to the caller; the entry-point
 * wrapper at the bottom of this file logs it and exits non-zero.
 */
async function main() {
  // --------------------------------------------------------------------------
  // Step 1: Configuration
  // --------------------------------------------------------------------------
  console.log('🔧 Step 1: Creating configuration...\n');
  const config = createConfig()
    // Load from environment variables (ELASTICSEARCH_URL, etc.)
    .fromEnv()
    // Or specify directly
    .nodes(process.env.ELASTICSEARCH_URL || 'http://localhost:9200')
    .basicAuth(
      process.env.ELASTICSEARCH_USERNAME || 'elastic',
      process.env.ELASTICSEARCH_PASSWORD || 'changeme'
    )
    // Request settings
    .timeout(30000)
    .retries(3)
    .compression(true)
    // Connection pool
    .poolSize(10, 2)
    // Observability
    .logLevel(LogLevel.INFO)
    .enableRequestLogging(true)
    .enableMetrics(true)
    .enableTracing(true, {
      serviceName: 'product-catalog',
      serviceVersion: '1.0.0',
    })
    .build();
  console.log('✅ Configuration created successfully\n');
  // --------------------------------------------------------------------------
  // Step 2: Initialize Connection Manager
  // --------------------------------------------------------------------------
  console.log('🔌 Step 2: Initializing connection manager...\n');
  const connectionManager = ElasticsearchConnectionManager.getInstance(config);
  await connectionManager.initialize();
  console.log('✅ Connection established');
  console.log(` Health Status: ${connectionManager.getHealthStatus()}`);
  console.log(` Circuit State: ${connectionManager.getCircuitState()}\n`);
  // --------------------------------------------------------------------------
  // Step 3: Create Document Manager
  // --------------------------------------------------------------------------
  console.log('📦 Step 3: Creating document manager...\n');
  const productManager = new DocumentManager<Product>({
    index: 'products',
    connectionManager,
    autoCreateIndex: true,
  });
  await productManager.initialize();
  console.log('✅ Document manager initialized\n');
  // --------------------------------------------------------------------------
  // Step 4: Individual Document Operations
  // --------------------------------------------------------------------------
  console.log('📝 Step 4: Individual document operations...\n');
  // Create a product
  await productManager.create('prod-001', {
    name: 'Premium Widget',
    description: 'A high-quality widget for all your needs',
    price: 99.99,
    category: 'widgets',
    inStock: true,
    tags: ['premium', 'bestseller'],
    createdAt: new Date(),
    updatedAt: new Date(),
  });
  console.log(' ✓ Created product prod-001');
  // Upsert (create or update)
  await productManager.upsert('prod-002', {
    name: 'Deluxe Gadget',
    description: 'The ultimate gadget',
    price: 149.99,
    category: 'gadgets',
    inStock: true,
    tags: ['deluxe', 'featured'],
    createdAt: new Date(),
    updatedAt: new Date(),
  });
  console.log(' ✓ Upserted product prod-002');
  // Get a product
  const product = await productManager.get('prod-001');
  console.log(` ✓ Retrieved product: ${product?._source.name}`);
  // Update a product
  await productManager.update('prod-001', {
    price: 89.99, // Price reduction!
    updatedAt: new Date(),
  });
  console.log(' ✓ Updated product prod-001\n');
  // --------------------------------------------------------------------------
  // Step 5: Session-Based Batch Operations
  // --------------------------------------------------------------------------
  console.log('🔄 Step 5: Session-based batch operations...\n');
  const session = productManager.session({
    cleanupStale: true, // Delete documents not in this session
    batchSize: 100,
  });
  const batchResult = await session
    .start()
    .upsert('prod-003', {
      name: 'Standard Widget',
      description: 'A reliable widget',
      price: 49.99,
      category: 'widgets',
      inStock: true,
      tags: ['standard'],
      createdAt: new Date(),
      updatedAt: new Date(),
    })
    .upsert('prod-004', {
      name: 'Mini Gadget',
      description: 'Compact and efficient',
      price: 29.99,
      category: 'gadgets',
      inStock: false,
      tags: ['compact', 'mini'],
      createdAt: new Date(),
      updatedAt: new Date(),
    })
    .upsert('prod-005', {
      name: 'Mega Widget Pro',
      description: 'Professional grade widget',
      price: 199.99,
      category: 'widgets',
      inStock: true,
      tags: ['professional', 'premium'],
      createdAt: new Date(),
      updatedAt: new Date(),
    })
    .commit();
  console.log(` ✓ Batch operation completed:`);
  console.log(` - Successful: ${batchResult.successful}`);
  console.log(` - Failed: ${batchResult.failed}`);
  console.log(` - Time: ${batchResult.took}ms\n`);
  // --------------------------------------------------------------------------
  // Step 6: Iteration Over Documents
  // --------------------------------------------------------------------------
  console.log('🔍 Step 6: Iterating over documents...\n');
  let count = 0;
  for await (const doc of productManager.iterate()) {
    count++;
    console.log(` ${count}. ${doc._source.name} - $${doc._source.price}`);
  }
  console.log(`\n ✓ Iterated over ${count} documents\n`);
  // --------------------------------------------------------------------------
  // Step 7: Create Snapshot with Analytics
  // --------------------------------------------------------------------------
  console.log('📸 Step 7: Creating snapshot with analytics...\n');
  const snapshot = await productManager.snapshot<ProductSnapshot>(
    async (iterator, previousSnapshot) => {
      console.log(' 🔄 Processing snapshot...');
      let totalPrice = 0;
      let productCount = 0;
      const categoryCounts: Record<string, number> = {};
      let outOfStockCount = 0;
      for await (const doc of iterator) {
        productCount++;
        totalPrice += doc._source.price;
        const category = doc._source.category;
        categoryCounts[category] = (categoryCounts[category] || 0) + 1;
        if (!doc._source.inStock) {
          outOfStockCount++;
        }
      }
      const analytics: ProductSnapshot = {
        totalProducts: productCount,
        // Guard against division by zero when the index is empty.
        averagePrice: productCount > 0 ? totalPrice / productCount : 0,
        categoryCounts,
        outOfStockCount,
      };
      if (previousSnapshot) {
        console.log(` 📊 Previous snapshot had ${previousSnapshot.totalProducts} products`);
      }
      return analytics;
    }
  );
  console.log('\n ✅ Snapshot created:');
  console.log(` - Total Products: ${snapshot.data.totalProducts}`);
  console.log(` - Average Price: $${snapshot.data.averagePrice.toFixed(2)}`);
  console.log(` - Out of Stock: ${snapshot.data.outOfStockCount}`);
  console.log(` - Categories:`);
  for (const [category, count] of Object.entries(snapshot.data.categoryCounts)) {
    console.log(`${category}: ${count}`);
  }
  console.log(` - Processing Time: ${snapshot.processingTime}ms\n`);
  // --------------------------------------------------------------------------
  // Step 8: Health Check & Metrics
  // --------------------------------------------------------------------------
  console.log('❤️ Step 8: Health check and metrics...\n');
  const healthResult = await connectionManager.healthCheck();
  console.log(' Health Check:');
  console.log(` - Status: ${healthResult.status}`);
  console.log(` - Cluster Health: ${healthResult.clusterHealth}`);
  console.log(` - Active Nodes: ${healthResult.activeNodes}`);
  console.log(` - Response Time: ${healthResult.responseTimeMs}ms\n`);
  // Export metrics in Prometheus format
  const metricsExport = defaultMetricsCollector.export();
  console.log(' 📊 Metrics (sample):');
  console.log(metricsExport.split('\n').slice(0, 20).join('\n'));
  console.log(' ...\n');
  // --------------------------------------------------------------------------
  // Step 9: Error Handling Demo
  // --------------------------------------------------------------------------
  console.log('⚠️ Step 9: Error handling demo...\n');
  // BUGFIX: get() resolves to null for a missing document (see Step 4's
  // `product?._source` access) rather than throwing, so the previous
  // try/catch here never executed its log line. Check the result instead.
  const missing = await productManager.get('non-existent-id');
  if (missing == null) {
    console.log(' ✓ Gracefully handled non-existent document (returns null)\n');
  }
  try {
    const nonExistentManager = new DocumentManager<Product>({
      index: 'non-existent-index',
      connectionManager,
      autoCreateIndex: false,
    });
    await nonExistentManager.initialize();
  } catch (error) {
    // Narrow from `unknown` instead of typing the catch variable as `any`;
    // the client's errors carry a string `code` alongside `message`.
    const err = error as Error & { code?: string };
    console.log(` ✓ Caught expected error: ${err.message}`);
    console.log(` Error Code: ${err.code}\n`);
  }
  // --------------------------------------------------------------------------
  // Step 10: Cleanup
  // --------------------------------------------------------------------------
  console.log('🧹 Step 10: Cleanup...\n');
  // Optional: Delete the index
  // await productManager.deleteIndex();
  // console.log(' ✓ Index deleted');
  // Close connections
  await connectionManager.destroy();
  console.log(' ✓ Connections closed\n');
  console.log('✨ Example completed successfully!\n');
}
// ============================================================================
// Run Example
// ============================================================================
// Run the example only when this module is executed directly (not imported).
// NOTE(review): comparing import.meta.url against a hand-built `file://`
// string is fragile (percent-encoding, Windows drive letters) — consider
// node:url's pathToFileURL; confirm target platforms before relying on it.
if (import.meta.url === `file://${process.argv[1]}`) {
  main().catch((error) => {
    // Log to both the console and the structured logger, then exit non-zero
    // so shells/CI can detect the failure.
    console.error('❌ Example failed:', error);
    defaultLogger.error('Example failed', error);
    process.exit(1);
  });
}
export { main };

View File

@@ -0,0 +1,368 @@
/**
* Comprehensive Bulk Indexer Example
*
* Demonstrates high-throughput document ingestion with adaptive batching
*/
import {
createConfig,
ElasticsearchConnectionManager,
LogLevel,
BulkIndexer,
type BulkProgress,
type BulkBatchResult,
} from '../../index.js';
/**
 * Product document shape used by the bulk-indexing demos in this example.
 */
interface Product {
  // Mirrors the Elasticsearch document id used when indexing.
  id: string;
  name: string;
  description: string;
  category: string;
  price: number;
  stock: number;
  createdAt: Date;
}
/**
 * Walks through the BulkIndexer feature set: fixed and adaptive batching,
 * progress callbacks, backpressure, mixed operations, dead-letter queueing
 * and statistics. Any unexpected error propagates to the caller; the
 * wrapper at the bottom of this file logs it and exits non-zero.
 */
async function main() {
  console.log('=== Bulk Indexer Example ===\n');
  // ============================================================================
  // Step 1: Configuration
  // ============================================================================
  console.log('Step 1: Configuring Elasticsearch connection...');
  const config = createConfig()
    .fromEnv()
    .nodes(process.env.ELASTICSEARCH_URL || 'http://localhost:9200')
    .basicAuth(
      process.env.ELASTICSEARCH_USERNAME || 'elastic',
      process.env.ELASTICSEARCH_PASSWORD || 'changeme'
    )
    .timeout(30000)
    .retries(3)
    .logLevel(LogLevel.INFO)
    .enableMetrics(true)
    .build();
  // ============================================================================
  // Step 2: Initialize Connection
  // ============================================================================
  console.log('Step 2: Initializing connection manager...');
  const connectionManager = ElasticsearchConnectionManager.getInstance(config);
  await connectionManager.initialize();
  console.log('✓ Connection manager initialized\n');
  // ============================================================================
  // Step 3: Basic Bulk Indexing
  // ============================================================================
  console.log('Step 3: Basic bulk indexing...');
  const basicIndexer = new BulkIndexer({
    batchingStrategy: 'fixed',
    batchSize: 100,
    flushIntervalMs: 2000,
    workers: 2,
  });
  await basicIndexer.start();
  // Index documents
  const startTime = Date.now();
  for (let i = 1; i <= 500; i++) {
    await basicIndexer.index('products-basic', `product-${i}`, {
      id: `product-${i}`,
      name: `Product ${i}`,
      description: `Description for product ${i}`,
      category: `Category ${(i % 5) + 1}`,
      price: Math.random() * 1000,
      stock: Math.floor(Math.random() * 100),
      createdAt: new Date(),
    });
  }
  await basicIndexer.flush();
  await basicIndexer.stop();
  // Note: duration includes the final flush/stop, so throughput is slightly
  // conservative.
  const duration = Date.now() - startTime;
  const stats = basicIndexer.getStats();
  console.log('✓ Basic indexing complete');
  console.log(` Indexed: ${stats.totalSuccessful} documents`);
  console.log(` Duration: ${duration}ms`);
  console.log(` Throughput: ${((stats.totalSuccessful / duration) * 1000).toFixed(0)} docs/sec`);
  console.log(` Avg batch size: ${stats.avgBatchSize.toFixed(0)}`);
  console.log(` Avg batch duration: ${stats.avgBatchDurationMs.toFixed(0)}ms`);
  console.log();
  // ============================================================================
  // Step 4: Adaptive Batching
  // ============================================================================
  console.log('Step 4: Adaptive batching...');
  const adaptiveIndexer = new BulkIndexer({
    batchingStrategy: 'adaptive',
    minBatchSize: 50,
    maxBatchSize: 500,
    flushIntervalMs: 3000,
    workers: 3,
    onProgress: (progress: BulkProgress) => {
      // Throttle progress output to every 200 processed operations.
      if (progress.totalProcessed % 200 === 0 && progress.totalProcessed > 0) {
        console.log(
          ` Progress: ${progress.totalProcessed}/${progress.totalSubmitted} ` +
          `(${progress.operationsPerSecond} ops/sec, ${progress.queueSize} queued)`
        );
      }
    },
  });
  await adaptiveIndexer.start();
  // Index larger dataset
  console.log(' Indexing 1000 documents with adaptive batching...');
  for (let i = 1; i <= 1000; i++) {
    await adaptiveIndexer.index('products-adaptive', `product-${i}`, {
      id: `product-${i}`,
      name: `Adaptive Product ${i}`,
      description: `Description for adaptive product ${i}`,
      category: `Category ${(i % 10) + 1}`,
      price: Math.random() * 2000,
      stock: Math.floor(Math.random() * 200),
      createdAt: new Date(),
    });
  }
  await adaptiveIndexer.flush();
  await adaptiveIndexer.stop();
  const adaptiveStats = adaptiveIndexer.getStats();
  console.log('✓ Adaptive indexing complete');
  console.log(` Indexed: ${adaptiveStats.totalSuccessful} documents`);
  console.log(` Avg batch size: ${adaptiveStats.avgBatchSize.toFixed(0)} (adapted based on performance)`);
  console.log(` Avg ops/sec: ${adaptiveStats.avgOpsPerSecond.toFixed(0)}`);
  console.log();
  // ============================================================================
  // Step 5: Progress Callbacks
  // ============================================================================
  console.log('Step 5: Using progress callbacks...');
  let lastProgress = 0;
  const progressIndexer = new BulkIndexer({
    batchSize: 100,
    workers: 4,
    onProgress: (progress: BulkProgress) => {
      const percent = (progress.totalProcessed / progress.totalSubmitted) * 100;
      // Report in roughly 20% increments.
      if (percent - lastProgress >= 20) {
        console.log(` ${percent.toFixed(0)}% complete (${progress.totalProcessed}/${progress.totalSubmitted})`);
        if (progress.estimatedTimeRemainingMs !== undefined) {
          console.log(` ETA: ${(progress.estimatedTimeRemainingMs / 1000).toFixed(1)}s`);
        }
        lastProgress = percent;
      }
    },
    onBatchSuccess: (result: BulkBatchResult) => {
      // Only surface batches that had partial failures.
      if (result.failed > 0) {
        console.log(` Batch completed: ${result.successful} ok, ${result.failed} failed`);
      }
    },
  });
  await progressIndexer.start();
  for (let i = 1; i <= 500; i++) {
    await progressIndexer.index('products-progress', `product-${i}`, {
      id: `product-${i}`,
      name: `Progress Product ${i}`,
      description: `Description ${i}`,
      category: `Category ${(i % 3) + 1}`,
      price: Math.random() * 500,
      stock: Math.floor(Math.random() * 50),
      createdAt: new Date(),
    });
  }
  await progressIndexer.flush();
  await progressIndexer.stop();
  console.log('✓ Progress tracking complete\n');
  // ============================================================================
  // Step 6: Backpressure Handling
  // ============================================================================
  console.log('Step 6: Demonstrating backpressure handling...');
  const backpressureIndexer = new BulkIndexer({
    batchSize: 50,
    maxQueueSize: 200,
    flushIntervalMs: 1000,
    workers: 1, // Single worker to create backpressure
  });
  await backpressureIndexer.start();
  console.log(' Submitting operations rapidly...');
  let backpressureHits = 0;
  for (let i = 1; i <= 300; i++) {
    const backpressure = backpressureIndexer.getBackpressure();
    // BUGFIX: count every backpressure detection; previously the counter was
    // gated behind the `i % 50 === 0` logging throttle, so the reported
    // "Backpressure events" total undercounted. Logging stays throttled.
    if (backpressure.active) {
      backpressureHits++;
      if (i % 50 === 0) {
        console.log(
          ` Backpressure detected: ${backpressure.queueUtilization.toFixed(0)}% queue utilization ` +
          `(waiting ${backpressure.recommendedWaitMs}ms)`
        );
      }
    }
    await backpressureIndexer.index('products-backpressure', `product-${i}`, {
      id: `product-${i}`,
      name: `Backpressure Product ${i}`,
      description: `Test ${i}`,
      category: `Cat ${i % 2}`,
      price: i * 10,
      stock: i,
      createdAt: new Date(),
    });
  }
  await backpressureIndexer.flush();
  await backpressureIndexer.stop();
  console.log('✓ Backpressure handling demonstrated');
  console.log(` Backpressure events: ${backpressureHits}`);
  console.log();
  // ============================================================================
  // Step 7: Mixed Operations
  // ============================================================================
  console.log('Step 7: Mixed operations (index, update, delete)...');
  const mixedIndexer = new BulkIndexer({
    batchSize: 50,
    workers: 2,
  });
  await mixedIndexer.start();
  // Index documents
  for (let i = 1; i <= 100; i++) {
    await mixedIndexer.index('products-mixed', `product-${i}`, {
      id: `product-${i}`,
      name: `Mixed Product ${i}`,
      description: `Original description ${i}`,
      category: `Category ${(i % 5) + 1}`,
      price: i * 100,
      stock: i * 10,
      createdAt: new Date(),
    });
  }
  // Update some documents
  for (let i = 1; i <= 30; i++) {
    await mixedIndexer.update<Product>('products-mixed', `product-${i}`, {
      price: i * 150, // Updated price
      stock: i * 15, // Updated stock
    });
  }
  // Delete some documents
  for (let i = 91; i <= 100; i++) {
    await mixedIndexer.delete('products-mixed', `product-${i}`);
  }
  await mixedIndexer.flush();
  const mixedStats = mixedIndexer.getStats();
  await mixedIndexer.stop();
  console.log('✓ Mixed operations complete');
  console.log(` Total operations: ${mixedStats.totalProcessed}`);
  console.log(` Index: 100, Update: 30, Delete: 10`);
  console.log(` Successful: ${mixedStats.totalSuccessful}`);
  console.log(` Failed: ${mixedStats.totalFailed}`);
  console.log();
  // ============================================================================
  // Step 8: Dead-Letter Queue
  // ============================================================================
  console.log('Step 8: Dead-letter queue for failed operations...');
  const dlqIndexer = new BulkIndexer({
    batchSize: 50,
    maxRetries: 2,
    retryDelayMs: 500,
    enableDeadLetterQueue: true,
    deadLetterIndex: 'failed-operations-{now/d}',
    workers: 2,
  });
  await dlqIndexer.start();
  // Index valid documents
  for (let i = 1; i <= 50; i++) {
    await dlqIndexer.index('products-dlq', `product-${i}`, {
      id: `product-${i}`,
      name: `DLQ Product ${i}`,
      description: `Description ${i}`,
      category: `Cat ${i % 3}`,
      price: i * 50,
      stock: i * 5,
      createdAt: new Date(),
    });
  }
  await dlqIndexer.flush();
  // Wait a bit for any retries
  await new Promise((resolve) => setTimeout(resolve, 2000));
  const dlqStats = dlqIndexer.getStats();
  await dlqIndexer.stop();
  console.log('✓ Dead-letter queue test complete');
  console.log(` Successful: ${dlqStats.totalSuccessful}`);
  console.log(` Failed (after retries): ${dlqStats.totalFailed}`);
  console.log(` Sent to DLQ: ${dlqStats.totalDeadLettered}`);
  console.log();
  // ============================================================================
  // Step 9: Statistics Summary
  // ============================================================================
  console.log('Step 9: Final statistics summary...\n');
  const finalStats = dlqIndexer.getStats();
  console.log('Sample Indexer Statistics:');
  console.log(` Total submitted: ${finalStats.totalSubmitted}`);
  console.log(` Total processed: ${finalStats.totalProcessed}`);
  console.log(` Total successful: ${finalStats.totalSuccessful}`);
  console.log(` Total failed: ${finalStats.totalFailed}`);
  console.log(` Total dead-lettered: ${finalStats.totalDeadLettered}`);
  console.log(` Total batches: ${finalStats.totalBatches}`);
  console.log(` Avg batch size: ${finalStats.avgBatchSize.toFixed(1)}`);
  console.log(` Avg batch duration: ${finalStats.avgBatchDurationMs.toFixed(1)}ms`);
  console.log(` Avg ops/sec: ${finalStats.avgOpsPerSecond.toFixed(0)}`);
  console.log();
  // ============================================================================
  // Step 10: Cleanup
  // ============================================================================
  console.log('Step 10: Cleanup...');
  await connectionManager.destroy();
  console.log('✓ Connection closed\n');
  console.log('=== Bulk Indexer Example Complete ===');
  console.log('\nKey Features Demonstrated:');
  console.log(' ✓ Fixed batch size strategy');
  console.log(' ✓ Adaptive batching (adjusts based on performance)');
  console.log(' ✓ Progress callbacks with ETA');
  console.log(' ✓ Backpressure handling');
  console.log(' ✓ Mixed operations (index, update, delete)');
  console.log(' ✓ Dead-letter queue for failed operations');
  console.log(' ✓ Automatic retries with exponential backoff');
  console.log(' ✓ Parallel workers');
  console.log(' ✓ Comprehensive statistics');
}
// Run the example
// Run the example unconditionally; exit non-zero so shells/CI detect failure.
main().catch((error) => {
  console.error('Example failed:', error);
  process.exit(1);
});

View File

@@ -0,0 +1,472 @@
/**
* Comprehensive KV Store Example
*
* Demonstrates distributed key-value storage with TTL and caching
*/
import {
createConfig,
ElasticsearchConnectionManager,
LogLevel,
KVStore,
type KVStoreConfig,
} from '../../index.js';
/**
 * A logged-in user's session record, stored with a TTL in the KV store
 * (see the TTL demo in Step 4).
 */
interface UserSession {
  userId: string;
  username: string;
  email: string;
  roles: string[];
  // Timestamp of the login that created this session.
  loginAt: Date;
  lastActivityAt: Date;
  // Client details captured at login time.
  metadata: {
    ip: string;
    userAgent: string;
  };
}
/**
 * A cached query result, used by the batch-operations demo.
 */
interface CacheData {
  // The query whose results are cached.
  query: string;
  results: unknown[];
  computedAt: Date;
  // Intended lifetime in seconds (also passed as the per-entry TTL option).
  ttl: number;
}
async function main() {
console.log('=== KV Store Example ===\n');
// ============================================================================
// Step 1: Configuration
// ============================================================================
console.log('Step 1: Configuring Elasticsearch connection...');
const config = createConfig()
.fromEnv()
.nodes(process.env.ELASTICSEARCH_URL || 'http://localhost:9200')
.basicAuth(
process.env.ELASTICSEARCH_USERNAME || 'elastic',
process.env.ELASTICSEARCH_PASSWORD || 'changeme'
)
.timeout(30000)
.retries(3)
.logLevel(LogLevel.INFO)
.enableMetrics(true)
.build();
// ============================================================================
// Step 2: Initialize Connection
// ============================================================================
console.log('Step 2: Initializing connection manager...');
const connectionManager = ElasticsearchConnectionManager.getInstance(config);
await connectionManager.initialize();
console.log('✓ Connection manager initialized\n');
// ============================================================================
// Step 3: Basic KV Operations
// ============================================================================
console.log('Step 3: Basic key-value operations...');
const basicKV = new KVStore<string>({
index: 'kv-basic',
enableCache: true,
cacheMaxSize: 1000,
});
await basicKV.initialize();
// Set a value
await basicKV.set('user:1:name', 'Alice Johnson');
await basicKV.set('user:2:name', 'Bob Smith');
await basicKV.set('user:3:name', 'Charlie Brown');
// Get a value
const result = await basicKV.get('user:1:name');
console.log(` Retrieved: ${result.value} (cache hit: ${result.cacheHit})`);
// Get again (should hit cache)
const cachedResult = await basicKV.get('user:1:name');
console.log(` Retrieved: ${cachedResult.value} (cache hit: ${cachedResult.cacheHit})`);
// Check existence
const exists = await basicKV.exists('user:1:name');
console.log(` Key exists: ${exists}`);
// Delete a key
await basicKV.delete('user:3:name');
const deletedExists = await basicKV.exists('user:3:name');
console.log(` Deleted key exists: ${deletedExists}`);
console.log('✓ Basic operations complete\n');
// ============================================================================
// Step 4: TTL Support
// ============================================================================
console.log('Step 4: TTL (Time-To-Live) support...');
const ttlKV = new KVStore<UserSession>({
index: 'kv-sessions',
defaultTTL: 3600, // 1 hour default
enableCache: true,
enableExpirationCleanup: true,
cleanupIntervalSeconds: 60,
});
await ttlKV.initialize();
// Set session with 5-second TTL
const session: UserSession = {
userId: 'user-123',
username: 'alice',
email: 'alice@example.com',
roles: ['user', 'admin'],
loginAt: new Date(),
lastActivityAt: new Date(),
metadata: {
ip: '192.168.1.100',
userAgent: 'Mozilla/5.0',
},
};
await ttlKV.set('session:alice-token-xyz', session, { ttl: 5 });
console.log(' Session stored with 5-second TTL');
// Get immediately
const sessionResult = await ttlKV.get('session:alice-token-xyz');
console.log(` Session retrieved: ${sessionResult.value?.username}`);
console.log(` Expires at: ${sessionResult.expiresAt?.toISOString()}`);
// Wait 6 seconds and try again
console.log(' Waiting 6 seconds for expiration...');
await new Promise((resolve) => setTimeout(resolve, 6000));
const expiredResult = await ttlKV.get('session:alice-token-xyz');
console.log(` After expiration - exists: ${expiredResult.exists}`);
console.log('✓ TTL support demonstrated\n');
// ============================================================================
// Step 5: Batch Operations
// ============================================================================
console.log('Step 5: Batch operations...');
const batchKV = new KVStore<CacheData>({
index: 'kv-cache',
enableCache: true,
cacheMaxSize: 5000,
});
await batchKV.initialize();
// Batch set
const cacheEntries = [
{
key: 'cache:query:1',
value: {
query: 'SELECT * FROM users',
results: [{ id: 1, name: 'Alice' }],
computedAt: new Date(),
ttl: 300,
},
options: { ttl: 300 },
},
{
key: 'cache:query:2',
value: {
query: 'SELECT * FROM products',
results: [{ id: 1, name: 'Product A' }],
computedAt: new Date(),
ttl: 300,
},
options: { ttl: 300 },
},
{
key: 'cache:query:3',
value: {
query: 'SELECT * FROM orders',
results: [{ id: 1, total: 100 }],
computedAt: new Date(),
ttl: 300,
},
options: { ttl: 300 },
},
];
const msetResult = await batchKV.mset(cacheEntries);
console.log(` Batch set: ${msetResult.successful} successful, ${msetResult.failed} failed`);
// Batch get
const mgetResult = await batchKV.mget([
'cache:query:1',
'cache:query:2',
'cache:query:3',
'cache:query:999', // Doesn't exist
]);
console.log(` Batch get: ${mgetResult.found} found, ${mgetResult.notFound} not found`);
console.log(` Cache hits: ${mgetResult.cacheHits}`);
// Batch delete
const mdeleteResult = await batchKV.mdelete(['cache:query:1', 'cache:query:2']);
console.log(
` Batch delete: ${mdeleteResult.successful} successful, ${mdeleteResult.failed} failed`
);
console.log('✓ Batch operations complete\n');
// ============================================================================
// Step 6: Key Scanning
// ============================================================================
console.log('Step 6: Key scanning with patterns...');
const scanKV = new KVStore<string>({
index: 'kv-scan',
enableCache: false,
});
await scanKV.initialize();
// Create test data
await scanKV.set('user:1:profile', 'Profile 1');
await scanKV.set('user:2:profile', 'Profile 2');
await scanKV.set('user:3:profile', 'Profile 3');
await scanKV.set('product:1:info', 'Product Info 1');
await scanKV.set('product:2:info', 'Product Info 2');
// Scan all user profiles
const userScan = await scanKV.scan({
pattern: 'user:*:profile',
limit: 10,
includeValues: false,
});
console.log(` User profiles found: ${userScan.keys.length}`);
console.log(` Keys: ${userScan.keys.join(', ')}`);
// Scan all products with values
const productScan = await scanKV.scan({
pattern: 'product:*',
limit: 10,
includeValues: true,
});
console.log(` Products found: ${productScan.keys.length}`);
console.log(` First product: ${productScan.values?.[0]}`);
// Scan with pagination
console.log(' Paginated scan:');
let cursor: string | undefined;
let page = 1;
do {
const result = await scanKV.scan({
limit: 2,
cursor,
includeValues: false,
});
console.log(` Page ${page}: ${result.keys.length} keys`);
cursor = result.nextCursor;
page++;
if (!result.hasMore) break;
} while (cursor && page <= 3);
console.log('✓ Key scanning complete\n');
// ============================================================================
// Step 7: Cache Eviction Policies
// ============================================================================
console.log('Step 7: Cache eviction policies...');
// LRU (Least Recently Used)
console.log(' Testing LRU eviction policy...');
const lruKV = new KVStore<number>({
index: 'kv-eviction-lru',
enableCache: true,
cacheMaxSize: 3,
cacheEvictionPolicy: 'lru',
});
await lruKV.initialize();
await lruKV.set('key1', 1);
await lruKV.set('key2', 2);
await lruKV.set('key3', 3);
// Access key1 (make it recently used)
await lruKV.get('key1');
// Add key4 (should evict key2, the least recently used)
await lruKV.set('key4', 4);
const stats = lruKV.getStats();
console.log(` Cache size: ${stats.cacheStats?.size}/${stats.cacheStats?.maxSize}`);
console.log(` Evictions: ${stats.cacheStats?.evictions}`);
// LFU (Least Frequently Used)
console.log(' Testing LFU eviction policy...');
const lfuKV = new KVStore<number>({
index: 'kv-eviction-lfu',
enableCache: true,
cacheMaxSize: 3,
cacheEvictionPolicy: 'lfu',
});
await lfuKV.initialize();
await lfuKV.set('key1', 1);
await lfuKV.set('key2', 2);
await lfuKV.set('key3', 3);
// Access key1 multiple times
await lfuKV.get('key1');
await lfuKV.get('key1');
await lfuKV.get('key1');
// Add key4 (should evict key2 or key3, the least frequently used)
await lfuKV.set('key4', 4);
const lfuStats = lfuKV.getStats();
console.log(` Cache size: ${lfuStats.cacheStats?.size}/${lfuStats.cacheStats?.maxSize}`);
console.log(` Evictions: ${lfuStats.cacheStats?.evictions}`);
console.log('✓ Cache eviction policies demonstrated\n');
// ============================================================================
// Step 8: Optimistic Concurrency
// ============================================================================
console.log('Step 8: Optimistic concurrency control...');
const concurrencyKV = new KVStore<{ count: number }>({
index: 'kv-concurrency',
enableOptimisticConcurrency: true,
enableCache: false,
});
await concurrencyKV.initialize();
// Set initial value
const initial = await concurrencyKV.set('counter', { count: 0 });
console.log(` Initial version: seq_no=${initial.version?.seqNo}`);
// Update with correct version
const update1 = await concurrencyKV.set('counter', { count: 1 }, {
ifSeqNo: initial.version?.seqNo,
ifPrimaryTerm: initial.version?.primaryTerm,
});
console.log(` Update 1 success: ${update1.success}`);
// Try to update with old version (should fail)
const update2 = await concurrencyKV.set('counter', { count: 999 }, {
ifSeqNo: initial.version?.seqNo, // Old version
ifPrimaryTerm: initial.version?.primaryTerm,
});
console.log(` Update 2 with old version success: ${update2.success}`);
if (!update2.success) {
console.log(` Error: ${update2.error?.type} - ${update2.error?.reason}`);
}
console.log('✓ Optimistic concurrency demonstrated\n');
// ============================================================================
// Step 9: Compression
// ============================================================================
console.log('Step 9: Automatic compression for large values...');
const compressionKV = new KVStore<{ data: string }>({
index: 'kv-compression',
enableCompression: true,
compressionThreshold: 100, // 100 bytes
enableCache: false,
});
await compressionKV.initialize();
// Small value (no compression)
await compressionKV.set('small', { data: 'Hello' });
// Large value (will be compressed)
const largeData = 'x'.repeat(1000);
await compressionKV.set('large', { data: largeData });
// Retrieve both
const smallResult = await compressionKV.get('small');
const largeResult = await compressionKV.get('large');
console.log(` Small value retrieved: ${smallResult.value?.data.substring(0, 10)}...`);
console.log(` Large value retrieved: ${largeResult.value?.data.substring(0, 10)}... (length: ${largeResult.value?.data.length})`);
console.log('✓ Compression demonstrated\n');
// ============================================================================
// Step 10: Statistics
// ============================================================================
console.log('Step 10: KV Store statistics...\n');
const finalStats = basicKV.getStats();
console.log('Basic KV Store Statistics:');
console.log(` Total keys: ${finalStats.totalKeys}`);
console.log(` Total gets: ${finalStats.totalGets}`);
console.log(` Total sets: ${finalStats.totalSets}`);
console.log(` Total deletes: ${finalStats.totalDeletes}`);
console.log(` Total scans: ${finalStats.totalScans}`);
console.log(` Total expired: ${finalStats.totalExpired}`);
console.log(` Avg get duration: ${finalStats.avgGetDurationMs.toFixed(2)}ms`);
console.log(` Avg set duration: ${finalStats.avgSetDurationMs.toFixed(2)}ms`);
console.log(` Avg delete duration: ${finalStats.avgDeleteDurationMs.toFixed(2)}ms`);
if (finalStats.cacheStats) {
console.log('\n Cache Statistics:');
console.log(` Size: ${finalStats.cacheStats.size}/${finalStats.cacheStats.maxSize}`);
console.log(` Hits: ${finalStats.cacheStats.hits}`);
console.log(` Misses: ${finalStats.cacheStats.misses}`);
console.log(` Hit ratio: ${(finalStats.cacheStats.hitRatio * 100).toFixed(2)}%`);
console.log(` Evictions: ${finalStats.cacheStats.evictions}`);
console.log(` Memory usage: ${(finalStats.cacheStats.memoryUsage / 1024).toFixed(2)} KB`);
}
console.log();
// ============================================================================
// Step 11: Cleanup
// ============================================================================
console.log('Step 11: Cleanup...');
await basicKV.destroy();
await ttlKV.destroy();
await batchKV.destroy();
await scanKV.destroy();
await lruKV.destroy();
await lfuKV.destroy();
await concurrencyKV.destroy();
await compressionKV.destroy();
await connectionManager.destroy();
console.log('✓ Cleanup complete\n');
console.log('=== KV Store Example Complete ===');
console.log('\nKey Features Demonstrated:');
console.log(' ✓ Basic get/set/delete operations');
console.log(' ✓ TTL (Time-To-Live) with automatic expiration');
console.log(' ✓ In-memory caching with hit/miss tracking');
console.log(' ✓ Batch operations (mget, mset, mdelete)');
console.log(' ✓ Key scanning with wildcard patterns');
console.log(' ✓ Cache eviction policies (LRU, LFU, FIFO, TTL)');
console.log(' ✓ Optimistic concurrency control');
console.log(' ✓ Automatic compression for large values');
console.log(' ✓ Comprehensive statistics');
console.log(' ✓ Cursor-based pagination');
}
// Entry point: execute the example; report any failure and exit non-zero.
main().catch((err: unknown) => {
  console.error('Example failed:', err);
  process.exit(1);
});

View File

@@ -0,0 +1,473 @@
/**
* Comprehensive Logging API Example
*
* Demonstrates enterprise logging with structured log ingestion
*/
import {
createConfig,
ElasticsearchConnectionManager,
LogLevel,
LogDestination,
addHostInfo,
addEnvironment,
addServiceInfo,
addTimestamp,
sanitizeSensitiveData,
addDynamicTags,
chainEnrichers,
} from '../../index.js';
import type { LogEntry } from '../../index.js';
/**
 * Runs the end-to-end structured-logging example against the configured
 * Elasticsearch cluster (defaults to http://localhost:9200).
 *
 * Walks through eleven sequential steps: connection setup, basic log
 * ingestion, enrichment, sampling strategies, dynamic tagging, ILM policy
 * creation, metric extraction, chained enrichers, statistics, and cleanup.
 * Every step prints progress to stdout, so statement order is significant.
 *
 * @returns Resolves once all destinations are flushed/destroyed and the
 *   connection is closed.
 * @throws Propagates connection or indexing errors to the caller (handled
 *   by the `main().catch(...)` at the bottom of the file).
 */
async function main() {
  console.log('=== Logging API Example ===\n');
  // ============================================================================
  // Step 1: Configuration
  // ============================================================================
  console.log('Step 1: Configuring Elasticsearch connection...');
  // fromEnv() loads settings from environment variables first; the explicit
  // builder calls below supply local-development fallbacks/overrides.
  const config = createConfig()
    .fromEnv()
    .nodes(process.env.ELASTICSEARCH_URL || 'http://localhost:9200')
    .basicAuth(
      process.env.ELASTICSEARCH_USERNAME || 'elastic',
      process.env.ELASTICSEARCH_PASSWORD || 'changeme'
    )
    .timeout(30000)
    .retries(3)
    .logLevel(LogLevel.INFO)
    .enableMetrics(true)
    .enableTracing(true, { serviceName: 'logging-example', serviceVersion: '1.0.0' })
    .build();
  // ============================================================================
  // Step 2: Initialize Connection
  // ============================================================================
  console.log('Step 2: Initializing connection manager...');
  const connectionManager = ElasticsearchConnectionManager.getInstance(config);
  await connectionManager.initialize();
  console.log('✓ Connection manager initialized\n');
  // ============================================================================
  // Step 3: Basic Log Destination
  // ============================================================================
  console.log('Step 3: Creating basic log destination...');
  // Date-math index pattern ({now/d}) produces one index per day.
  const basicLogDest = new LogDestination({
    indexPattern: 'logs-example-basic-{now/d}',
    batchSize: 50,
    flushIntervalMs: 3000,
    autoCreateTemplate: true,
  });
  await basicLogDest.initialize();
  console.log('✓ Basic log destination initialized');
  // Send basic logs
  await basicLogDest.send({
    timestamp: new Date().toISOString(),
    level: 'INFO',
    message: 'Application started',
    service: 'example-app',
    version: '1.0.0',
  });
  await basicLogDest.send({
    timestamp: new Date().toISOString(),
    level: 'WARN',
    message: 'High memory usage detected',
    metadata: {
      memoryUsage: '85%',
      threshold: '80%',
    },
  });
  await basicLogDest.send({
    timestamp: new Date().toISOString(),
    level: 'ERROR',
    message: 'Failed to connect to database',
    error: {
      name: 'ConnectionError',
      message: 'ECONNREFUSED',
      // NOTE(review): '\\n' is a literal backslash-n, not a newline — the
      // mock stack stays on one line. Confirm this is intentional.
      stack: 'Error: ECONNREFUSED\\n at ...',
      code: 'ECONNREFUSED',
    },
  });
  await basicLogDest.flush();
  console.log('✓ Basic logs sent (3 logs)');
  console.log(' Stats:', basicLogDest.getStats());
  console.log();
  // ============================================================================
  // Step 4: Log Destination with Enrichers
  // ============================================================================
  console.log('Step 4: Creating log destination with enrichers...');
  // Enrichers run on every entry in array order before it is queued.
  const enrichedLogDest = new LogDestination({
    indexPattern: 'logs-example-enriched-{now/d}',
    batchSize: 100,
    flushIntervalMs: 5000,
    enrichers: [
      addTimestamp,
      addHostInfo,
      addEnvironment,
      addServiceInfo,
      sanitizeSensitiveData([
        { path: 'metadata.password' },
        { path: 'metadata.apiKey' },
        { path: 'metadata.token' },
      ]),
    ],
  });
  await enrichedLogDest.initialize();
  console.log('✓ Enriched log destination initialized');
  // Send logs that will be enriched
  await enrichedLogDest.send({
    timestamp: new Date().toISOString(),
    level: 'INFO',
    message: 'User authenticated',
    metadata: {
      userId: 'user-123',
      username: 'john.doe',
      password: 'supersecret', // Will be sanitized
      ipAddress: '192.168.1.100',
    },
  });
  await enrichedLogDest.send({
    timestamp: new Date().toISOString(),
    level: 'INFO',
    message: 'API request processed',
    metadata: {
      method: 'POST',
      path: '/api/users',
      statusCode: 201,
      apiKey: 'sk-1234567890', // Will be sanitized
      duration: 45,
    },
  });
  await enrichedLogDest.flush();
  console.log('✓ Enriched logs sent (2 logs)');
  console.log(' Logs enriched with: timestamp, host, environment, service info');
  console.log(' Sensitive data sanitized: password, apiKey');
  console.log();
  // ============================================================================
  // Step 5: Sampling Strategies
  // ============================================================================
  console.log('Step 5: Demonstrating sampling strategies...');
  // 5.1: Errors-only sampling — drops every entry whose level is not an error.
  console.log('5.1: Errors-only sampling');
  const errorsOnlyDest = new LogDestination({
    indexPattern: 'logs-example-errors-{now/d}',
    batchSize: 50,
    flushIntervalMs: 3000,
    sampling: {
      strategy: 'errors-only',
    },
  });
  await errorsOnlyDest.initialize();
  // Send mixed logs
  await errorsOnlyDest.send({
    timestamp: new Date().toISOString(),
    level: 'INFO',
    message: 'This will be sampled out',
  });
  await errorsOnlyDest.send({
    timestamp: new Date().toISOString(),
    level: 'ERROR',
    message: 'This error will be kept',
    error: { name: 'Error', message: 'Something went wrong' },
  });
  await errorsOnlyDest.flush();
  console.log(' Sent 2 logs (1 INFO, 1 ERROR)');
  const errorsStats = errorsOnlyDest.getStats();
  console.log(` Indexed: ${errorsStats.totalSuccessful}, Sampled out: ${errorsStats.totalSampled}`);
  console.log();
  // 5.2: Percentage sampling (10%)
  console.log('5.2: Percentage sampling (10%)');
  const percentageDest = new LogDestination({
    indexPattern: 'logs-example-percentage-{now/d}',
    batchSize: 50,
    flushIntervalMs: 3000,
    sampling: {
      strategy: 'percentage',
      percentage: 10,
      alwaysSampleErrors: true, // errors bypass the percentage filter
    },
  });
  await percentageDest.initialize();
  // Send many logs
  // NOTE(review): percentage sampling is probabilistic, so the indexed
  // count varies from run to run (~10 of 100 INFO entries).
  for (let i = 0; i < 100; i++) {
    await percentageDest.send({
      timestamp: new Date().toISOString(),
      level: 'INFO',
      message: `Log ${i}`,
    });
  }
  // Send an error (should always be kept)
  await percentageDest.send({
    timestamp: new Date().toISOString(),
    level: 'ERROR',
    message: 'Error log (always kept)',
  });
  await percentageDest.flush();
  const percentageStats = percentageDest.getStats();
  console.log(` Sent 101 logs (100 INFO, 1 ERROR)`);
  console.log(` Indexed: ${percentageStats.totalSuccessful} (~10% of INFO + 1 ERROR)`);
  console.log(` Sampled out: ${percentageStats.totalSampled}`);
  console.log();
  // ============================================================================
  // Step 6: Dynamic Tags
  // ============================================================================
  console.log('Step 6: Adding dynamic tags based on content...');
  // Each rule adds its tag to entries whose condition returns true.
  const taggedDest = new LogDestination({
    indexPattern: 'logs-example-tagged-{now/d}',
    batchSize: 50,
    flushIntervalMs: 3000,
    enrichers: [
      addDynamicTags([
        {
          condition: (entry) => entry.level === 'ERROR',
          tag: 'alert',
        },
        {
          condition: (entry) =>
            entry.message.toLowerCase().includes('payment') ||
            entry.message.toLowerCase().includes('transaction'),
          tag: 'financial',
        },
        {
          condition: (entry) =>
            entry.metadata?.statusCode !== undefined && (entry.metadata.statusCode as number) >= 500,
          tag: 'server-error',
        },
      ]),
    ],
  });
  await taggedDest.initialize();
  await taggedDest.send({
    timestamp: new Date().toISOString(),
    level: 'INFO',
    message: 'Payment processed successfully',
    metadata: { amount: 99.99, currency: 'USD', statusCode: 200 },
  });
  await taggedDest.send({
    timestamp: new Date().toISOString(),
    level: 'ERROR',
    message: 'Transaction failed',
    metadata: { statusCode: 500 },
    error: { name: 'PaymentError', message: 'Card declined' },
  });
  await taggedDest.flush();
  console.log('✓ Tagged logs sent');
  console.log(' First log tagged: [financial]');
  console.log(' Second log tagged: [alert, financial, server-error]');
  console.log();
  // ============================================================================
  // Step 7: ILM (Index Lifecycle Management)
  // ============================================================================
  console.log('Step 7: Creating log destination with ILM policy...');
  const ilmDest = new LogDestination({
    indexPattern: 'logs-example-ilm-{now/d}',
    batchSize: 50,
    flushIntervalMs: 3000,
    ilm: {
      name: 'logs-example-policy',
      hotDuration: '7d',
      warmDuration: '30d',
      deleteDuration: '90d',
      rollover: {
        maxSize: '50gb',
        maxAge: '1d',
      },
    },
  });
  await ilmDest.initialize();
  console.log('✓ ILM policy created');
  console.log(' Hot: 7 days → Warm: 30 days → Delete: 90 days');
  console.log(' Rollover: 50GB or 1 day');
  console.log();
  // ============================================================================
  // Step 8: Metric Extraction
  // ============================================================================
  console.log('Step 8: Extracting metrics from logs...');
  // Metric definitions pull numeric/categorical values out of matching
  // log fields as they are ingested.
  const metricsDest = new LogDestination({
    indexPattern: 'logs-example-metrics-{now/d}',
    batchSize: 50,
    flushIntervalMs: 3000,
    metrics: [
      {
        name: 'api_response_time',
        field: 'metrics.duration',
        type: 'histogram',
        labels: ['metadata.method', 'metadata.statusCode'],
      },
      {
        name: 'errors_total',
        field: 'level',
        type: 'counter',
        labels: ['error.name'],
      },
    ],
  });
  await metricsDest.initialize();
  // Send logs with metrics
  await metricsDest.send({
    timestamp: new Date().toISOString(),
    level: 'INFO',
    message: 'API request completed',
    metadata: {
      method: 'GET',
      path: '/api/users',
      statusCode: 200,
    },
    metrics: {
      duration: 42,
    },
  });
  await metricsDest.send({
    timestamp: new Date().toISOString(),
    level: 'ERROR',
    message: 'API request failed',
    metadata: {
      method: 'POST',
      statusCode: 500,
    },
    error: {
      name: 'ValidationError',
      message: 'Invalid input',
    },
    metrics: {
      duration: 15,
    },
  });
  await metricsDest.flush();
  console.log('✓ Logs with metrics sent');
  console.log(' Metrics extracted: api_response_time, errors_total');
  console.log();
  // ============================================================================
  // Step 9: Chained Enrichers
  // ============================================================================
  console.log('Step 9: Using chained enrichers...');
  // chainEnrichers composes several enrichers into a single one that
  // applies them left-to-right.
  const chainedDest = new LogDestination({
    indexPattern: 'logs-example-chained-{now/d}',
    batchSize: 50,
    flushIntervalMs: 3000,
    enrichers: [
      chainEnrichers(
        addTimestamp,
        addHostInfo,
        addEnvironment,
        addServiceInfo,
        sanitizeSensitiveData([{ path: 'metadata.secret' }]),
        addDynamicTags([
          {
            condition: (entry) => entry.level === 'ERROR',
            tag: 'needs-attention',
          },
        ])
      ),
    ],
  });
  await chainedDest.initialize();
  await chainedDest.send({
    timestamp: new Date().toISOString(),
    level: 'ERROR',
    message: 'Critical error occurred',
    metadata: {
      secret: 'should-be-redacted',
      component: 'auth-service',
    },
  });
  await chainedDest.flush();
  console.log('✓ Log sent through enrichment chain');
  console.log(' Applied: timestamp → host → env → service → sanitize → tags');
  console.log();
  // ============================================================================
  // Step 10: Statistics and Monitoring
  // ============================================================================
  console.log('Step 10: Reviewing statistics...\n');
  const stats = enrichedLogDest.getStats();
  console.log('Enriched Log Destination Stats:');
  console.log(` Total logs: ${stats.totalLogs}`);
  console.log(` Successfully indexed: ${stats.totalSuccessful}`);
  console.log(` Failed: ${stats.totalFailed}`);
  console.log(` Sampled out: ${stats.totalSampled}`);
  console.log(` Dropped (queue overflow): ${stats.totalDropped}`);
  console.log(` Current queue size: ${stats.queueSize}`);
  console.log(` Avg batch duration: ${stats.avgBatchDurationMs.toFixed(2)}ms`);
  // lastFlushAt is undefined until the first flush has completed.
  if (stats.lastFlushAt) {
    console.log(` Last flush: ${stats.lastFlushAt.toISOString()}`);
  }
  console.log();
  // ============================================================================
  // Step 11: Cleanup
  // ============================================================================
  console.log('Step 11: Cleanup...');
  // destroy() flushes any queued entries before closing each destination.
  await basicLogDest.destroy();
  await enrichedLogDest.destroy();
  await errorsOnlyDest.destroy();
  await percentageDest.destroy();
  await taggedDest.destroy();
  await ilmDest.destroy();
  await metricsDest.destroy();
  await chainedDest.destroy();
  console.log('✓ All log destinations destroyed (flushed and closed)');
  await connectionManager.destroy();
  console.log('✓ Connection closed\n');
  console.log('=== Logging API Example Complete ===');
  console.log('\nKey Features Demonstrated:');
  console.log(' ✓ Basic structured logging');
  console.log(' ✓ Log enrichment (host, environment, service info)');
  console.log(' ✓ Sensitive data sanitization');
  console.log(' ✓ Sampling strategies (errors-only, percentage)');
  console.log(' ✓ Dynamic tagging based on content');
  console.log(' ✓ ILM (Index Lifecycle Management)');
  console.log(' ✓ Metric extraction from logs');
  console.log(' ✓ Chained enrichers');
  console.log(' ✓ Batching and auto-flushing');
  console.log(' ✓ Statistics and monitoring');
}
// Entry point: kick off the example; log the failure and exit non-zero.
void main().catch((err: unknown) => {
  console.error('Example failed:', err);
  process.exit(1);
});

View File

@@ -0,0 +1,401 @@
/**
* Comprehensive Plugin System Example
*
* Demonstrates extensible request/response middleware
*/
import {
createConfig,
ElasticsearchConnectionManager,
LogLevel,
createPluginManager,
createLoggingPlugin,
createMetricsPlugin,
createCachePlugin,
createRateLimitPlugin,
type Plugin,
type PluginContext,
type PluginResponse,
} from '../../index.js';
/**
 * Runs the end-to-end plugin-system example against the configured
 * Elasticsearch cluster (defaults to http://localhost:9200).
 *
 * Walks through twelve sequential steps: connection setup, registration of
 * built-in and custom plugins, hook execution against mock request/response
 * contexts, statistics, priority ordering, dynamic (un)registration, error
 * hooks, a plugin factory, and cleanup. Every step prints progress to
 * stdout, so statement order is significant.
 *
 * @returns Resolves when the plugin manager and connection are destroyed.
 * @throws Propagates connection or plugin errors to the caller (handled by
 *   the `main().catch(...)` at the bottom of the file).
 */
async function main(): Promise<void> {
  console.log('=== Plugin System Example ===\n');
  // ============================================================================
  // Step 1: Configuration
  // ============================================================================
  console.log('Step 1: Configuring Elasticsearch connection...');
  // fromEnv() loads settings from environment variables first; the explicit
  // builder calls below supply local-development fallbacks/overrides.
  const config = createConfig()
    .fromEnv()
    .nodes(process.env.ELASTICSEARCH_URL || 'http://localhost:9200')
    .basicAuth(
      process.env.ELASTICSEARCH_USERNAME || 'elastic',
      process.env.ELASTICSEARCH_PASSWORD || 'changeme'
    )
    .timeout(30000)
    .retries(3)
    .logLevel(LogLevel.INFO)
    .enableMetrics(true)
    .build();
  // ============================================================================
  // Step 2: Initialize Connection and Plugin Manager
  // ============================================================================
  console.log('Step 2: Initializing connection and plugin manager...');
  const connectionManager = ElasticsearchConnectionManager.getInstance(config);
  await connectionManager.initialize();
  const pluginManager = createPluginManager({
    enabled: true,
    maxHookDuration: 5000, // per-hook timeout in ms
    continueOnError: true, // a failing plugin does not abort the chain
    collectStats: true,
  });
  // Set the client for plugin initialization
  pluginManager.setClient(connectionManager.getClient());
  console.log('✓ Connection and plugin manager initialized\n');
  // ============================================================================
  // Step 3: Register Built-in Plugins
  // ============================================================================
  console.log('Step 3: Registering built-in plugins...');
  // Logging plugin - logs all requests/responses
  await pluginManager.register(
    createLoggingPlugin({
      logRequests: true,
      logResponses: true,
      logErrors: true,
      logRequestBody: true,
      logResponseBody: false,
      maxBodySize: 1024,
    })
  );
  // Metrics plugin - collects request metrics
  await pluginManager.register(
    createMetricsPlugin({
      enabled: true,
      prefix: 'elasticsearch',
      recordDuration: true,
      recordSize: true,
      recordResponseSize: true,
    })
  );
  // Cache plugin - caches GET requests
  await pluginManager.register(
    createCachePlugin({
      enabled: true,
      maxEntries: 100,
      defaultTTL: 60,
      methods: ['GET'],
    })
  );
  // Rate limit plugin - limits request rate
  await pluginManager.register(
    createRateLimitPlugin({
      maxRequestsPerSecond: 10,
      burstSize: 5,
      waitForSlot: true,
      maxWaitTime: 5000,
    })
  );
  console.log('✓ Built-in plugins registered\n');
  // ============================================================================
  // Step 4: Create Custom Plugin
  // ============================================================================
  console.log('Step 4: Creating and registering custom plugin...');
  // Injects correlation headers into every outgoing request; lowest
  // priority number runs first.
  const customPlugin: Plugin = {
    name: 'request-id-injector',
    version: '1.0.0',
    priority: 5, // Execute very early
    beforeRequest: (context: PluginContext) => {
      // Add custom header to all requests
      if (!context.request.headers) {
        context.request.headers = {};
      }
      context.request.headers['X-Custom-Request-ID'] = context.request.requestId;
      context.request.headers['X-Client-Version'] = '3.0.0';
      console.log(` [Custom Plugin] Added headers to request ${context.request.requestId}`);
      return context;
    },
    afterResponse: <T>(context: PluginContext, response: PluginResponse<T>) => {
      console.log(
        ` [Custom Plugin] Response received for ${context.request.requestId} with status ${response.statusCode}`
      );
      return response;
    },
    onError: (context) => {
      console.log(
        ` [Custom Plugin] Error occurred for ${context.request.requestId}: ${context.error.message}`
      );
      // Don't handle error
      return null;
    },
  };
  await pluginManager.register(customPlugin);
  console.log('✓ Custom plugin registered\n');
  // ============================================================================
  // Step 5: Create Transformation Plugin
  // ============================================================================
  console.log('Step 5: Creating transformation plugin...');
  // Stamps response bodies with request metadata after most other plugins.
  const transformPlugin: Plugin = {
    name: 'response-transformer',
    version: '1.0.0',
    priority: 80, // Execute late, after most plugins
    afterResponse: <T>(context: PluginContext, response: PluginResponse<T>) => {
      // Add metadata to all responses
      const transformedResponse = { ...response };
      if (typeof transformedResponse.body === 'object' && transformedResponse.body !== null) {
        // Narrow cast instead of `any`: we only attach one extra property.
        (transformedResponse.body as Record<string, unknown>)._metadata = {
          requestId: context.request.requestId,
          duration: Date.now() - context.request.startTime,
          timestamp: new Date().toISOString(),
        };
      }
      console.log(` [Transform Plugin] Added metadata to response`);
      return transformedResponse;
    },
  };
  await pluginManager.register(transformPlugin);
  console.log('✓ Transformation plugin registered\n');
  // ============================================================================
  // Step 6: Demonstrate Plugin Execution
  // ============================================================================
  console.log('Step 6: Demonstrating plugin execution...\n');
  // Simulate a request context
  const mockContext: PluginContext = {
    client: connectionManager.getClient(),
    request: {
      method: 'GET',
      path: '/test-index/_search',
      body: { query: { match_all: {} } },
      requestId: `req-${Date.now()}`,
      startTime: Date.now(),
    },
    shared: new Map(),
    config: {},
  };
  // Execute beforeRequest hooks
  console.log(' Executing beforeRequest hooks...');
  const modifiedContext = await pluginManager.executeBeforeRequest(mockContext);
  if (modifiedContext) {
    console.log(` ✓ Request context modified by ${pluginManager.getPlugins().length} plugins`);
    console.log(` Headers added:`, modifiedContext.request.headers);
  } else {
    console.log(' ✗ Request cancelled by plugin');
  }
  // Simulate a response
  const mockResponse: PluginResponse = {
    body: {
      took: 5,
      hits: {
        total: { value: 0 },
        hits: [],
      },
    },
    statusCode: 200,
    headers: {},
  };
  // Execute afterResponse hooks
  console.log('\n Executing afterResponse hooks...');
  // Fall back to the original context if a plugin cancelled the request —
  // avoids the previous `modifiedContext!` non-null assertion, which would
  // have passed `null` straight into executeAfterResponse.
  const activeContext = modifiedContext ?? mockContext;
  const modifiedResponse = await pluginManager.executeAfterResponse(
    activeContext,
    mockResponse
  );
  console.log(` ✓ Response modified by plugins`);
  console.log(` Metadata added:`, (modifiedResponse.body as Record<string, unknown>)._metadata);
  console.log();
  // ============================================================================
  // Step 7: Plugin Statistics
  // ============================================================================
  console.log('Step 7: Plugin statistics...\n');
  const stats = pluginManager.getStats();
  for (const [pluginName, pluginStats] of stats) {
    console.log(`Plugin: ${pluginName}`);
    console.log(` beforeRequest calls: ${pluginStats.beforeRequestCalls}`);
    console.log(` afterResponse calls: ${pluginStats.afterResponseCalls}`);
    console.log(` onError calls: ${pluginStats.onErrorCalls}`);
    console.log(
      ` Avg beforeRequest duration: ${pluginStats.avgBeforeRequestDuration.toFixed(2)}ms`
    );
    console.log(
      ` Avg afterResponse duration: ${pluginStats.avgAfterResponseDuration.toFixed(2)}ms`
    );
    console.log(` Errors: ${pluginStats.errors}`);
    console.log();
  }
  // ============================================================================
  // Step 8: Plugin Priority Demonstration
  // ============================================================================
  console.log('Step 8: Demonstrating plugin priority...\n');
  const plugins = pluginManager.getPlugins();
  // Sort a copy — Array.prototype.sort mutates in place, and the array
  // returned by getPlugins() may be the manager's internal registry.
  const sortedPlugins = [...plugins].sort((a, b) => (a.priority ?? 100) - (b.priority ?? 100));
  console.log('Plugins in execution order (by priority):');
  for (const plugin of sortedPlugins) {
    console.log(` ${plugin.priority ?? 100}: ${plugin.name}`);
  }
  console.log();
  // ============================================================================
  // Step 9: Dynamic Plugin Management
  // ============================================================================
  console.log('Step 9: Dynamic plugin management...');
  // Unregister a plugin
  console.log(' Unregistering cache plugin...');
  await pluginManager.unregister('cache');
  console.log(` ✓ Cache plugin unregistered (${pluginManager.getPlugins().length} remaining)`);
  // Register it again
  console.log(' Re-registering cache plugin...');
  await pluginManager.register(
    createCachePlugin({
      enabled: true,
      maxEntries: 50,
      defaultTTL: 30,
    })
  );
  console.log(` ✓ Cache plugin re-registered (${pluginManager.getPlugins().length} total)`);
  console.log();
  // ============================================================================
  // Step 10: Error Handling
  // ============================================================================
  console.log('Step 10: Demonstrating error handling...\n');
  const mockError = new Error('Connection timeout');
  const errorContext = {
    ...mockContext,
    error: mockError,
    attempts: 1,
  };
  console.log(' Executing onError hooks...');
  const errorResponse = await pluginManager.executeOnError(errorContext);
  if (errorResponse) {
    console.log(' ✓ Error handled by plugin');
  } else {
    console.log(' ✓ Error logged but not handled');
  }
  console.log();
  // ============================================================================
  // Step 11: Creating a Plugin Factory
  // ============================================================================
  console.log('Step 11: Creating reusable plugin factory...\n');
  /**
   * Builds a plugin that flags any request slower than `threshold` ms.
   * @param threshold Duration in milliseconds above which a request is
   *   reported as slow.
   */
  function createTimingPlugin(threshold: number = 1000): Plugin {
    return {
      name: `slow-request-detector-${threshold}`,
      version: '1.0.0',
      priority: 100,
      afterResponse: <T>(context: PluginContext, response: PluginResponse<T>) => {
        const duration = Date.now() - context.request.startTime;
        if (duration > threshold) {
          console.log(
            ` [Timing Plugin] SLOW REQUEST DETECTED: ${context.request.path} took ${duration}ms (threshold: ${threshold}ms)`
          );
        }
        return response;
      },
    };
  }
  // Create and register timing plugin with custom threshold
  await pluginManager.register(createTimingPlugin(500));
  console.log('✓ Timing plugin factory demonstrated\n');
  // ============================================================================
  // Step 12: Cleanup
  // ============================================================================
  console.log('Step 12: Cleanup...');
  // Clear statistics
  pluginManager.clearStats();
  // Destroy all plugins
  await pluginManager.destroy();
  await connectionManager.destroy();
  console.log('✓ Cleanup complete\n');
  console.log('=== Plugin System Example Complete ===');
  console.log('\nKey Features Demonstrated:');
  console.log(' ✓ Plugin registration and lifecycle');
  console.log(' ✓ Built-in plugins (logging, metrics, cache, rate-limit)');
  console.log(' ✓ Custom plugin creation');
  console.log(' ✓ Request/response transformation');
  console.log(' ✓ Plugin priority and execution order');
  console.log(' ✓ Dynamic plugin management (register/unregister)');
  console.log(' ✓ Error handling hooks');
  console.log(' ✓ Plugin statistics collection');
  console.log(' ✓ Plugin factories for reusable patterns');
  console.log(' ✓ Shared context between plugins');
  console.log(' ✓ Request cancellation (rate limiting)');
}
// Entry point — run the example; surface failures and exit non-zero.
main().catch((err: unknown) => {
  console.error('Example failed:', err);
  process.exit(1);
});

View File

@@ -0,0 +1,418 @@
/**
* Comprehensive Query Builder Example
*
* Demonstrates type-safe query construction with the QueryBuilder
*/
import {
createConfig,
ElasticsearchConnectionManager,
LogLevel,
} from '../../core/index.js';
import { DocumentManager } from '../../domain/documents/index.js';
import { QueryBuilder, createQuery } from '../../domain/query/index.js';
/**
 * Shape of the product documents indexed by this query example.
 * Text fields (name, description) are searched with match/multi-match
 * queries; exact filters in the example target `category.keyword` and
 * `brand.keyword` sub-fields.
 */
interface Product {
  name: string; // display name, full-text searched
  description: string; // longer text, full-text searched
  category: string; // e.g. 'Electronics'
  brand: string; // manufacturer/brand label
  price: number; // unit price (used in range queries and sorting)
  rating: number; // average review score
  stock: number; // units currently in stock
  tags: string[]; // free-form labels, e.g. ['laptop', 'budget']
  createdAt: Date; // document creation time
  updatedAt: Date; // last modification time
}
async function main() {
console.log('=== Query Builder Example ===\n');
// ============================================================================
// Step 1: Configuration
// ============================================================================
console.log('Step 1: Configuring Elasticsearch connection...');
const config = createConfig()
.fromEnv()
.nodes(process.env.ELASTICSEARCH_URL || 'http://localhost:9200')
.basicAuth(
process.env.ELASTICSEARCH_USERNAME || 'elastic',
process.env.ELASTICSEARCH_PASSWORD || 'changeme'
)
.timeout(30000)
.retries(3)
.logLevel(LogLevel.INFO)
.enableMetrics(true)
.enableTracing(true, { serviceName: 'query-example', serviceVersion: '1.0.0' })
.build();
// ============================================================================
// Step 2: Initialize Connection
// ============================================================================
console.log('Step 2: Initializing connection manager...');
const connectionManager = ElasticsearchConnectionManager.getInstance(config);
await connectionManager.initialize();
console.log('✓ Connection manager initialized\n');
// ============================================================================
// Step 3: Setup Sample Data
// ============================================================================
console.log('Step 3: Setting up sample data...');
const products = new DocumentManager<Product>({
index: 'products-query-example',
autoCreateIndex: true,
});
await products.initialize();
// Create sample products
const sampleProducts: Array<{ id: string; data: Product }> = [
{
id: 'laptop-1',
data: {
name: 'Professional Laptop Pro',
description: 'High-performance laptop for professionals',
category: 'Electronics',
brand: 'TechBrand',
price: 1299.99,
rating: 4.5,
stock: 15,
tags: ['laptop', 'professional', 'high-end'],
createdAt: new Date('2024-01-15'),
updatedAt: new Date('2024-01-20'),
},
},
{
id: 'laptop-2',
data: {
name: 'Budget Laptop Basic',
description: 'Affordable laptop for everyday use',
category: 'Electronics',
brand: 'ValueBrand',
price: 499.99,
rating: 3.8,
stock: 30,
tags: ['laptop', 'budget', 'student'],
createdAt: new Date('2024-02-01'),
updatedAt: new Date('2024-02-05'),
},
},
{
id: 'phone-1',
data: {
name: 'Smartphone X',
description: 'Latest flagship smartphone',
category: 'Electronics',
brand: 'PhoneBrand',
price: 899.99,
rating: 4.7,
stock: 25,
tags: ['smartphone', 'flagship', '5g'],
createdAt: new Date('2024-01-20'),
updatedAt: new Date('2024-01-25'),
},
},
{
id: 'tablet-1',
data: {
name: 'Tablet Pro',
description: 'Professional tablet for creative work',
category: 'Electronics',
brand: 'TechBrand',
price: 799.99,
rating: 4.6,
stock: 20,
tags: ['tablet', 'creative', 'professional'],
createdAt: new Date('2024-02-10'),
updatedAt: new Date('2024-02-15'),
},
},
{
id: 'monitor-1',
data: {
name: '4K Monitor',
description: 'Ultra HD monitor for gaming and design',
category: 'Electronics',
brand: 'DisplayBrand',
price: 599.99,
rating: 4.4,
stock: 12,
tags: ['monitor', '4k', 'gaming'],
createdAt: new Date('2024-01-25'),
updatedAt: new Date('2024-01-30'),
},
},
];
// Index sample data
const session = products.session();
session.start();
for (const product of sampleProducts) {
session.upsert(product.id, product.data);
}
await session.commit();
console.log(`✓ Indexed ${sampleProducts.length} sample products\n`);
// Wait for indexing to complete
await new Promise((resolve) => setTimeout(resolve, 1000));
// ============================================================================
// Step 4: Simple Queries
// ============================================================================
console.log('Step 4: Running simple queries...\n');
// 4.1: Match query - search by name
console.log('4.1: Match query - search for "laptop"');
const laptopResults = await createQuery<Product>('products-query-example')
.match('name', 'laptop')
.size(10)
.execute();
console.log(`Found ${laptopResults.hits.total.value} laptops`);
console.log('Laptops:', laptopResults.hits.hits.map((h) => h._source.name));
console.log();
// 4.2: Term query - exact match on category
console.log('4.2: Term query - exact category match');
const electronicsResults = await createQuery<Product>('products-query-example')
.term('category.keyword', 'Electronics')
.execute();
console.log(`Found ${electronicsResults.hits.total.value} electronics`);
console.log();
// 4.3: Range query - price between 500 and 1000
console.log('4.3: Range query - price between $500 and $1000');
const midPriceResults = await createQuery<Product>('products-query-example')
.range('price', { gte: 500, lte: 1000 })
.sort('price', 'asc')
.execute();
console.log(`Found ${midPriceResults.hits.total.value} products in price range`);
midPriceResults.hits.hits.forEach((hit) => {
console.log(` - ${hit._source.name}: $${hit._source.price}`);
});
console.log();
// 4.4: Multi-match query - search across multiple fields
console.log('4.4: Multi-match query - search "professional" in name and description');
const professionalResults = await createQuery<Product>('products-query-example')
.multiMatch('professional', ['name', 'description'])
.execute();
console.log(`Found ${professionalResults.hits.total.value} professional products`);
console.log();
// ============================================================================
// Step 5: Boolean Queries
// ============================================================================
console.log('Step 5: Running boolean queries...\n');
// 5.1: Must + Filter - combine multiple conditions
console.log('5.1: Boolean query - TechBrand products over $700');
const techBrandResults = await createQuery<Product>('products-query-example')
.term('brand.keyword', 'TechBrand')
.range('price', { gte: 700 })
.sort('price', 'desc')
.execute();
console.log(`Found ${techBrandResults.hits.total.value} matching products`);
techBrandResults.hits.hits.forEach((hit) => {
console.log(` - ${hit._source.name} (${hit._source.brand}): $${hit._source.price}`);
});
console.log();
// 5.2: Should clause - match any condition
console.log('5.2: Should query - products matching "laptop" OR "tablet"');
const laptopOrTabletResults = await new QueryBuilder<Product>('products-query-example')
.should({ match: { name: { query: 'laptop' } } })
.should({ match: { name: { query: 'tablet' } } })
.minimumMatch(1)
.execute();
console.log(`Found ${laptopOrTabletResults.hits.total.value} laptops or tablets`);
console.log();
// 5.3: Must not - exclude results
console.log('5.3: Must not query - electronics excluding laptops');
const noLaptopsResults = await createQuery<Product>('products-query-example')
.term('category.keyword', 'Electronics')
.mustNot({ match: { name: { query: 'laptop' } } })
.execute();
console.log(`Found ${noLaptopsResults.hits.total.value} non-laptop electronics`);
console.log();
// ============================================================================
// Step 6: Aggregations
// ============================================================================
console.log('Step 6: Running aggregations...\n');
// 6.1: Terms aggregation - group by brand
console.log('6.1: Terms aggregation - products by brand');
const brandAggResults = await createQuery<Product>('products-query-example')
.matchAll()
.size(0) // We only want aggregations, not documents
.aggregations((agg) => {
agg.terms('brands', 'brand.keyword', { size: 10 });
})
.execute();
if (brandAggResults.aggregations && 'brands' in brandAggResults.aggregations) {
const brandsAgg = brandAggResults.aggregations.brands as { buckets: Array<{ key: string; doc_count: number }> };
console.log('Products by brand:');
brandsAgg.buckets.forEach((bucket) => {
console.log(` - ${bucket.key}: ${bucket.doc_count} products`);
});
}
console.log();
// 6.2: Metric aggregations - price statistics
console.log('6.2: Metric aggregations - price statistics');
const priceStatsResults = await createQuery<Product>('products-query-example')
.matchAll()
.size(0)
.aggregations((agg) => {
agg.stats('price_stats', 'price');
agg.avg('avg_rating', 'rating');
agg.sum('total_stock', 'stock');
})
.execute();
if (priceStatsResults.aggregations) {
console.log('Price statistics:', priceStatsResults.aggregations.price_stats);
console.log('Average rating:', priceStatsResults.aggregations.avg_rating);
console.log('Total stock:', priceStatsResults.aggregations.total_stock);
}
console.log();
// 6.3: Nested aggregations - brands with average price
console.log('6.3: Nested aggregations - average price per brand');
const nestedAggResults = await createQuery<Product>('products-query-example')
.matchAll()
.size(0)
.aggregations((agg) => {
agg.terms('brands', 'brand.keyword', { size: 10 }).subAggregation('avg_price', (sub) => {
sub.avg('avg_price', 'price');
});
})
.execute();
if (nestedAggResults.aggregations && 'brands' in nestedAggResults.aggregations) {
const brandsAgg = nestedAggResults.aggregations.brands as {
buckets: Array<{ key: string; doc_count: number; avg_price: { value: number } }>;
};
console.log('Average price by brand:');
brandsAgg.buckets.forEach((bucket) => {
console.log(` - ${bucket.key}: $${bucket.avg_price.value.toFixed(2)} (${bucket.doc_count} products)`);
});
}
console.log();
// ============================================================================
// Step 7: Advanced Features
// ============================================================================
console.log('Step 7: Advanced query features...\n');
// 7.1: Pagination
console.log('7.1: Pagination - page 1 of results (2 per page)');
const page1Results = await createQuery<Product>('products-query-example')
.matchAll()
.paginate(1, 2)
.sort('price', 'asc')
.execute();
console.log(`Page 1: ${page1Results.hits.hits.length} results`);
page1Results.hits.hits.forEach((hit) => {
console.log(` - ${hit._source.name}: $${hit._source.price}`);
});
console.log();
// 7.2: Source filtering - only return specific fields
console.log('7.2: Source filtering - only name and price');
const filteredResults = await createQuery<Product>('products-query-example')
.matchAll()
.fields(['name', 'price'])
.size(3)
.execute();
console.log('Filtered results:');
filteredResults.hits.hits.forEach((hit) => {
console.log(` - Name: ${hit._source.name}, Price: ${hit._source.price}`);
});
console.log();
// 7.3: Count documents
console.log('7.3: Count documents matching query');
const count = await createQuery<Product>('products-query-example')
.range('price', { gte: 500 })
.count();
console.log(`Count of products over $500: ${count}`);
console.log();
// 7.4: Get only sources (convenience method)
console.log('7.4: Get sources only');
const sources = await createQuery<Product>('products-query-example')
.term('brand.keyword', 'TechBrand')
.executeAndGetSources();
console.log(`TechBrand products: ${sources.map((s) => s.name).join(', ')}`);
console.log();
// ============================================================================
// Step 8: Complex Real-World Query
// ============================================================================
console.log('Step 8: Complex real-world query...\n');
console.log('Finding high-rated electronics in stock, sorted by best deals:');
const complexResults = await createQuery<Product>('products-query-example')
.term('category.keyword', 'Electronics')
.range('rating', { gte: 4.0 })
.range('stock', { gt: 0 })
.range('price', { lte: 1000 })
.sort('rating', 'desc')
.size(5)
.aggregations((agg) => {
agg.terms('top_brands', 'brand.keyword', { size: 5 });
agg.avg('avg_price', 'price');
agg.max('max_rating', 'rating');
})
.execute();
console.log(`Found ${complexResults.hits.total.value} matching products`);
console.log('\nTop results:');
complexResults.hits.hits.forEach((hit, index) => {
console.log(` ${index + 1}. ${hit._source.name}`);
console.log(` Brand: ${hit._source.brand}`);
console.log(` Price: $${hit._source.price}`);
console.log(` Rating: ${hit._source.rating}`);
console.log(` Stock: ${hit._source.stock} units`);
});
if (complexResults.aggregations) {
console.log('\nAggregated insights:');
console.log(' Average price:', complexResults.aggregations.avg_price);
console.log(' Max rating:', complexResults.aggregations.max_rating);
if ('top_brands' in complexResults.aggregations) {
const topBrands = complexResults.aggregations.top_brands as { buckets: Array<{ key: string; doc_count: number }> };
console.log(' Top brands:');
topBrands.buckets.forEach((bucket) => {
console.log(` - ${bucket.key}: ${bucket.doc_count} products`);
});
}
}
console.log();
// ============================================================================
// Step 9: Cleanup
// ============================================================================
console.log('Step 9: Cleanup...');
await products.deleteIndex();
console.log('✓ Test index deleted');
await connectionManager.destroy();
console.log('✓ Connection closed\n');
console.log('=== Query Builder Example Complete ===');
}
// Entry point: run the example; report any failure and exit with a non-zero code.
void (async () => {
  try {
    await main();
  } catch (error) {
    console.error('Example failed:', error);
    process.exit(1);
  }
})();

View File

@@ -0,0 +1,449 @@
/**
* Comprehensive Transaction Example
*
* Demonstrates distributed transactions with ACID-like semantics
*/
import {
createConfig,
ElasticsearchConnectionManager,
LogLevel,
createTransactionManager,
type TransactionCallbacks,
type ConflictInfo,
} from '../../index.js';
/**
 * A bank account document stored in the `accounts` index.
 * Used by the transfer, rollback, savepoint, and order-processing steps below.
 */
interface BankAccount {
  // Account identifier; the examples also use it as the document _id (e.g. 'acc-001').
  accountId: string;
  // Current balance, expressed in `currency` units.
  balance: number;
  // Currency code; the sample data uses 'USD'.
  currency: string;
  // Refreshed on every write performed by the example transactions.
  lastUpdated: Date;
}
/**
 * A customer order document stored in the `orders` index.
 * Created as 'pending' and promoted to 'confirmed' inside the
 * multi-document transaction in Step 8.
 */
interface Order {
  // Order identifier; also used as the document _id (e.g. 'ord-001').
  orderId: string;
  // Identifier of the purchasing customer.
  customerId: string;
  // Line items: product id, quantity ordered, and unit price.
  items: Array<{ productId: string; quantity: number; price: number }>;
  // Order total charged against the customer's account.
  total: number;
  // Lifecycle state modeled as a literal union (see Step 8).
  status: 'pending' | 'confirmed' | 'cancelled';
  createdAt: Date;
}
/**
 * An inventory record stored in the `inventory` index.
 * Step 8 moves units from `quantity` to `reserved` when an order is placed.
 */
interface Inventory {
  // Product identifier; also used as the document _id (e.g. 'prod-001').
  productId: string;
  // Units currently available for sale.
  quantity: number;
  // Units held back for pending orders.
  reserved: number;
  // Refreshed on every write performed by the example transactions.
  lastUpdated: Date;
}
/**
 * Extracts a human-readable message from an unknown thrown value,
 * keeping catch clauses type-safe without `any` casts.
 */
function errorMessage(error: unknown): string {
  return error instanceof Error ? error.message : String(error);
}

/**
 * Runs the end-to-end transaction demo: configuration, connection setup,
 * a simple money transfer, rollback on error, savepoints, concurrent
 * transactions with conflict handling, a multi-document order flow,
 * statistics, and cleanup.
 *
 * Per-step failures are caught and logged inline; initialization and
 * cleanup failures propagate to the caller (the runner exits non-zero).
 */
async function main(): Promise<void> {
  console.log('=== Transaction System Example ===\n');
  // ============================================================================
  // Step 1: Configuration
  // ============================================================================
  console.log('Step 1: Configuring Elasticsearch connection...');
  // Env vars win via fromEnv(); the explicit values below are local-dev fallbacks.
  const config = createConfig()
    .fromEnv()
    .nodes(process.env.ELASTICSEARCH_URL || 'http://localhost:9200')
    .basicAuth(
      process.env.ELASTICSEARCH_USERNAME || 'elastic',
      process.env.ELASTICSEARCH_PASSWORD || 'changeme'
    )
    .timeout(30000)
    .retries(3)
    .logLevel(LogLevel.INFO)
    .enableMetrics(true)
    .build();
  // ============================================================================
  // Step 2: Initialize Connection and Transaction Manager
  // ============================================================================
  console.log('Step 2: Initializing connection and transaction manager...');
  const connectionManager = ElasticsearchConnectionManager.getInstance(config);
  await connectionManager.initialize();
  const transactionManager = createTransactionManager({
    defaultIsolationLevel: 'read_committed',
    defaultLockingStrategy: 'optimistic',
    defaultTimeout: 30000,
    maxConcurrentTransactions: 100,
    conflictResolution: 'retry',
    enableLogging: true,
    enableMetrics: true,
  });
  await transactionManager.initialize();
  console.log('✓ Connection and transaction manager initialized\n');
  // ============================================================================
  // Step 3: Setup Test Data
  // ============================================================================
  console.log('Step 3: Setting up test data...');
  const client = connectionManager.getClient();
  // Create test indices (best-effort: creation fails harmlessly when the
  // index already exists, so the error binding is intentionally omitted).
  for (const index of ['accounts', 'orders', 'inventory']) {
    try {
      await client.indices.create({ index });
    } catch {
      // Index might already exist
    }
  }
  // Create test accounts
  await client.index({
    index: 'accounts',
    id: 'acc-001',
    document: {
      accountId: 'acc-001',
      balance: 1000,
      currency: 'USD',
      lastUpdated: new Date(),
    },
  });
  await client.index({
    index: 'accounts',
    id: 'acc-002',
    document: {
      accountId: 'acc-002',
      balance: 500,
      currency: 'USD',
      lastUpdated: new Date(),
    },
  });
  // Create test inventory
  await client.index({
    index: 'inventory',
    id: 'prod-001',
    document: {
      productId: 'prod-001',
      quantity: 100,
      reserved: 0,
      lastUpdated: new Date(),
    },
  });
  console.log('✓ Test data created\n');
  // ============================================================================
  // Step 4: Simple Transaction - Money Transfer
  // ============================================================================
  console.log('Step 4: Simple transaction - money transfer...');
  const transferTxn = await transactionManager.begin({
    isolationLevel: 'read_committed',
    autoRollback: true, // any throw below triggers an automatic rollback
  });
  try {
    // Read source account
    const sourceAccount = await transferTxn.read<BankAccount>('accounts', 'acc-001');
    console.log(` Source balance before: $${sourceAccount?.balance}`);
    // Read destination account
    const destAccount = await transferTxn.read<BankAccount>('accounts', 'acc-002');
    console.log(` Destination balance before: $${destAccount?.balance}`);
    // Transfer amount
    const transferAmount = 200;
    if (!sourceAccount || sourceAccount.balance < transferAmount) {
      throw new Error('Insufficient funds');
    }
    // Explicit guard instead of a non-null assertion on the destination.
    if (!destAccount) {
      throw new Error('Destination account not found');
    }
    // Update source account
    await transferTxn.update<BankAccount>('accounts', 'acc-001', {
      balance: sourceAccount.balance - transferAmount,
      lastUpdated: new Date(),
    });
    // Update destination account
    await transferTxn.update<BankAccount>('accounts', 'acc-002', {
      balance: destAccount.balance + transferAmount,
      lastUpdated: new Date(),
    });
    // Commit transaction
    const result = await transferTxn.commit();
    console.log(` ✓ Transfer completed`);
    console.log(` Operations: ${result.operationsExecuted}`);
    console.log(` Duration: ${result.duration}ms`);
  } catch (error) {
    console.log(` ✗ Transfer failed: ${errorMessage(error)}`);
  }
  console.log();
  // ============================================================================
  // Step 5: Transaction with Rollback
  // ============================================================================
  console.log('Step 5: Transaction with rollback...');
  const rollbackTxn = await transactionManager.begin({
    autoRollback: true,
  });
  try {
    const account = await rollbackTxn.read<BankAccount>('accounts', 'acc-001');
    console.log(` Balance before: $${account?.balance}`);
    if (!account) {
      throw new Error('Account not found');
    }
    // Update account
    await rollbackTxn.update<BankAccount>('accounts', 'acc-001', {
      balance: account.balance + 500,
      lastUpdated: new Date(),
    });
    // Simulate error
    throw new Error('Simulated error - transaction will rollback');
  } catch (error) {
    console.log(` ✗ Error occurred: ${errorMessage(error)}`);
    const result = await rollbackTxn.rollback();
    console.log(` ✓ Transaction rolled back`);
    console.log(` Operations rolled back: ${result.operationsRolledBack}`);
  }
  // Verify balance unchanged
  const accountAfter = await client.get({ index: 'accounts', id: 'acc-001' });
  console.log(` Balance after rollback: $${(accountAfter._source as BankAccount).balance}`);
  console.log();
  // ============================================================================
  // Step 6: Transaction with Savepoints
  // ============================================================================
  console.log('Step 6: Transaction with savepoints...');
  const savepointTxn = await transactionManager.begin();
  try {
    const account = await savepointTxn.read<BankAccount>('accounts', 'acc-001');
    console.log(` Initial balance: $${account?.balance}`);
    if (!account) {
      throw new Error('Account not found');
    }
    // First operation
    await savepointTxn.update<BankAccount>('accounts', 'acc-001', {
      balance: account.balance + 100,
    });
    console.log(' Operation 1: +$100');
    // Create savepoint
    savepointTxn.savepoint('after_first_op');
    // Second operation
    // NOTE(review): this intentionally reuses the balance read at the start of
    // the transaction (not the post-operation-1 value); operation 2 is removed
    // by the savepoint rollback below, so it never takes effect.
    await savepointTxn.update<BankAccount>('accounts', 'acc-001', {
      balance: account.balance + 200,
    });
    console.log(' Operation 2: +$200');
    // Rollback to savepoint (removes operation 2)
    savepointTxn.rollbackTo('after_first_op');
    console.log(' Rolled back to savepoint (operation 2 removed)');
    // Commit transaction (only operation 1 will be committed)
    await savepointTxn.commit();
    console.log(' ✓ Transaction committed (only operation 1)');
  } catch (error) {
    console.log(` ✗ Error: ${errorMessage(error)}`);
    await savepointTxn.rollback();
  }
  console.log();
  // ============================================================================
  // Step 7: Concurrent Transactions with Conflict
  // ============================================================================
  console.log('Step 7: Concurrent transactions with conflict handling...');
  let conflictsDetected = 0;
  const callbacks: TransactionCallbacks = {
    onConflict: async (conflict: ConflictInfo) => {
      conflictsDetected++;
      console.log(` ⚠ Conflict detected on ${conflict.operation.index}/${conflict.operation.id}`);
      return 'retry'; // Automatically retry
    },
  };
  // Start two concurrent transactions modifying the same document
  const txn1 = transactionManager.begin({ maxRetries: 5 }, callbacks);
  const txn2 = transactionManager.begin({ maxRetries: 5 }, callbacks);
  const [transaction1, transaction2] = await Promise.all([txn1, txn2]);
  try {
    // Both read the same account
    const [account1, account2] = await Promise.all([
      transaction1.read<BankAccount>('accounts', 'acc-001'),
      transaction2.read<BankAccount>('accounts', 'acc-001'),
    ]);
    console.log(` Initial balance (txn1): $${account1?.balance}`);
    console.log(` Initial balance (txn2): $${account2?.balance}`);
    if (!account1 || !account2) {
      throw new Error('Account not found');
    }
    // Both try to update
    await transaction1.update<BankAccount>('accounts', 'acc-001', {
      balance: account1.balance + 50,
    });
    await transaction2.update<BankAccount>('accounts', 'acc-001', {
      balance: account2.balance + 75,
    });
    // Commit both (one will conflict and retry)
    const [result1, result2] = await Promise.all([
      transaction1.commit(),
      transaction2.commit(),
    ]);
    console.log(` ✓ Transaction 1: ${result1.success ? 'committed' : 'failed'}`);
    console.log(` ✓ Transaction 2: ${result2.success ? 'committed' : 'failed'}`);
    console.log(` Conflicts detected and resolved: ${conflictsDetected}`);
  } catch (error) {
    console.log(` ✗ Error: ${errorMessage(error)}`);
  }
  console.log();
  // ============================================================================
  // Step 8: Complex Multi-Document Transaction - Order Processing
  // ============================================================================
  console.log('Step 8: Complex multi-document transaction - order processing...');
  const orderTxn = await transactionManager.begin({
    isolationLevel: 'repeatable_read',
    autoRollback: true,
  });
  try {
    // Create order
    const order: Order = {
      orderId: 'ord-001',
      customerId: 'cust-001',
      items: [
        { productId: 'prod-001', quantity: 5, price: 10 },
      ],
      total: 50,
      status: 'pending',
      createdAt: new Date(),
    };
    await orderTxn.create<Order>('orders', order.orderId, order);
    console.log(' Created order');
    // Check and reserve inventory
    const inventory = await orderTxn.read<Inventory>('inventory', 'prod-001');
    console.log(` Available inventory: ${inventory?.quantity}`);
    if (!inventory || inventory.quantity < 5) {
      throw new Error('Insufficient inventory');
    }
    await orderTxn.update<Inventory>('inventory', 'prod-001', {
      quantity: inventory.quantity - 5,
      reserved: inventory.reserved + 5,
      lastUpdated: new Date(),
    });
    console.log(' Reserved inventory: 5 units');
    // Charge customer account
    const customerAccount = await orderTxn.read<BankAccount>('accounts', 'acc-001');
    if (!customerAccount || customerAccount.balance < order.total) {
      throw new Error('Insufficient funds');
    }
    await orderTxn.update<BankAccount>('accounts', 'acc-001', {
      balance: customerAccount.balance - order.total,
      lastUpdated: new Date(),
    });
    console.log(` Charged customer: $${order.total}`);
    // Update order status
    await orderTxn.update<Order>('orders', order.orderId, {
      status: 'confirmed',
    });
    console.log(' Order confirmed');
    // Commit all operations atomically
    const result = await orderTxn.commit();
    console.log(` ✓ Order processed successfully`);
    console.log(` Operations: ${result.operationsExecuted}`);
    console.log(` Duration: ${result.duration}ms`);
  } catch (error) {
    console.log(` ✗ Order processing failed: ${errorMessage(error)}`);
    console.log(' All changes rolled back');
  }
  console.log();
  // ============================================================================
  // Step 9: Transaction Statistics
  // ============================================================================
  console.log('Step 9: Transaction statistics...\n');
  const stats = transactionManager.getStats();
  console.log('Transaction Manager Statistics:');
  console.log(` Total started: ${stats.totalStarted}`);
  console.log(` Total committed: ${stats.totalCommitted}`);
  console.log(` Total rolled back: ${stats.totalRolledBack}`);
  console.log(` Total failed: ${stats.totalFailed}`);
  console.log(` Total operations: ${stats.totalOperations}`);
  console.log(` Total conflicts: ${stats.totalConflicts}`);
  console.log(` Total retries: ${stats.totalRetries}`);
  console.log(` Success rate: ${(stats.successRate * 100).toFixed(2)}%`);
  console.log(` Avg duration: ${stats.avgDuration.toFixed(2)}ms`);
  console.log(` Avg operations/txn: ${stats.avgOperationsPerTransaction.toFixed(2)}`);
  console.log(` Active transactions: ${stats.activeTransactions}`);
  console.log();
  // ============================================================================
  // Step 10: Cleanup
  // ============================================================================
  console.log('Step 10: Cleanup...');
  await transactionManager.destroy();
  await connectionManager.destroy();
  console.log('✓ Cleanup complete\n');
  console.log('=== Transaction System Example Complete ===');
  console.log('\nKey Features Demonstrated:');
  console.log(' ✓ ACID-like transaction semantics');
  console.log(' ✓ Optimistic concurrency control');
  console.log(' ✓ Automatic rollback on error');
  console.log(' ✓ Compensation-based rollback');
  console.log(' ✓ Savepoints for partial rollback');
  console.log(' ✓ Conflict detection and retry');
  console.log(' ✓ Multi-document transactions');
  console.log(' ✓ Isolation levels (read_committed, repeatable_read)');
  console.log(' ✓ Transaction callbacks and hooks');
  console.log(' ✓ Comprehensive statistics');
}
// Entry point: run the example; report any failure and exit with a non-zero code.
void (async () => {
  try {
    await main();
  } catch (error) {
    console.error('Example failed:', error);
    process.exit(1);
  }
})();