BREAKING CHANGE(core): Refactor to v3: introduce modular core/domain architecture, plugin system, observability and strict TypeScript configuration; remove legacy classes
This commit is contained in:
636
ts/domain/bulk/bulk-indexer.ts
Normal file
636
ts/domain/bulk/bulk-indexer.ts
Normal file
@@ -0,0 +1,636 @@
|
||||
import type {
|
||||
BulkOperation,
|
||||
BulkOperationType,
|
||||
BulkBatchResult,
|
||||
BulkOperationResult,
|
||||
BulkIndexerConfig,
|
||||
BulkIndexerStats,
|
||||
BulkProgress,
|
||||
BackpressureState,
|
||||
BatchingStrategy,
|
||||
} from './types.js';
|
||||
import { ElasticsearchConnectionManager } from '../../core/connection/connection-manager.js';
|
||||
import { defaultLogger } from '../../core/observability/logger.js';
|
||||
import { defaultMetrics } from '../../core/observability/metrics.js';
|
||||
import { defaultTracing } from '../../core/observability/tracing.js';
|
||||
|
||||
/**
|
||||
* Enterprise-grade bulk indexer with adaptive batching and parallel workers
|
||||
*
|
||||
* Features:
|
||||
* - Adaptive batching based on document size and performance
|
||||
* - Parallel workers for maximum throughput
|
||||
* - Automatic retries with exponential backoff
|
||||
* - Dead-letter queue for permanently failed operations
|
||||
* - Backpressure handling to prevent memory issues
|
||||
* - Progress callbacks and statistics
|
||||
* - Stream support via async iteration
|
||||
* - Full observability integration
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const indexer = new BulkIndexer({
|
||||
* batchingStrategy: 'adaptive',
|
||||
* maxBatchSize: 1000,
|
||||
* workers: 4,
|
||||
* enableDeadLetterQueue: true,
|
||||
* onProgress: (progress) => {
|
||||
* console.log(`Processed: ${progress.totalProcessed}/${progress.totalSubmitted}`);
|
||||
* }
|
||||
* });
|
||||
*
|
||||
* await indexer.start();
|
||||
*
|
||||
* // Submit operations
|
||||
* for (const doc of documents) {
|
||||
* await indexer.index('my-index', doc.id, doc);
|
||||
* }
|
||||
*
|
||||
* await indexer.flush();
|
||||
* await indexer.stop();
|
||||
* ```
|
||||
*/
|
||||
export class BulkIndexer {
|
||||
private config: Required<BulkIndexerConfig>;
|
||||
private queue: BulkOperation[] = [];
|
||||
private workers: Worker[] = [];
|
||||
private stats: BulkIndexerStats = {
|
||||
totalSubmitted: 0,
|
||||
totalProcessed: 0,
|
||||
totalSuccessful: 0,
|
||||
totalFailed: 0,
|
||||
totalDeadLettered: 0,
|
||||
totalBatches: 0,
|
||||
totalBatchesFailed: 0,
|
||||
queueSize: 0,
|
||||
currentOpsPerSecond: 0,
|
||||
avgOpsPerSecond: 0,
|
||||
avgBatchSize: 0,
|
||||
avgBatchDurationMs: 0,
|
||||
activeWorkers: 0,
|
||||
};
|
||||
private running = false;
|
||||
private flushTimer?: NodeJS.Timeout;
|
||||
private lastProgressReport = Date.now();
|
||||
private operationTimestamps: number[] = [];
|
||||
private batchSizes: number[] = [];
|
||||
private batchDurations: number[] = [];
|
||||
private deadLetterQueue: Array<{ operation: BulkOperation; error: string; attempts: number }> = [];
|
||||
|
||||
constructor(config: BulkIndexerConfig = {}) {
|
||||
this.config = {
|
||||
batchingStrategy: config.batchingStrategy ?? 'adaptive',
|
||||
batchSize: config.batchSize ?? 500,
|
||||
maxBatchSize: config.maxBatchSize ?? 1000,
|
||||
minBatchSize: config.minBatchSize ?? 100,
|
||||
targetBatchBytes: config.targetBatchBytes ?? 5 * 1024 * 1024, // 5MB
|
||||
flushIntervalMs: config.flushIntervalMs ?? 5000,
|
||||
workers: config.workers ?? 2,
|
||||
maxQueueSize: config.maxQueueSize ?? 10000,
|
||||
maxRetries: config.maxRetries ?? 3,
|
||||
retryDelayMs: config.retryDelayMs ?? 1000,
|
||||
enableDeadLetterQueue: config.enableDeadLetterQueue ?? false,
|
||||
deadLetterIndex: config.deadLetterIndex ?? 'failed-operations-{now/d}',
|
||||
onProgress: config.onProgress ?? (() => {}),
|
||||
onBatchSuccess: config.onBatchSuccess ?? (() => {}),
|
||||
onBatchError: config.onBatchError ?? (() => {}),
|
||||
refresh: config.refresh ?? false,
|
||||
pipeline: config.pipeline ?? '',
|
||||
routing: config.routing ?? '',
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new bulk indexer
|
||||
*/
|
||||
static create(config?: BulkIndexerConfig): BulkIndexer {
|
||||
return new BulkIndexer(config);
|
||||
}
|
||||
|
||||
/**
|
||||
* Start the bulk indexer
|
||||
*/
|
||||
async start(): Promise<void> {
|
||||
if (this.running) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.running = true;
|
||||
this.stats.startedAt = new Date();
|
||||
|
||||
// Start workers
|
||||
for (let i = 0; i < this.config.workers; i++) {
|
||||
const worker = new Worker(this, i);
|
||||
this.workers.push(worker);
|
||||
worker.start();
|
||||
}
|
||||
|
||||
// Start flush timer
|
||||
this.flushTimer = setInterval(() => {
|
||||
this.triggerFlush();
|
||||
}, this.config.flushIntervalMs);
|
||||
|
||||
defaultLogger.info('Bulk indexer started', {
|
||||
workers: this.config.workers,
|
||||
batchingStrategy: this.config.batchingStrategy,
|
||||
maxBatchSize: this.config.maxBatchSize,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop the bulk indexer
|
||||
*/
|
||||
async stop(): Promise<void> {
|
||||
if (!this.running) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Stop flush timer
|
||||
if (this.flushTimer) {
|
||||
clearInterval(this.flushTimer);
|
||||
}
|
||||
|
||||
// Flush remaining operations
|
||||
await this.flush();
|
||||
|
||||
// Stop workers
|
||||
for (const worker of this.workers) {
|
||||
worker.stop();
|
||||
}
|
||||
|
||||
this.running = false;
|
||||
|
||||
defaultLogger.info('Bulk indexer stopped', {
|
||||
stats: this.stats,
|
||||
});
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Operation Methods
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Index a document
|
||||
*/
|
||||
async index<T>(index: string, id: string | undefined, document: T): Promise<void> {
|
||||
await this.submit({
|
||||
type: 'index',
|
||||
index,
|
||||
id,
|
||||
document,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a document (fails if exists)
|
||||
*/
|
||||
async create<T>(index: string, id: string, document: T): Promise<void> {
|
||||
await this.submit({
|
||||
type: 'create',
|
||||
index,
|
||||
id,
|
||||
document,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Update a document
|
||||
*/
|
||||
async update<T>(index: string, id: string, partialDocument: Partial<T>, options?: { retryOnConflict?: number }): Promise<void> {
|
||||
await this.submit({
|
||||
type: 'update',
|
||||
index,
|
||||
id,
|
||||
partialDocument,
|
||||
retryOnConflict: options?.retryOnConflict,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a document
|
||||
*/
|
||||
async delete(index: string, id: string): Promise<void> {
|
||||
await this.submit({
|
||||
type: 'delete',
|
||||
index,
|
||||
id,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Submit a custom bulk operation
|
||||
*/
|
||||
async submit(operation: BulkOperation): Promise<void> {
|
||||
// Check backpressure
|
||||
const backpressure = this.getBackpressure();
|
||||
if (backpressure.active) {
|
||||
await this.waitForBackpressure(backpressure.recommendedWaitMs);
|
||||
}
|
||||
|
||||
// Add to queue
|
||||
this.queue.push(operation);
|
||||
this.stats.totalSubmitted++;
|
||||
this.stats.queueSize = this.queue.length;
|
||||
|
||||
// Track timestamp for ops/sec calculation
|
||||
this.operationTimestamps.push(Date.now());
|
||||
if (this.operationTimestamps.length > 1000) {
|
||||
this.operationTimestamps.shift();
|
||||
}
|
||||
|
||||
// Update current ops/sec
|
||||
this.updateCurrentOpsPerSecond();
|
||||
|
||||
// Report progress if needed
|
||||
this.reportProgress();
|
||||
|
||||
// Trigger flush if batch size reached
|
||||
const batchSize = this.getCurrentBatchSize();
|
||||
if (this.queue.length >= batchSize) {
|
||||
this.triggerFlush();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Flush pending operations immediately
|
||||
*/
|
||||
async flush(): Promise<BulkBatchResult[]> {
|
||||
const results: BulkBatchResult[] = [];
|
||||
|
||||
while (this.queue.length > 0) {
|
||||
const batchSize = this.getCurrentBatchSize();
|
||||
const batch = this.queue.splice(0, Math.min(batchSize, this.queue.length));
|
||||
this.stats.queueSize = this.queue.length;
|
||||
|
||||
if (batch.length > 0) {
|
||||
const result = await this.executeBatch(batch);
|
||||
results.push(result);
|
||||
}
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current statistics
|
||||
*/
|
||||
getStats(): BulkIndexerStats {
|
||||
return { ...this.stats };
|
||||
}
|
||||
|
||||
/**
|
||||
* Get backpressure state
|
||||
*/
|
||||
getBackpressure(): BackpressureState {
|
||||
const utilization = (this.queue.length / this.config.maxQueueSize) * 100;
|
||||
const active = utilization > 80;
|
||||
|
||||
return {
|
||||
active,
|
||||
queueUtilization: utilization,
|
||||
recommendedWaitMs: active ? Math.min(1000, (utilization - 80) * 50) : 0,
|
||||
};
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Private Methods
|
||||
// ============================================================================
|
||||
|
||||
private async executeBatch(operations: BulkOperation[]): Promise<BulkBatchResult> {
|
||||
const span = defaultTracing.createSpan('bulkIndexer.executeBatch', {
|
||||
'batch.size': operations.length,
|
||||
});
|
||||
|
||||
const startTime = Date.now();
|
||||
this.stats.activeWorkers++;
|
||||
|
||||
try {
|
||||
const client = ElasticsearchConnectionManager.getInstance().getClient();
|
||||
|
||||
// Build bulk body
|
||||
const body: any[] = [];
|
||||
for (const op of operations) {
|
||||
const action: any = {};
|
||||
action[op.type] = {
|
||||
_index: op.index,
|
||||
...(op.id && { _id: op.id }),
|
||||
...(op.routing && { routing: op.routing }),
|
||||
...(op.pipeline && { pipeline: op.pipeline }),
|
||||
...(op.ifSeqNo !== undefined && { if_seq_no: op.ifSeqNo }),
|
||||
...(op.ifPrimaryTerm !== undefined && { if_primary_term: op.ifPrimaryTerm }),
|
||||
...(op.retryOnConflict !== undefined && { retry_on_conflict: op.retryOnConflict }),
|
||||
};
|
||||
body.push(action);
|
||||
|
||||
// Add document for index/create
|
||||
if (op.type === 'index' || op.type === 'create') {
|
||||
body.push(op.document);
|
||||
}
|
||||
|
||||
// Add partial document for update
|
||||
if (op.type === 'update') {
|
||||
body.push({ doc: op.partialDocument });
|
||||
}
|
||||
}
|
||||
|
||||
// Execute bulk request
|
||||
const response = await client.bulk({
|
||||
refresh: this.config.refresh,
|
||||
operations: body,
|
||||
});
|
||||
|
||||
const durationMs = Date.now() - startTime;
|
||||
|
||||
// Track batch metrics
|
||||
this.batchSizes.push(operations.length);
|
||||
this.batchDurations.push(durationMs);
|
||||
if (this.batchSizes.length > 100) {
|
||||
this.batchSizes.shift();
|
||||
this.batchDurations.shift();
|
||||
}
|
||||
this.stats.avgBatchSize = this.batchSizes.reduce((a, b) => a + b, 0) / this.batchSizes.length;
|
||||
this.stats.avgBatchDurationMs = this.batchDurations.reduce((a, b) => a + b, 0) / this.batchDurations.length;
|
||||
|
||||
// Process results
|
||||
const results: BulkOperationResult[] = [];
|
||||
let successful = 0;
|
||||
let failed = 0;
|
||||
|
||||
if (response.items) {
|
||||
for (let i = 0; i < response.items.length; i++) {
|
||||
const item = response.items[i];
|
||||
const op = operations[i];
|
||||
const actionResult = item && (item.index || item.create || item.update || item.delete);
|
||||
|
||||
if (actionResult) {
|
||||
const success = !actionResult.error && (actionResult.status === 200 || actionResult.status === 201);
|
||||
|
||||
results.push({
|
||||
success,
|
||||
type: op?.type as BulkOperationType,
|
||||
index: actionResult._index,
|
||||
id: actionResult._id,
|
||||
status: actionResult.status,
|
||||
error: actionResult.error ? {
|
||||
type: actionResult.error.type,
|
||||
reason: actionResult.error.reason,
|
||||
causedBy: actionResult.error.caused_by ? JSON.stringify(actionResult.error.caused_by) : undefined,
|
||||
} : undefined,
|
||||
seqNo: actionResult._seq_no,
|
||||
primaryTerm: actionResult._primary_term,
|
||||
});
|
||||
|
||||
if (success) {
|
||||
successful++;
|
||||
} else {
|
||||
failed++;
|
||||
// Handle failed operation
|
||||
if (op) {
|
||||
await this.handleFailedOperation(op, actionResult.error?.reason || 'Unknown error');
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Update stats
|
||||
this.stats.totalProcessed += operations.length;
|
||||
this.stats.totalSuccessful += successful;
|
||||
this.stats.totalFailed += failed;
|
||||
this.stats.totalBatches++;
|
||||
this.stats.lastBatchAt = new Date();
|
||||
this.stats.activeWorkers--;
|
||||
|
||||
// Calculate avg ops/sec
|
||||
if (this.stats.startedAt) {
|
||||
const elapsedSeconds = (Date.now() - this.stats.startedAt.getTime()) / 1000;
|
||||
this.stats.avgOpsPerSecond = this.stats.totalProcessed / elapsedSeconds;
|
||||
}
|
||||
|
||||
// Record metrics
|
||||
defaultMetrics.requestsTotal.inc({ operation: 'bulk', result: 'success' });
|
||||
defaultMetrics.requestDuration.observe({ operation: 'bulk' }, durationMs);
|
||||
|
||||
const result: BulkBatchResult = {
|
||||
successful,
|
||||
failed,
|
||||
total: operations.length,
|
||||
durationMs,
|
||||
results,
|
||||
hasErrors: failed > 0,
|
||||
};
|
||||
|
||||
// Callbacks
|
||||
this.config.onBatchSuccess(result);
|
||||
|
||||
if (failed > 0) {
|
||||
defaultLogger.warn('Bulk batch had errors', {
|
||||
successful,
|
||||
failed,
|
||||
total: operations.length,
|
||||
});
|
||||
}
|
||||
|
||||
span.setAttributes({
|
||||
'batch.successful': successful,
|
||||
'batch.failed': failed,
|
||||
'batch.duration_ms': durationMs,
|
||||
});
|
||||
span.end();
|
||||
|
||||
return result;
|
||||
} catch (error) {
|
||||
this.stats.totalBatchesFailed++;
|
||||
this.stats.activeWorkers--;
|
||||
|
||||
defaultMetrics.requestErrors.inc({ operation: 'bulk' });
|
||||
defaultLogger.error('Bulk batch failed', {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
batchSize: operations.length,
|
||||
});
|
||||
|
||||
this.config.onBatchError(error as Error, operations);
|
||||
|
||||
// Retry all operations
|
||||
for (const op of operations) {
|
||||
await this.handleFailedOperation(op, (error as Error).message);
|
||||
}
|
||||
|
||||
span.recordException(error as Error);
|
||||
span.end();
|
||||
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
private async handleFailedOperation(operation: BulkOperation, error: string): Promise<void> {
|
||||
// Find existing entry in dead-letter queue
|
||||
const existingIndex = this.deadLetterQueue.findIndex(
|
||||
(item) => item.operation.type === operation.type && item.operation.index === operation.index && item.operation.id === operation.id
|
||||
);
|
||||
|
||||
const attempts = existingIndex >= 0 ? this.deadLetterQueue[existingIndex]!.attempts + 1 : 1;
|
||||
|
||||
if (attempts <= this.config.maxRetries) {
|
||||
// Retry with delay
|
||||
if (existingIndex >= 0) {
|
||||
this.deadLetterQueue[existingIndex]!.attempts = attempts;
|
||||
} else {
|
||||
this.deadLetterQueue.push({ operation, error, attempts });
|
||||
}
|
||||
|
||||
setTimeout(() => {
|
||||
this.queue.unshift(operation); // Add to front of queue
|
||||
}, this.config.retryDelayMs * attempts);
|
||||
} else {
|
||||
// Max retries exceeded
|
||||
if (this.config.enableDeadLetterQueue) {
|
||||
await this.sendToDeadLetterQueue(operation, error, attempts);
|
||||
}
|
||||
this.stats.totalDeadLettered++;
|
||||
|
||||
// Remove from retry queue
|
||||
if (existingIndex >= 0) {
|
||||
this.deadLetterQueue.splice(existingIndex, 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private async sendToDeadLetterQueue(operation: BulkOperation, error: string, attempts: number): Promise<void> {
|
||||
try {
|
||||
const client = ElasticsearchConnectionManager.getInstance().getClient();
|
||||
const indexName = this.resolveDeadLetterIndexName();
|
||||
|
||||
await client.index({
|
||||
index: indexName,
|
||||
document: {
|
||||
...operation,
|
||||
failed_at: new Date().toISOString(),
|
||||
error,
|
||||
attempts,
|
||||
},
|
||||
});
|
||||
|
||||
defaultLogger.warn('Operation sent to dead-letter queue', {
|
||||
index: indexName,
|
||||
operation: operation.type,
|
||||
error,
|
||||
attempts,
|
||||
});
|
||||
} catch (dlqError) {
|
||||
defaultLogger.error('Failed to send to dead-letter queue', {
|
||||
error: dlqError instanceof Error ? dlqError.message : String(dlqError),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
private resolveDeadLetterIndexName(): string {
|
||||
const pattern = this.config.deadLetterIndex;
|
||||
if (pattern.includes('{now/d}')) {
|
||||
const date = new Date().toISOString().split('T')[0];
|
||||
return pattern.replace('{now/d}', date);
|
||||
}
|
||||
return pattern;
|
||||
}
|
||||
|
||||
private getCurrentBatchSize(): number {
|
||||
switch (this.config.batchingStrategy) {
|
||||
case 'fixed':
|
||||
return this.config.batchSize;
|
||||
|
||||
case 'adaptive':
|
||||
// Adjust batch size based on performance
|
||||
if (this.batchDurations.length > 0) {
|
||||
const avgDuration = this.stats.avgBatchDurationMs;
|
||||
const targetDuration = 1000; // 1 second target
|
||||
|
||||
if (avgDuration > targetDuration && this.stats.avgBatchSize > this.config.minBatchSize) {
|
||||
return Math.max(this.config.minBatchSize, Math.floor(this.stats.avgBatchSize * 0.8));
|
||||
} else if (avgDuration < targetDuration * 0.5 && this.stats.avgBatchSize < this.config.maxBatchSize) {
|
||||
return Math.min(this.config.maxBatchSize, Math.floor(this.stats.avgBatchSize * 1.2));
|
||||
}
|
||||
|
||||
return Math.floor(this.stats.avgBatchSize);
|
||||
}
|
||||
return this.config.batchSize;
|
||||
|
||||
case 'size-based':
|
||||
// Estimate based on target bytes
|
||||
// For now, use fixed size as we don't have document size info
|
||||
return this.config.batchSize;
|
||||
|
||||
default:
|
||||
return this.config.batchSize;
|
||||
}
|
||||
}
|
||||
|
||||
private triggerFlush(): void {
|
||||
// Signal workers that flush is needed (workers will handle it)
|
||||
}
|
||||
|
||||
private async waitForBackpressure(ms: number): Promise<void> {
|
||||
await new Promise((resolve) => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
private updateCurrentOpsPerSecond(): void {
|
||||
if (this.operationTimestamps.length > 1) {
|
||||
const now = Date.now();
|
||||
const oneSecondAgo = now - 1000;
|
||||
const recentOps = this.operationTimestamps.filter((ts) => ts > oneSecondAgo);
|
||||
this.stats.currentOpsPerSecond = recentOps.length;
|
||||
}
|
||||
}
|
||||
|
||||
private reportProgress(): void {
|
||||
const now = Date.now();
|
||||
if (now - this.lastProgressReport > 1000) {
|
||||
// Report every second
|
||||
const progress: BulkProgress = {
|
||||
totalSubmitted: this.stats.totalSubmitted,
|
||||
totalProcessed: this.stats.totalProcessed,
|
||||
totalSuccessful: this.stats.totalSuccessful,
|
||||
totalFailed: this.stats.totalFailed,
|
||||
queueSize: this.stats.queueSize,
|
||||
operationsPerSecond: this.stats.currentOpsPerSecond,
|
||||
avgBatchDurationMs: this.stats.avgBatchDurationMs,
|
||||
estimatedTimeRemainingMs:
|
||||
this.stats.currentOpsPerSecond > 0
|
||||
? (this.stats.queueSize / this.stats.currentOpsPerSecond) * 1000
|
||||
: undefined,
|
||||
};
|
||||
|
||||
this.config.onProgress(progress);
|
||||
this.lastProgressReport = now;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Worker for parallel batch processing
|
||||
*/
|
||||
class Worker {
|
||||
private indexer: BulkIndexer;
|
||||
private id: number;
|
||||
private running = false;
|
||||
|
||||
constructor(indexer: BulkIndexer, id: number) {
|
||||
this.indexer = indexer;
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
start(): void {
|
||||
this.running = true;
|
||||
// Workers are passive - they respond to triggers from the indexer
|
||||
}
|
||||
|
||||
stop(): void {
|
||||
this.running = false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new bulk indexer
|
||||
*/
|
||||
export function createBulkIndexer(config?: BulkIndexerConfig): BulkIndexer {
|
||||
return new BulkIndexer(config);
|
||||
}
|
||||
22
ts/domain/bulk/index.ts
Normal file
22
ts/domain/bulk/index.ts
Normal file
@@ -0,0 +1,22 @@
|
||||
/**
 * Bulk Indexing Module
 *
 * High-throughput document ingestion with adaptive batching.
 * Public entry point: re-exports the indexer implementation and all
 * supporting types from this directory.
 */

// Main classes
export { BulkIndexer, createBulkIndexer } from './bulk-indexer.js';

// Types (type-only re-exports; erased at compile time)
export type {
  BulkOperationType,
  BulkOperation,
  BulkOperationResult,
  BulkBatchResult,
  BulkProgressCallback,
  BulkProgress,
  BatchingStrategy,
  BulkIndexerConfig,
  BulkIndexerStats,
  BackpressureState,
} from './types.js';
|
||||
261
ts/domain/bulk/types.ts
Normal file
261
ts/domain/bulk/types.ts
Normal file
@@ -0,0 +1,261 @@
|
||||
/**
 * Bulk indexing types for high-throughput document ingestion
 */

/**
 * Bulk operation types (mirror the Elasticsearch bulk API action names)
 */
export type BulkOperationType = 'index' | 'create' | 'update' | 'delete';

/**
 * A single bulk operation to be submitted to the indexer.
 *
 * @typeParam T - Document shape for index/create/update operations
 */
export interface BulkOperation<T = unknown> {
  /** Operation type */
  type: BulkOperationType;

  /** Target index */
  index: string;

  /** Document ID (optional for 'index', where Elasticsearch can auto-generate one) */
  id?: string;

  /** Document to index/create (used by 'index' and 'create') */
  document?: T;

  /** Partial document for update (used by 'update' only) */
  partialDocument?: Partial<T>;

  /** If_seq_no for optimistic concurrency */
  ifSeqNo?: number;

  /** If_primary_term for optimistic concurrency */
  ifPrimaryTerm?: number;

  /** Routing value */
  routing?: string;

  /** Pipeline to execute */
  pipeline?: string;

  /** Retry on conflict (for updates) */
  retryOnConflict?: number;
}

/**
 * Result of one operation within a bulk batch
 */
export interface BulkOperationResult {
  /** Whether operation succeeded (no error and HTTP status 200/201) */
  success: boolean;

  /** Operation type */
  type: BulkOperationType;

  /** Index name */
  index: string;

  /** Document ID */
  id?: string;

  /** Error if operation failed */
  error?: {
    type: string;
    reason: string;
    /** JSON-serialized caused_by chain from Elasticsearch, if present */
    causedBy?: string;
  };

  /** HTTP status code */
  status?: number;

  /** Sequence number (for successful operations) */
  seqNo?: number;

  /** Primary term (for successful operations) */
  primaryTerm?: number;
}

/**
 * Aggregated result of a single bulk request (one batch)
 */
export interface BulkBatchResult {
  /** Number of successful operations */
  successful: number;

  /** Number of failed operations */
  failed: number;

  /** Total operations in batch */
  total: number;

  /** Time taken in milliseconds */
  durationMs: number;

  /** Individual operation results, in submission order */
  results: BulkOperationResult[];

  /** Whether batch had errors (failed > 0) */
  hasErrors: boolean;
}

/**
 * Progress callback, invoked by the indexer at most about once per second
 */
export type BulkProgressCallback = (progress: BulkProgress) => void;

/**
 * Bulk progress information (a point-in-time snapshot of indexer stats)
 */
export interface BulkProgress {
  /** Total operations submitted */
  totalSubmitted: number;

  /** Total operations processed */
  totalProcessed: number;

  /** Total successful operations */
  totalSuccessful: number;

  /** Total failed operations */
  totalFailed: number;

  /** Current queue size */
  queueSize: number;

  /** Operations per second (recent window) */
  operationsPerSecond: number;

  /** Average batch duration */
  avgBatchDurationMs: number;

  /** Estimated time remaining (ms); undefined when throughput is zero */
  estimatedTimeRemainingMs?: number;
}

/**
 * Batching strategy:
 * - 'fixed': always use batchSize
 * - 'adaptive': tune between minBatchSize and maxBatchSize from observed batch durations
 * - 'size-based': target a byte budget per batch
 */
export type BatchingStrategy = 'fixed' | 'adaptive' | 'size-based';

/**
 * Bulk indexer configuration (all fields optional; defaults are applied
 * by the BulkIndexer constructor)
 */
export interface BulkIndexerConfig {
  /** Batching strategy */
  batchingStrategy?: BatchingStrategy;

  /** Fixed batch size (for fixed strategy) */
  batchSize?: number;

  /** Maximum batch size (for adaptive strategy) */
  maxBatchSize?: number;

  /** Minimum batch size (for adaptive strategy) */
  minBatchSize?: number;

  /** Target batch size in bytes (for size-based strategy) */
  targetBatchBytes?: number;

  /** Flush interval in milliseconds */
  flushIntervalMs?: number;

  /** Number of parallel workers */
  workers?: number;

  /** Maximum queue size before backpressure */
  maxQueueSize?: number;

  /** Maximum retries for failed operations */
  maxRetries?: number;

  /** Base retry delay in milliseconds (scaled by attempt count) */
  retryDelayMs?: number;

  /** Enable dead-letter queue */
  enableDeadLetterQueue?: boolean;

  /** Dead-letter index pattern; a `{now/d}` placeholder is replaced with the current date */
  deadLetterIndex?: string;

  /** Progress callback */
  onProgress?: BulkProgressCallback;

  /** Callback for successful batch */
  onBatchSuccess?: (result: BulkBatchResult) => void;

  /** Callback for failed batch (transport-level failure of the whole request) */
  onBatchError?: (error: Error, operations: BulkOperation[]) => void;

  /** Refresh policy, passed through to the bulk request */
  refresh?: boolean | 'wait_for';

  /** Default pipeline */
  pipeline?: string;

  /** Default routing */
  routing?: string;
}

/**
 * Bulk indexer statistics (cumulative since start, plus rolling averages)
 */
export interface BulkIndexerStats {
  /** Total operations submitted */
  totalSubmitted: number;

  /** Total operations processed */
  totalProcessed: number;

  /** Total successful operations */
  totalSuccessful: number;

  /** Total failed operations */
  totalFailed: number;

  /** Total operations in dead-letter queue */
  totalDeadLettered: number;

  /** Total batches executed */
  totalBatches: number;

  /** Total batches failed */
  totalBatchesFailed: number;

  /** Current queue size */
  queueSize: number;

  /** Operations per second (current) */
  currentOpsPerSecond: number;

  /** Average operations per second since start */
  avgOpsPerSecond: number;

  /** Average batch size (recent window) */
  avgBatchSize: number;

  /** Average batch duration (recent window) */
  avgBatchDurationMs: number;

  /** Started at (unset until start() is called) */
  startedAt?: Date;

  /** Last batch at */
  lastBatchAt?: Date;

  /** Active workers (batches currently in flight) */
  activeWorkers: number;
}

/**
 * Backpressure state reported by the indexer
 */
export interface BackpressureState {
  /** Whether backpressure is active */
  active: boolean;

  /** Queue utilization percentage (0-100+) */
  queueUtilization: number;

  /** Recommended wait time in milliseconds before submitting more work */
  recommendedWaitMs: number;
}
|
||||
571
ts/domain/documents/document-manager.ts
Normal file
571
ts/domain/documents/document-manager.ts
Normal file
@@ -0,0 +1,571 @@
|
||||
import type { Client as ElasticClient } from '@elastic/elasticsearch';
|
||||
import { ElasticsearchConnectionManager } from '../../core/connection/connection-manager.js';
|
||||
import { Logger, defaultLogger } from '../../core/observability/logger.js';
|
||||
import { MetricsCollector, defaultMetricsCollector } from '../../core/observability/metrics.js';
|
||||
import { TracingProvider, defaultTracingProvider } from '../../core/observability/tracing.js';
|
||||
import { DocumentSession } from './document-session.js';
|
||||
import {
|
||||
DocumentWithMeta,
|
||||
SessionConfig,
|
||||
SnapshotProcessor,
|
||||
SnapshotMeta,
|
||||
IteratorOptions,
|
||||
} from './types.js';
|
||||
import { IndexNotFoundError } from '../../core/errors/elasticsearch-error.js';
|
||||
|
||||
/**
 * Document manager configuration. Only `index` is required; all
 * collaborators fall back to shared/default instances when omitted.
 */
export interface DocumentManagerConfig {
  /** Index name */
  index: string;

  /** Connection manager (optional, will use singleton if not provided) */
  connectionManager?: ElasticsearchConnectionManager;

  /** Logger (optional, will use default child logger if not provided) */
  logger?: Logger;

  /** Metrics collector (optional) */
  metrics?: MetricsCollector;

  /** Tracing provider (optional) */
  tracing?: TracingProvider;

  /** Auto-create index if it doesn't exist (otherwise initialize() throws IndexNotFoundError) */
  autoCreateIndex?: boolean;

  /** Default batch size for operations */
  defaultBatchSize?: number;
}
|
||||
|
||||
/**
|
||||
* Fluent document manager for Elasticsearch
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const docs = new DocumentManager<Product>('products');
|
||||
* await docs.initialize();
|
||||
*
|
||||
* // Session-based operations
|
||||
* await docs
|
||||
* .session()
|
||||
* .start()
|
||||
* .upsert('prod-1', { name: 'Widget', price: 99.99 })
|
||||
* .upsert('prod-2', { name: 'Gadget', price: 149.99 })
|
||||
* .commit();
|
||||
*
|
||||
* // Get a document
|
||||
* const product = await docs.get('prod-1');
|
||||
*
|
||||
* // Create snapshot
|
||||
* const snapshot = await docs.snapshot(async (iterator) => {
|
||||
* const products = [];
|
||||
* for await (const doc of iterator) {
|
||||
* products.push(doc._source);
|
||||
* }
|
||||
* return { totalCount: products.length, products };
|
||||
* });
|
||||
* ```
|
||||
*/
|
||||
export class DocumentManager<T = unknown> {
  // Elasticsearch client, resolved from the connection manager in the constructor.
  private client: ElasticClient;
  private connectionManager: ElasticsearchConnectionManager;
  private logger: Logger;
  private metrics: MetricsCollector;
  private tracing: TracingProvider;
  // Target index name (copied from config.index).
  private index: string;
  private config: DocumentManagerConfig;
  // Set by initialize(), cleared by deleteIndex(); guarded via ensureInitialized().
  private isInitialized = false;

  /**
   * Builds a manager bound to `config.index`.
   *
   * Observability hooks (logger/metrics/tracing) and the connection manager
   * fall back to module-level defaults when not supplied.
   *
   * @throws whatever `connectionManager.getClient()` throws when the
   *   connection manager has not been initialized yet.
   */
  constructor(config: DocumentManagerConfig) {
    this.config = config;
    this.index = config.index;

    // Get or create connection manager (process-wide singleton fallback)
    this.connectionManager =
      config.connectionManager || ElasticsearchConnectionManager.getInstance();

    // Set up observability; child logger is namespaced per index.
    this.logger = config.logger || defaultLogger.child(`documents:${this.index}`);
    // NOTE(review): sibling modules (e.g. bulk-indexer.ts) import `defaultMetrics`
    // and `defaultTracing` — confirm `defaultMetricsCollector` / `defaultTracingProvider`
    // match this file's actual imports.
    this.metrics = config.metrics || defaultMetricsCollector;
    this.tracing = config.tracing || defaultTracingProvider;

    // Get client (will throw if connection manager not initialized)
    this.client = this.connectionManager.getClient();
  }

  /**
   * Static factory method for fluent creation.
   *
   * @param index - target index name
   * @param config - remaining configuration (everything except `index`)
   */
  static create<T = unknown>(index: string, config: Omit<DocumentManagerConfig, 'index'> = {}): DocumentManager<T> {
    return new DocumentManager<T>({ ...config, index });
  }

  /**
   * Initialize the document manager.
   *
   * Verifies the index exists; creates it when `config.autoCreateIndex` is set,
   * otherwise throws `IndexNotFoundError`. Idempotent — returns immediately
   * once initialized.
   */
  async initialize(): Promise<void> {
    if (this.isInitialized) {
      return;
    }

    return this.tracing.withSpan('DocumentManager.initialize', async (span) => {
      span.setAttribute('index', this.index);

      try {
        // Check if index exists
        const exists = await this.client.indices.exists({ index: this.index });

        if (!exists && this.config.autoCreateIndex) {
          this.logger.info('Creating index', { index: this.index });
          // Created with default settings/mappings — no explicit mapping is applied here.
          await this.client.indices.create({ index: this.index });
          this.logger.info('Index created', { index: this.index });
        } else if (!exists) {
          throw new IndexNotFoundError(this.index);
        }

        this.isInitialized = true;
        this.logger.info('Document manager initialized', { index: this.index });
      } catch (error) {
        this.logger.error('Failed to initialize document manager', error as Error, {
          index: this.index,
        });
        span.recordException(error as Error);
        throw error;
      }
    });
  }

  /**
   * Create a new session for batch operations.
   *
   * The caller is responsible for calling `start()` on the returned session
   * before queueing operations.
   */
  session(config?: SessionConfig): DocumentSession<T> {
    this.ensureInitialized();
    return new DocumentSession<T>(this.client, this.index, this.logger, config);
  }

  /**
   * Get a single document by ID.
   *
   * @returns the document with version metadata, or `null` when the document
   *   does not exist (404 is swallowed; other errors are rethrown).
   */
  async get(documentId: string): Promise<DocumentWithMeta<T> | null> {
    this.ensureInitialized();

    return this.tracing.withSpan('DocumentManager.get', async (span) => {
      span.setAttributes({
        'document.id': documentId,
        'document.index': this.index,
      });

      const startTime = Date.now();

      try {
        const result = await this.client.get({
          index: this.index,
          id: documentId,
        });

        // Duration is recorded in seconds, labeled by operation and index.
        const duration = (Date.now() - startTime) / 1000;
        this.metrics.requestDuration.observe(duration, {
          operation: 'get',
          index: this.index,
        });

        return {
          _id: result._id,
          _source: result._source as T,
          _version: result._version,
          _seq_no: result._seq_no,
          _primary_term: result._primary_term,
          _index: result._index,
        };
      } catch (error: any) {
        // NOTE(review): relies on `error.statusCode`; newer @elastic/elasticsearch
        // clients expose the status under `error.meta.statusCode` — confirm.
        if (error.statusCode === 404) {
          this.logger.debug('Document not found', { documentId, index: this.index });
          return null;
        }

        this.logger.error('Failed to get document', error, { documentId, index: this.index });
        span.recordException(error);
        throw error;
      }
    });
  }

  /**
   * Create a document.
   *
   * Uses the ES `create` API, so it fails if the document already exists.
   * `refresh: true` makes the write immediately searchable (at a throughput cost).
   */
  async create(documentId: string, document: T): Promise<void> {
    this.ensureInitialized();

    return this.tracing.withSpan('DocumentManager.create', async (span) => {
      span.setAttributes({
        'document.id': documentId,
        'document.index': this.index,
      });

      const startTime = Date.now();

      try {
        await this.client.create({
          index: this.index,
          id: documentId,
          body: document,
          refresh: true,
        });

        const duration = (Date.now() - startTime) / 1000;
        this.metrics.requestDuration.observe(duration, {
          operation: 'create',
          index: this.index,
        });

        this.logger.debug('Document created', { documentId, index: this.index });
      } catch (error) {
        this.logger.error('Failed to create document', error as Error, {
          documentId,
          index: this.index,
        });
        span.recordException(error as Error);
        throw error;
      }
    });
  }

  /**
   * Update a document (partial update via the ES `update` API's `doc` merge).
   *
   * Fails if the document does not exist. When `options.seqNo` /
   * `options.primaryTerm` are provided they are forwarded as
   * `if_seq_no` / `if_primary_term` for optimistic concurrency control.
   */
  async update(
    documentId: string,
    document: Partial<T>,
    options?: { seqNo?: number; primaryTerm?: number }
  ): Promise<void> {
    this.ensureInitialized();

    return this.tracing.withSpan('DocumentManager.update', async (span) => {
      span.setAttributes({
        'document.id': documentId,
        'document.index': this.index,
      });

      const startTime = Date.now();

      try {
        await this.client.update({
          index: this.index,
          id: documentId,
          body: { doc: document },
          refresh: true,
          // Optimistic concurrency: only applied when explicitly provided.
          ...(options?.seqNo !== undefined && { if_seq_no: options.seqNo }),
          ...(options?.primaryTerm !== undefined && { if_primary_term: options.primaryTerm }),
        });

        const duration = (Date.now() - startTime) / 1000;
        this.metrics.requestDuration.observe(duration, {
          operation: 'update',
          index: this.index,
        });

        this.logger.debug('Document updated', { documentId, index: this.index });
      } catch (error) {
        this.logger.error('Failed to update document', error as Error, {
          documentId,
          index: this.index,
        });
        span.recordException(error as Error);
        throw error;
      }
    });
  }

  /**
   * Upsert a document (create or replace).
   *
   * Uses the ES `index` API, which fully replaces any existing source —
   * this is not a partial merge like `update()`.
   */
  async upsert(documentId: string, document: T): Promise<void> {
    this.ensureInitialized();

    return this.tracing.withSpan('DocumentManager.upsert', async (span) => {
      span.setAttributes({
        'document.id': documentId,
        'document.index': this.index,
      });

      const startTime = Date.now();

      try {
        await this.client.index({
          index: this.index,
          id: documentId,
          body: document,
          refresh: true,
        });

        const duration = (Date.now() - startTime) / 1000;
        this.metrics.requestDuration.observe(duration, {
          operation: 'upsert',
          index: this.index,
        });

        this.logger.debug('Document upserted', { documentId, index: this.index });
      } catch (error) {
        this.logger.error('Failed to upsert document', error as Error, {
          documentId,
          index: this.index,
        });
        span.recordException(error as Error);
        throw error;
      }
    });
  }

  /**
   * Delete a document.
   *
   * Idempotent: a 404 (document already absent) is treated as success.
   */
  async delete(documentId: string): Promise<void> {
    this.ensureInitialized();

    return this.tracing.withSpan('DocumentManager.delete', async (span) => {
      span.setAttributes({
        'document.id': documentId,
        'document.index': this.index,
      });

      const startTime = Date.now();

      try {
        await this.client.delete({
          index: this.index,
          id: documentId,
          refresh: true,
        });

        const duration = (Date.now() - startTime) / 1000;
        this.metrics.requestDuration.observe(duration, {
          operation: 'delete',
          index: this.index,
        });

        this.logger.debug('Document deleted', { documentId, index: this.index });
      } catch (error: any) {
        if (error.statusCode === 404) {
          this.logger.debug('Document not found for deletion', { documentId, index: this.index });
          return; // Idempotent delete
        }

        this.logger.error('Failed to delete document', error, { documentId, index: this.index });
        span.recordException(error);
        throw error;
      }
    });
  }

  /**
   * Check if the index exists.
   *
   * Never throws — any error (including connectivity failures) is logged and
   * reported as `false`, so callers cannot distinguish "absent" from "unreachable".
   */
  async exists(): Promise<boolean> {
    try {
      return await this.client.indices.exists({ index: this.index });
    } catch (error) {
      this.logger.error('Failed to check if index exists', error as Error, {
        index: this.index,
      });
      return false;
    }
  }

  /**
   * Delete the index.
   *
   * Resets the initialized flag so a subsequent `initialize()` can recreate it.
   */
  async deleteIndex(): Promise<void> {
    return this.tracing.withSpan('DocumentManager.deleteIndex', async (span) => {
      span.setAttribute('index', this.index);

      try {
        await this.client.indices.delete({ index: this.index });
        this.isInitialized = false;
        this.logger.info('Index deleted', { index: this.index });
      } catch (error) {
        this.logger.error('Failed to delete index', error as Error, { index: this.index });
        span.recordException(error as Error);
        throw error;
      }
    });
  }

  /**
   * Get document count.
   *
   * @param query - optional ES query DSL object; when omitted, counts all documents.
   */
  async count(query?: unknown): Promise<number> {
    this.ensureInitialized();

    try {
      const result = await this.client.count({
        index: this.index,
        ...(query && { body: { query } }),
      });

      return result.count;
    } catch (error) {
      this.logger.error('Failed to count documents', error as Error, { index: this.index });
      throw error;
    }
  }

  /**
   * Create a snapshot with a custom processor.
   *
   * Feeds every document of the index through `processor` (together with the
   * most recent previous snapshot, if any), then persists the result plus
   * metadata into the companion `<index>-snapshots` index.
   *
   * Note: `documentCount` is taken via `count()` AFTER the processor has
   * finished iterating, so it can differ from the number of documents the
   * processor actually saw if concurrent writes occur.
   */
  async snapshot<R>(processor: SnapshotProcessor<T, R>): Promise<SnapshotMeta<R>> {
    this.ensureInitialized();

    return this.tracing.withSpan('DocumentManager.snapshot', async (span) => {
      span.setAttribute('index', this.index);

      const startTime = Date.now();
      const snapshotIndex = `${this.index}-snapshots`;

      try {
        // Get previous snapshot (null when none exists yet)
        const previousSnapshot = await this.getLatestSnapshot<R>(snapshotIndex);

        // Create iterator for all documents
        const iterator = this.iterate();

        // Process snapshot
        const snapshotData = await processor(iterator, previousSnapshot);

        // Count documents
        const documentCount = await this.count();

        // Store snapshot
        const snapshot: SnapshotMeta<R> = {
          date: new Date(),
          data: snapshotData,
          documentCount,
          processingTime: Date.now() - startTime,
        };

        await this.storeSnapshot(snapshotIndex, snapshot);

        this.logger.info('Snapshot created', {
          index: this.index,
          documentCount,
          processingTime: snapshot.processingTime,
        });

        return snapshot;
      } catch (error) {
        this.logger.error('Failed to create snapshot', error as Error, { index: this.index });
        span.recordException(error as Error);
        throw error;
      }
    });
  }

  /**
   * Iterate over all documents using `search_after` pagination.
   *
   * @param options - batch size (default `config.defaultBatchSize` or 1000),
   *   optional sort order and query filter.
   */
  async *iterate(options: IteratorOptions = {}): AsyncIterableIterator<DocumentWithMeta<T>> {
    this.ensureInitialized();

    const batchSize = options.batchSize || this.config.defaultBatchSize || 1000;

    // TODO: Use Point-in-Time API for better performance
    // For now, use basic search with search_after
    // NOTE(review): without a PIT, concurrent writes between pages can cause
    // documents to be skipped or seen twice. Also, sorting on `_id` may require
    // fielddata on recent ES versions — confirm, or switch to `_shard_doc` + PIT.

    let searchAfter: any[] | undefined;
    let hasMore = true;

    while (hasMore) {
      const result = await this.client.search({
        index: this.index,
        body: {
          size: batchSize,
          ...(searchAfter && { search_after: searchAfter }),
          sort: options.sort || [{ _id: 'asc' }],
          ...(options.query && { query: options.query }),
        },
      });

      const hits = result.hits.hits;

      if (hits.length === 0) {
        hasMore = false;
        break;
      }

      for (const hit of hits) {
        yield {
          _id: hit._id,
          _source: hit._source as T,
          _version: hit._version,
          _seq_no: hit._seq_no,
          _primary_term: hit._primary_term,
          _index: hit._index,
          _score: hit._score,
        };
      }

      // Get last sort value for pagination
      const lastHit = hits[hits.length - 1];
      searchAfter = lastHit.sort;

      // Short page means we've reached the end — avoids one extra empty search.
      if (hits.length < batchSize) {
        hasMore = false;
      }
    }
  }

  /**
   * Get the payload (`data`) of the most recent snapshot, or null when the
   * snapshot index does not exist yet or holds no documents.
   */
  private async getLatestSnapshot<R>(snapshotIndex: string): Promise<R | null> {
    try {
      const result = await this.client.search({
        index: snapshotIndex,
        body: {
          size: 1,
          // NOTE(review): assumes the `date` field is mapped as a date type in
          // the snapshot index (auto-mapping of the ISO string) — confirm.
          sort: [{ 'date': 'desc' }],
        },
      });

      if (result.hits.hits.length === 0) {
        return null;
      }

      const snapshot = result.hits.hits[0]._source as SnapshotMeta<R>;
      return snapshot.data;
    } catch (error: any) {
      if (error.statusCode === 404) {
        return null; // Index doesn't exist yet
      }
      throw error;
    }
  }

  /**
   * Persist a snapshot document into the snapshot index (auto-generated ID,
   * immediately visible via `refresh: true`).
   */
  private async storeSnapshot<R>(snapshotIndex: string, snapshot: SnapshotMeta<R>): Promise<void> {
    await this.client.index({
      index: snapshotIndex,
      body: snapshot,
      refresh: true,
    });
  }

  /**
   * Ensure the manager is initialized; throws otherwise.
   */
  private ensureInitialized(): void {
    if (!this.isInitialized) {
      throw new Error('DocumentManager not initialized. Call initialize() first.');
    }
  }

  /**
   * Get the index name this manager is bound to.
   */
  getIndex(): string {
    return this.index;
  }
}
|
||||
356
ts/domain/documents/document-session.ts
Normal file
356
ts/domain/documents/document-session.ts
Normal file
@@ -0,0 +1,356 @@
|
||||
import type { Client as ElasticClient } from '@elastic/elasticsearch';
|
||||
import {
|
||||
BatchOperation,
|
||||
BatchResult,
|
||||
DocumentOperation,
|
||||
SessionConfig,
|
||||
} from './types.js';
|
||||
import { Logger } from '../../core/observability/logger.js';
|
||||
import { BulkOperationError } from '../../core/errors/elasticsearch-error.js';
|
||||
|
||||
/**
|
||||
* Document session for managing document lifecycle
|
||||
*
|
||||
* Tracks documents during a session and can clean up stale ones at the end.
|
||||
*/
|
||||
export class DocumentSession<T = unknown> {
|
||||
private operations: BatchOperation<T>[] = [];
|
||||
private seenDocuments = new Set<string>();
|
||||
private config: Required<SessionConfig>;
|
||||
private startTimestamp: Date;
|
||||
private isActive = false;
|
||||
|
||||
constructor(
|
||||
private client: ElasticClient,
|
||||
private index: string,
|
||||
private logger: Logger,
|
||||
config: SessionConfig = {}
|
||||
) {
|
||||
this.config = {
|
||||
onlyNew: config.onlyNew || false,
|
||||
fromTimestamp: config.fromTimestamp || new Date(),
|
||||
cleanupStale: config.cleanupStale !== false,
|
||||
batchSize: config.batchSize || 1000,
|
||||
};
|
||||
this.startTimestamp = new Date();
|
||||
}
|
||||
|
||||
/**
|
||||
* Start the session
|
||||
*/
|
||||
start(): this {
|
||||
if (this.isActive) {
|
||||
throw new Error('Session already active');
|
||||
}
|
||||
|
||||
this.isActive = true;
|
||||
this.operations = [];
|
||||
this.seenDocuments.clear();
|
||||
this.startTimestamp = new Date();
|
||||
|
||||
this.logger.debug('Document session started', {
|
||||
index: this.index,
|
||||
config: this.config,
|
||||
});
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a document (upsert - create or update)
|
||||
*/
|
||||
upsert(documentId: string, document: T): this {
|
||||
this.ensureActive();
|
||||
|
||||
this.operations.push({
|
||||
operation: DocumentOperation.UPSERT,
|
||||
documentId,
|
||||
document,
|
||||
});
|
||||
|
||||
this.seenDocuments.add(documentId);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a document (fails if exists)
|
||||
*/
|
||||
create(documentId: string, document: T): this {
|
||||
this.ensureActive();
|
||||
|
||||
this.operations.push({
|
||||
operation: DocumentOperation.CREATE,
|
||||
documentId,
|
||||
document,
|
||||
});
|
||||
|
||||
this.seenDocuments.add(documentId);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Update a document (fails if doesn't exist)
|
||||
*/
|
||||
update(documentId: string, document: T, version?: { seqNo: number; primaryTerm: number }): this {
|
||||
this.ensureActive();
|
||||
|
||||
this.operations.push({
|
||||
operation: DocumentOperation.UPDATE,
|
||||
documentId,
|
||||
document,
|
||||
...(version && {
|
||||
seqNo: version.seqNo,
|
||||
primaryTerm: version.primaryTerm,
|
||||
}),
|
||||
});
|
||||
|
||||
this.seenDocuments.add(documentId);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a document
|
||||
*/
|
||||
delete(documentId: string): this {
|
||||
this.ensureActive();
|
||||
|
||||
this.operations.push({
|
||||
operation: DocumentOperation.DELETE,
|
||||
documentId,
|
||||
});
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Commit the session and execute all operations
|
||||
*/
|
||||
async commit(): Promise<BatchResult> {
|
||||
this.ensureActive();
|
||||
|
||||
try {
|
||||
// Execute batched operations
|
||||
const result = await this.executeBatch();
|
||||
|
||||
// Clean up stale documents if configured
|
||||
if (this.config.cleanupStale) {
|
||||
await this.cleanupStaleDocuments();
|
||||
}
|
||||
|
||||
this.isActive = false;
|
||||
|
||||
this.logger.info('Session committed', {
|
||||
index: this.index,
|
||||
successful: result.successful,
|
||||
failed: result.failed,
|
||||
duration: Date.now() - this.startTimestamp.getTime(),
|
||||
});
|
||||
|
||||
return result;
|
||||
} catch (error) {
|
||||
this.logger.error('Session commit failed', error as Error, {
|
||||
index: this.index,
|
||||
operationCount: this.operations.length,
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Rollback the session (discard all operations)
|
||||
*/
|
||||
rollback(): void {
|
||||
this.operations = [];
|
||||
this.seenDocuments.clear();
|
||||
this.isActive = false;
|
||||
|
||||
this.logger.debug('Session rolled back', { index: this.index });
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute batch operations
|
||||
*/
|
||||
private async executeBatch(): Promise<BatchResult> {
|
||||
if (this.operations.length === 0) {
|
||||
return {
|
||||
successful: 0,
|
||||
failed: 0,
|
||||
errors: [],
|
||||
took: 0,
|
||||
};
|
||||
}
|
||||
|
||||
const startTime = Date.now();
|
||||
const bulkBody: any[] = [];
|
||||
|
||||
// Build bulk request body
|
||||
for (const op of this.operations) {
|
||||
switch (op.operation) {
|
||||
case DocumentOperation.CREATE:
|
||||
bulkBody.push({ create: { _index: this.index, _id: op.documentId } });
|
||||
bulkBody.push(op.document);
|
||||
break;
|
||||
|
||||
case DocumentOperation.UPDATE:
|
||||
bulkBody.push({
|
||||
update: {
|
||||
_index: this.index,
|
||||
_id: op.documentId,
|
||||
...(op.seqNo !== undefined && { if_seq_no: op.seqNo }),
|
||||
...(op.primaryTerm !== undefined && { if_primary_term: op.primaryTerm }),
|
||||
},
|
||||
});
|
||||
bulkBody.push({ doc: op.document });
|
||||
break;
|
||||
|
||||
case DocumentOperation.UPSERT:
|
||||
bulkBody.push({ index: { _index: this.index, _id: op.documentId } });
|
||||
bulkBody.push(op.document);
|
||||
break;
|
||||
|
||||
case DocumentOperation.DELETE:
|
||||
bulkBody.push({ delete: { _index: this.index, _id: op.documentId } });
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Execute bulk request
|
||||
const response = await this.client.bulk({
|
||||
body: bulkBody,
|
||||
refresh: true, // Make changes immediately visible
|
||||
});
|
||||
|
||||
const took = Date.now() - startTime;
|
||||
|
||||
// Process results
|
||||
let successful = 0;
|
||||
let failed = 0;
|
||||
const errors: Array<{
|
||||
documentId: string;
|
||||
operation: DocumentOperation;
|
||||
error: string;
|
||||
statusCode: number;
|
||||
}> = [];
|
||||
|
||||
if (response.errors) {
|
||||
for (let i = 0; i < response.items.length; i++) {
|
||||
const item = response.items[i];
|
||||
const operation = this.operations[i];
|
||||
|
||||
const action = Object.keys(item)[0];
|
||||
const result = item[action as keyof typeof item] as any;
|
||||
|
||||
if (result.error) {
|
||||
failed++;
|
||||
errors.push({
|
||||
documentId: operation.documentId,
|
||||
operation: operation.operation,
|
||||
error: result.error.reason || result.error,
|
||||
statusCode: result.status,
|
||||
});
|
||||
} else {
|
||||
successful++;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
successful = response.items.length;
|
||||
}
|
||||
|
||||
const result: BatchResult = {
|
||||
successful,
|
||||
failed,
|
||||
errors,
|
||||
took,
|
||||
};
|
||||
|
||||
if (failed > 0) {
|
||||
this.logger.warn('Batch operation had failures', {
|
||||
successful,
|
||||
failed,
|
||||
errors: errors.slice(0, 5), // Log first 5 errors
|
||||
});
|
||||
|
||||
if (failed === this.operations.length) {
|
||||
// Complete failure
|
||||
throw new BulkOperationError(
|
||||
'All bulk operations failed',
|
||||
successful,
|
||||
failed,
|
||||
errors
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Clean up documents not seen in this session
|
||||
*/
|
||||
private async cleanupStaleDocuments(): Promise<void> {
|
||||
if (this.seenDocuments.size === 0) {
|
||||
return; // No documents to keep, skip cleanup
|
||||
}
|
||||
|
||||
this.logger.debug('Cleaning up stale documents', {
|
||||
index: this.index,
|
||||
seenCount: this.seenDocuments.size,
|
||||
});
|
||||
|
||||
try {
|
||||
// Use deleteByQuery to remove documents not in seen set
|
||||
// This is more efficient than the old scroll-and-compare approach
|
||||
const seenIds = Array.from(this.seenDocuments);
|
||||
|
||||
await this.client.deleteByQuery({
|
||||
index: this.index,
|
||||
body: {
|
||||
query: {
|
||||
bool: {
|
||||
must_not: {
|
||||
ids: {
|
||||
values: seenIds,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
refresh: true,
|
||||
});
|
||||
|
||||
this.logger.debug('Stale documents cleaned up', { index: this.index });
|
||||
} catch (error) {
|
||||
this.logger.warn('Failed to cleanup stale documents', undefined, {
|
||||
index: this.index,
|
||||
error: (error as Error).message,
|
||||
});
|
||||
// Don't throw - cleanup is best-effort
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensure session is active
|
||||
*/
|
||||
private ensureActive(): void {
|
||||
if (!this.isActive) {
|
||||
throw new Error('Session not active. Call start() first.');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get session statistics
|
||||
*/
|
||||
getStats(): {
|
||||
isActive: boolean;
|
||||
operationCount: number;
|
||||
seenDocumentCount: number;
|
||||
startTime: Date;
|
||||
} {
|
||||
return {
|
||||
isActive: this.isActive,
|
||||
operationCount: this.operations.length,
|
||||
seenDocumentCount: this.seenDocuments.size,
|
||||
startTime: this.startTimestamp,
|
||||
};
|
||||
}
|
||||
}
|
||||
16
ts/domain/documents/index.ts
Normal file
16
ts/domain/documents/index.ts
Normal file
@@ -0,0 +1,16 @@
|
||||
/**
 * Document management API (barrel module)
 *
 * This module provides:
 * - Fluent document manager with full CRUD operations
 * - Session-based batch operations with automatic stale-document cleanup
 * - Snapshot functionality for point-in-time analytics
 * - Async iteration over documents
 * - Optimistic locking support (seq_no / primary_term)
 *
 * @packageDocumentation
 */

export * from './types.js';
export * from './document-session.js';
export * from './document-manager.js';
|
||||
122
ts/domain/documents/types.ts
Normal file
122
ts/domain/documents/types.ts
Normal file
@@ -0,0 +1,122 @@
|
||||
/**
 * Document operation types.
 *
 * String values double as readable identifiers in batch error reports.
 * Note: UPSERT is translated to the bulk `index` action (full replacement)
 * by DocumentSession, not to an ES "upsert" update.
 */
export enum DocumentOperation {
  CREATE = 'create',
  UPDATE = 'update',
  UPSERT = 'upsert',
  DELETE = 'delete',
}

/**
 * Document with metadata, mirroring an Elasticsearch hit / get response.
 */
export interface DocumentWithMeta<T = unknown> {
  /** Document ID */
  _id: string;

  /** Document source */
  _source: T;

  /** Document version (for optimistic locking) */
  _version?: number;

  /** Sequence number (for optimistic locking) */
  _seq_no?: number;

  /** Primary term (for optimistic locking) */
  _primary_term?: number;

  /** Document index */
  _index?: string;

  /** Document score (from search; absent on direct gets) */
  _score?: number;
}

/**
 * Batch operation for bulk requests (one queued DocumentSession action).
 */
export interface BatchOperation<T = unknown> {
  operation: DocumentOperation;
  documentId: string;
  /** Absent for DELETE operations. */
  document?: T;
  version?: number;
  /** Forwarded as if_seq_no on UPDATE actions. */
  seqNo?: number;
  /** Forwarded as if_primary_term on UPDATE actions. */
  primaryTerm?: number;
}

/**
 * Batch result summarizing one bulk flush.
 */
export interface BatchResult {
  successful: number;
  failed: number;
  errors: Array<{
    documentId: string;
    operation: DocumentOperation;
    error: string;
    statusCode: number;
  }>;
  took: number; // Time in milliseconds (wall clock, measured client-side)
}

/**
 * Session configuration.
 */
export interface SessionConfig {
  /** Only process documents newer than a timestamp */
  // NOTE(review): stored by DocumentSession but not consulted in the visible
  // session logic — verify whether onlyNew/fromTimestamp are still used.
  onlyNew?: boolean;

  /** Start from a specific point in time */
  fromTimestamp?: Date;

  /** Delete documents not seen in session (defaults to true) */
  cleanupStale?: boolean;

  /** Batch size for operations */
  batchSize?: number;
}

/**
 * Snapshot processor function: consumes all documents (via the iterator) and
 * the previous snapshot payload (null when none exists), producing new data.
 */
export type SnapshotProcessor<T, R> = (
  iterator: AsyncIterableIterator<DocumentWithMeta<T>>,
  previousSnapshot: R | null
) => Promise<R>;

/**
 * Snapshot metadata, as persisted in the `<index>-snapshots` companion index.
 */
export interface SnapshotMeta<T = unknown> {
  date: Date;
  data: T;
  documentCount: number;
  /** Processing duration in milliseconds. */
  processingTime: number;
}

/**
 * Document iterator options.
 */
export interface IteratorOptions {
  /** Batch size for scrolling */
  batchSize?: number;

  /** Filter by timestamp */
  fromTimestamp?: Date;

  /** Sort order (defaults to [{ _id: 'asc' }] in DocumentManager.iterate) */
  sort?: Array<{ [key: string]: 'asc' | 'desc' }>;

  /** Query filter (ES query DSL object) */
  query?: unknown;
}

/**
 * Point-in-time ID for pagination.
 */
export interface PitId {
  id: string;
  /** Keep-alive duration string, e.g. '1m' — ES time-unit format. */
  keepAlive: string;
}
|
||||
27
ts/domain/kv/index.ts
Normal file
27
ts/domain/kv/index.ts
Normal file
@@ -0,0 +1,27 @@
|
||||
/**
 * Key-Value Store Module (barrel)
 *
 * Distributed caching on top of Elasticsearch with TTL support.
 */

// Main classes
export { KVStore, createKVStore } from './kv-store.js';

// Types
export type {
  KVOperationResult,
  KVSetOptions,
  KVGetOptions,
  KVDeleteOptions,
  KVScanOptions,
  KVScanResult,
  CacheEvictionPolicy,
  CacheStats,
  KVStoreConfig,
  KVStoreStats,
  KVDocument,
  CacheEntry,
} from './types.js';
|
||||
1078
ts/domain/kv/kv-store.ts
Normal file
1078
ts/domain/kv/kv-store.ts
Normal file
File diff suppressed because it is too large
Load Diff
345
ts/domain/kv/types.ts
Normal file
345
ts/domain/kv/types.ts
Normal file
@@ -0,0 +1,345 @@
|
||||
/**
|
||||
* Key-Value Store types for distributed caching with TTL support
|
||||
*/
|
||||
|
||||
/**
|
||||
* KV operation result
|
||||
*/
|
||||
export interface KVOperationResult<T = unknown> {
|
||||
/** Whether operation succeeded */
|
||||
success: boolean;
|
||||
|
||||
/** Retrieved value (for get operations) */
|
||||
value?: T;
|
||||
|
||||
/** Whether key exists */
|
||||
exists: boolean;
|
||||
|
||||
/** Error if operation failed */
|
||||
error?: {
|
||||
type: string;
|
||||
reason: string;
|
||||
};
|
||||
|
||||
/** Version info for optimistic concurrency */
|
||||
version?: {
|
||||
seqNo: number;
|
||||
primaryTerm: number;
|
||||
};
|
||||
|
||||
/** Expiration timestamp (for TTL keys) */
|
||||
expiresAt?: Date;
|
||||
|
||||
/** Cache hit/miss info */
|
||||
cacheHit?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* KV set options
|
||||
*/
|
||||
export interface KVSetOptions {
|
||||
/** Time-to-live in seconds */
|
||||
ttl?: number;
|
||||
|
||||
/** Only set if key doesn't exist */
|
||||
nx?: boolean;
|
||||
|
||||
/** Only set if key exists */
|
||||
xx?: boolean;
|
||||
|
||||
/** Optimistic concurrency control */
|
||||
ifSeqNo?: number;
|
||||
ifPrimaryTerm?: number;
|
||||
|
||||
/** Routing value */
|
||||
routing?: string;
|
||||
|
||||
/** Pipeline to execute */
|
||||
pipeline?: string;
|
||||
|
||||
/** Skip cache and write directly to Elasticsearch */
|
||||
skipCache?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* KV get options
|
||||
*/
|
||||
export interface KVGetOptions {
|
||||
/** Return default value if key doesn't exist */
|
||||
default?: unknown;
|
||||
|
||||
/** Skip cache and read directly from Elasticsearch */
|
||||
skipCache?: boolean;
|
||||
|
||||
/** Routing value */
|
||||
routing?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* KV delete options
|
||||
*/
|
||||
export interface KVDeleteOptions {
|
||||
/** Optimistic concurrency control */
|
||||
ifSeqNo?: number;
|
||||
ifPrimaryTerm?: number;
|
||||
|
||||
/** Routing value */
|
||||
routing?: string;
|
||||
|
||||
/** Also remove from cache */
|
||||
invalidateCache?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* KV scan options
|
||||
*/
|
||||
export interface KVScanOptions {
|
||||
/** Pattern to match keys (supports wildcards) */
|
||||
pattern?: string;
|
||||
|
||||
/** Maximum keys to return */
|
||||
limit?: number;
|
||||
|
||||
/** Scroll cursor for pagination */
|
||||
cursor?: string;
|
||||
|
||||
/** Include values in scan results */
|
||||
includeValues?: boolean;
|
||||
|
||||
/** Routing value */
|
||||
routing?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* KV scan result
|
||||
*/
|
||||
export interface KVScanResult<T = unknown> {
|
||||
/** Matched keys */
|
||||
keys: string[];
|
||||
|
||||
/** Values (if includeValues was true) */
|
||||
values?: T[];
|
||||
|
||||
/** Next cursor for pagination */
|
||||
nextCursor?: string;
|
||||
|
||||
/** Total matches found */
|
||||
total: number;
|
||||
|
||||
/** Whether there are more results */
|
||||
hasMore: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Cache eviction policy
|
||||
*/
|
||||
export type CacheEvictionPolicy = 'lru' | 'lfu' | 'fifo' | 'ttl';
|
||||
|
||||
/**
|
||||
* Cache statistics
|
||||
*/
|
||||
export interface CacheStats {
|
||||
/** Total cache entries */
|
||||
size: number;
|
||||
|
||||
/** Maximum cache size */
|
||||
maxSize: number;
|
||||
|
||||
/** Cache hits */
|
||||
hits: number;
|
||||
|
||||
/** Cache misses */
|
||||
misses: number;
|
||||
|
||||
/** Hit ratio */
|
||||
hitRatio: number;
|
||||
|
||||
/** Total evictions */
|
||||
evictions: number;
|
||||
|
||||
/** Memory usage estimate (bytes) */
|
||||
memoryUsage: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* KV Store configuration
|
||||
*/
|
||||
export interface KVStoreConfig {
|
||||
/** Index name for key-value storage */
|
||||
index: string;
|
||||
|
||||
/** Default TTL in seconds */
|
||||
defaultTTL?: number;
|
||||
|
||||
/** Enable in-memory caching */
|
||||
enableCache?: boolean;
|
||||
|
||||
/** Maximum cache entries */
|
||||
cacheMaxSize?: number;
|
||||
|
||||
/** Cache eviction policy */
|
||||
cacheEvictionPolicy?: CacheEvictionPolicy;
|
||||
|
||||
/** Cache TTL in seconds (separate from KV TTL) */
|
||||
cacheTTL?: number;
|
||||
|
||||
/** Enable automatic expiration cleanup */
|
||||
enableExpirationCleanup?: boolean;
|
||||
|
||||
/** Expiration cleanup interval in seconds */
|
||||
cleanupIntervalSeconds?: number;
|
||||
|
||||
/** Batch size for cleanup operations */
|
||||
cleanupBatchSize?: number;
|
||||
|
||||
/** Default routing for all operations */
|
||||
defaultRouting?: string;
|
||||
|
||||
/** Enable compression for large values */
|
||||
enableCompression?: boolean;
|
||||
|
||||
/** Compression threshold in bytes */
|
||||
compressionThreshold?: number;
|
||||
|
||||
/** Refresh policy */
|
||||
refresh?: boolean | 'wait_for';
|
||||
|
||||
/** Enable optimistic concurrency by default */
|
||||
enableOptimisticConcurrency?: boolean;
|
||||
}
|
||||
|
||||
/**
 * KV Store statistics — cumulative operation counters plus rolling averages.
 */
export interface KVStoreStats {
  /** Total keys stored */
  totalKeys: number;

  /** Total get operations */
  totalGets: number;

  /** Total set operations */
  totalSets: number;

  /** Total delete operations */
  totalDeletes: number;

  /** Total scan operations */
  totalScans: number;

  /** Total expired keys cleaned */
  totalExpired: number;

  /** Cache statistics (present only when the in-memory cache is enabled) */
  cacheStats?: CacheStats;

  /** Average get duration in milliseconds */
  avgGetDurationMs: number;
  /** Average set duration in milliseconds */
  avgSetDurationMs: number;
  /** Average delete duration in milliseconds */
  avgDeleteDurationMs: number;

  /** Storage size estimate (bytes) */
  storageSize: number;
}
|
||||
|
||||
/**
 * Internal KV document structure — the shape persisted per key in the
 * backing Elasticsearch index.
 */
export interface KVDocument<T = unknown> {
  /** The key */
  key: string;

  /** The value */
  value: T;

  /** Creation timestamp */
  createdAt: Date;

  /** Last update timestamp */
  updatedAt: Date;

  /** Expiration timestamp (null = no expiration) */
  expiresAt: Date | null;

  /** Optional bookkeeping metadata about the stored value */
  metadata?: {
    /** Serialized size estimate in bytes */
    size?: number;
    /** Whether the value was stored compressed */
    compressed?: boolean;
    /** MIME-like content type hint */
    contentType?: string;
    /** Free-form tags */
    tags?: string[];
  };
}
|
||||
|
||||
/**
 * Cache entry held by the in-memory KV cache.
 */
export interface CacheEntry<T = unknown> {
  /** Cached value */
  value: T;

  /** Cache entry creation time */
  cachedAt: Date;

  /** Cache entry expiration time (absent = never expires from cache) */
  expiresAt?: Date;

  /** Last access time (used by the LRU eviction policy) */
  lastAccessedAt: Date;

  /** Access count (used by the LFU eviction policy) */
  accessCount: number;

  /** Entry size estimate in bytes */
  size: number;

  /** Optimistic-concurrency version info mirrored from Elasticsearch */
  version?: {
    seqNo: number;
    primaryTerm: number;
  };
}
|
||||
|
||||
/**
 * Batch get result. `KVOperationResult` is declared elsewhere in this module.
 */
export interface KVBatchGetResult<T = unknown> {
  /** Key-value map of per-key results */
  results: Map<string, KVOperationResult<T>>;

  /** Number of keys found */
  found: number;

  /** Number of keys not found */
  notFound: number;

  /** Number of keys served from the in-memory cache */
  cacheHits: number;
}
|
||||
|
||||
/**
 * Batch set result. `KVOperationResult` is declared elsewhere in this module.
 */
export interface KVBatchSetResult {
  /** Number of successful sets */
  successful: number;

  /** Number of failed sets */
  failed: number;

  /** Individual per-key results */
  results: Map<string, KVOperationResult>;
}
|
||||
|
||||
/**
 * Batch delete result. `KVOperationResult` is declared elsewhere in this module.
 */
export interface KVBatchDeleteResult {
  /** Number of successful deletes */
  successful: number;

  /** Number of failed deletes */
  failed: number;

  /** Individual per-key results */
  results: Map<string, KVOperationResult>;
}
|
||||
136
ts/domain/logging/enrichers.ts
Normal file
136
ts/domain/logging/enrichers.ts
Normal file
@@ -0,0 +1,136 @@
|
||||
/**
|
||||
* Common log enrichers
|
||||
*/
|
||||
|
||||
import type { LogEntry, LogEnricher } from './types.js';
|
||||
import { hostname } from 'os';
|
||||
|
||||
/**
|
||||
* Add hostname to log entry
|
||||
*/
|
||||
export const addHostInfo: LogEnricher = (entry: LogEntry): LogEntry => {
|
||||
return {
|
||||
...entry,
|
||||
host: hostname(),
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Add environment from NODE_ENV
|
||||
*/
|
||||
export const addEnvironment: LogEnricher = (entry: LogEntry): LogEntry => {
|
||||
return {
|
||||
...entry,
|
||||
environment: process.env.NODE_ENV || 'development',
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Add service info from environment variables
|
||||
*/
|
||||
export const addServiceInfo: LogEnricher = (entry: LogEntry): LogEntry => {
|
||||
return {
|
||||
...entry,
|
||||
service: entry.service || process.env.SERVICE_NAME,
|
||||
version: entry.version || process.env.SERVICE_VERSION,
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Add process info (PID, memory, uptime)
|
||||
*/
|
||||
export const addProcessInfo: LogEnricher = (entry: LogEntry): LogEntry => {
|
||||
const memoryUsage = process.memoryUsage();
|
||||
|
||||
return {
|
||||
...entry,
|
||||
metadata: {
|
||||
...entry.metadata,
|
||||
process: {
|
||||
pid: process.pid,
|
||||
uptime: process.uptime(),
|
||||
memory: {
|
||||
heapUsed: memoryUsage.heapUsed,
|
||||
heapTotal: memoryUsage.heapTotal,
|
||||
external: memoryUsage.external,
|
||||
rss: memoryUsage.rss,
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Add timestamp if not present
|
||||
*/
|
||||
export const addTimestamp: LogEnricher = (entry: LogEntry): LogEntry => {
|
||||
return {
|
||||
...entry,
|
||||
timestamp: entry.timestamp || new Date().toISOString(),
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Sanitize sensitive data from log entry
|
||||
*/
|
||||
export const sanitizeSensitiveData = (
|
||||
patterns: Array<{ path: string; replacement?: string }>
|
||||
): LogEnricher => {
|
||||
return (entry: LogEntry): LogEntry => {
|
||||
const sanitized = { ...entry };
|
||||
|
||||
for (const { path, replacement = '[REDACTED]' } of patterns) {
|
||||
const parts = path.split('.');
|
||||
let current: any = sanitized;
|
||||
|
||||
for (let i = 0; i < parts.length - 1; i++) {
|
||||
if (current === null || current === undefined) break;
|
||||
current = current[parts[i] as string];
|
||||
}
|
||||
|
||||
if (current && parts.length > 0) {
|
||||
const lastPart = parts[parts.length - 1];
|
||||
if (lastPart && current[lastPart] !== undefined) {
|
||||
current[lastPart] = replacement;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return sanitized;
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Add custom tags based on log content
|
||||
*/
|
||||
export const addDynamicTags = (
|
||||
taggers: Array<{ condition: (entry: LogEntry) => boolean; tag: string }>
|
||||
): LogEnricher => {
|
||||
return (entry: LogEntry): LogEntry => {
|
||||
const tags = new Set(entry.tags || []);
|
||||
|
||||
for (const { condition, tag } of taggers) {
|
||||
if (condition(entry)) {
|
||||
tags.add(tag);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
...entry,
|
||||
tags: Array.from(tags),
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Chain multiple enrichers
|
||||
*/
|
||||
export const chainEnrichers = (...enrichers: LogEnricher[]): LogEnricher => {
|
||||
return async (entry: LogEntry): Promise<LogEntry> => {
|
||||
let enriched = entry;
|
||||
for (const enricher of enrichers) {
|
||||
enriched = await enricher(enriched);
|
||||
}
|
||||
return enriched;
|
||||
};
|
||||
};
|
||||
33
ts/domain/logging/index.ts
Normal file
33
ts/domain/logging/index.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
/**
 * Logging Domain Module
 *
 * Enterprise logging with structured log ingestion into Elasticsearch.
 * Re-exports the public surface of the logging domain: the LogDestination
 * sink, the built-in log enrichers, and all public types.
 */

// Main classes
export { LogDestination, createLogDestination } from './log-destination.js';

// Enrichers (see enrichers.ts for the behavior of each)
export {
  addHostInfo,
  addEnvironment,
  addServiceInfo,
  addProcessInfo,
  addTimestamp,
  sanitizeSensitiveData,
  addDynamicTags,
  chainEnrichers,
} from './enrichers.js';

// Types
export type {
  LogEntry,
  LogEnricher,
  SamplingStrategy,
  SamplingConfig,
  ILMPolicyConfig,
  MetricExtraction,
  LogDestinationConfig,
  LogBatchResult,
  LogDestinationStats,
} from './types.js';
|
||||
569
ts/domain/logging/log-destination.ts
Normal file
569
ts/domain/logging/log-destination.ts
Normal file
@@ -0,0 +1,569 @@
|
||||
import type {
|
||||
LogEntry,
|
||||
LogDestinationConfig,
|
||||
LogBatchResult,
|
||||
LogDestinationStats,
|
||||
SamplingConfig,
|
||||
ILMPolicyConfig,
|
||||
MetricExtraction,
|
||||
} from './types.js';
|
||||
import { ElasticsearchConnectionManager } from '../../core/connection/connection-manager.js';
|
||||
import { defaultLogger } from '../../core/observability/logger.js';
|
||||
import { defaultMetrics } from '../../core/observability/metrics.js';
|
||||
import { defaultTracing } from '../../core/observability/tracing.js';
|
||||
|
||||
/**
 * Enterprise-grade log destination for Elasticsearch
 *
 * Features:
 * - Batched bulk indexing with configurable batch size
 * - Automatic flushing at intervals
 * - Log enrichment pipeline
 * - Sampling strategies (all, errors-only, percentage, rate-limit)
 * - ILM (Index Lifecycle Management) integration
 * - Metric extraction from logs
 * - Auto index template creation
 * - Queue overflow protection
 * - Full observability integration
 *
 * @example
 * ```typescript
 * const logDest = new LogDestination({
 *   indexPattern: 'logs-myapp-{now/d}',
 *   batchSize: 100,
 *   flushIntervalMs: 5000,
 *   sampling: {
 *     strategy: 'percentage',
 *     percentage: 10,
 *     alwaysSampleErrors: true
 *   },
 *   enrichers: [addHostInfo, addEnvironment],
 *   ilm: {
 *     name: 'logs-policy',
 *     hotDuration: '7d',
 *     deleteDuration: '30d'
 *   }
 * });
 *
 * await logDest.initialize();
 * await logDest.send({
 *   timestamp: new Date().toISOString(),
 *   level: 'INFO',
 *   message: 'User logged in',
 *   metadata: { userId: '123' }
 * });
 * ```
 */
export class LogDestination {
  // NOTE(review): declared Required<...>, but the constructor assigns
  // `ilm: config.ilm` which may be undefined — the Required<> claim is not
  // actually enforced for optional sub-configs. Confirm under strictNullChecks.
  private config: Required<LogDestinationConfig>;
  // Pending entries awaiting bulk flush (FIFO).
  private queue: LogEntry[] = [];
  // Interval handle driving periodic flushes; set by startFlushTimer().
  private flushTimer?: NodeJS.Timeout;
  // Cumulative counters exposed via getStats().
  private stats: LogDestinationStats = {
    totalLogs: 0,
    totalSuccessful: 0,
    totalFailed: 0,
    totalSampled: 0,
    totalDropped: 0,
    queueSize: 0,
    avgBatchDurationMs: 0,
  };
  // Rolling window (last 100) of flush durations for avgBatchDurationMs.
  private batchDurations: number[] = [];
  // State for the 'rate-limit' sampling strategy (per-second window).
  private lastRateLimitReset = Date.now();
  private rateLimitCounter = 0;
  private initialized = false;

  /**
   * Build the destination, applying defaults for every optional setting.
   * Does not touch Elasticsearch — call initialize() before sending.
   */
  constructor(config: LogDestinationConfig) {
    this.config = {
      indexPattern: config.indexPattern,
      batchSize: config.batchSize ?? 100,
      flushIntervalMs: config.flushIntervalMs ?? 5000,
      maxQueueSize: config.maxQueueSize ?? 10000,
      enrichers: config.enrichers ?? [],
      sampling: config.sampling ?? { strategy: 'all', alwaysSampleErrors: true },
      ilm: config.ilm,
      metrics: config.metrics ?? [],
      autoCreateTemplate: config.autoCreateTemplate ?? true,
      templateSettings: config.templateSettings ?? {
        numberOfShards: 1,
        numberOfReplicas: 1,
        refreshInterval: '5s',
        codec: 'best_compression',
      },
      templateMappings: config.templateMappings ?? {},
    };
  }

  /**
   * Create a new log destination (equivalent to `new LogDestination(config)`).
   */
  static create(config: LogDestinationConfig): LogDestination {
    return new LogDestination(config);
  }

  /**
   * Initialize the log destination: create the ILM policy (if configured),
   * the index template (if enabled), and start the periodic flush timer.
   * Idempotent — subsequent calls are no-ops until destroy().
   */
  async initialize(): Promise<void> {
    if (this.initialized) {
      return;
    }

    const span = defaultTracing.createSpan('logDestination.initialize');

    try {
      // Create ILM policy if configured
      if (this.config.ilm) {
        await this.createILMPolicy(this.config.ilm);
      }

      // Create index template if enabled
      if (this.config.autoCreateTemplate) {
        await this.createIndexTemplate();
      }

      // Start flush timer
      this.startFlushTimer();

      this.initialized = true;
      defaultLogger.info('Log destination initialized', {
        indexPattern: this.config.indexPattern,
        batchSize: this.config.batchSize,
        flushIntervalMs: this.config.flushIntervalMs,
      });

      span.end();
    } catch (error) {
      defaultLogger.error('Failed to initialize log destination', {
        error: error instanceof Error ? error.message : String(error),
      });
      span.recordException(error as Error);
      span.end();
      throw error;
    }
  }

  /**
   * Send a single log entry through sampling, enrichment and metric
   * extraction, then enqueue it. Triggers a flush once the queue reaches
   * the configured batch size. Entries are dropped (counted in
   * totalDropped) when the queue is full, and counted in totalSampled when
   * sampled OUT (i.e. totalSampled counts entries NOT indexed).
   */
  async send(entry: LogEntry): Promise<void> {
    this.stats.totalLogs++;

    // Apply sampling
    if (!this.shouldSample(entry)) {
      this.stats.totalSampled++;
      return;
    }

    // Apply enrichers sequentially (each sees the previous one's output)
    let enrichedEntry = entry;
    for (const enricher of this.config.enrichers) {
      enrichedEntry = await enricher(enrichedEntry);
    }

    // Extract metrics if configured
    if (this.config.metrics.length > 0) {
      this.extractMetrics(enrichedEntry);
    }

    // Check queue size — overflow protection drops the newest entry
    if (this.queue.length >= this.config.maxQueueSize) {
      this.stats.totalDropped++;
      defaultLogger.warn('Log queue overflow, dropping log', {
        queueSize: this.queue.length,
        maxQueueSize: this.config.maxQueueSize,
      });
      return;
    }

    // Add to queue
    this.queue.push(enrichedEntry);
    this.stats.queueSize = this.queue.length;

    // Flush if batch size reached
    if (this.queue.length >= this.config.batchSize) {
      await this.flush();
    }
  }

  /**
   * Send multiple log entries (sequentially, preserving order).
   */
  async sendBatch(entries: LogEntry[]): Promise<void> {
    for (const entry of entries) {
      await this.send(entry);
    }
  }

  /**
   * Flush pending logs immediately via a bulk request.
   *
   * NOTE(review): drains at most `batchSize` entries per call — callers that
   * need the whole queue emptied must call flush() in a loop.
   * Returns null when the queue is empty; rethrows transport errors after
   * counting the whole batch as failed.
   */
  async flush(): Promise<LogBatchResult | null> {
    if (this.queue.length === 0) {
      return null;
    }

    const span = defaultTracing.createSpan('logDestination.flush', {
      'batch.size': this.queue.length,
    });

    const startTime = Date.now();
    const batch = this.queue.splice(0, this.config.batchSize);
    this.stats.queueSize = this.queue.length;

    try {
      const client = ElasticsearchConnectionManager.getInstance().getClient();

      // Build bulk operations: an action line followed by the document
      const operations = batch.flatMap((entry) => [
        { index: { _index: this.resolveIndexName() } },
        entry,
      ]);

      // Execute bulk request
      const result = await client.bulk({ operations });

      const durationMs = Date.now() - startTime;
      this.batchDurations.push(durationMs);
      if (this.batchDurations.length > 100) {
        this.batchDurations.shift();
      }
      this.stats.avgBatchDurationMs =
        this.batchDurations.reduce((a, b) => a + b, 0) / this.batchDurations.length;
      this.stats.lastFlushAt = new Date();

      // Process results — bulk returns one item per document, in order,
      // so result.items[i] corresponds to batch[i]
      const errors: Array<{ log: LogEntry; error: string }> = [];
      let successful = 0;
      let failed = 0;

      if (result.items) {
        result.items.forEach((item, index) => {
          const operation = item.index || item.create || item.update;
          if (operation && operation.error) {
            failed++;
            errors.push({
              log: batch[index] as LogEntry,
              error: JSON.stringify(operation.error),
            });
          } else {
            successful++;
          }
        });
      }

      this.stats.totalSuccessful += successful;
      this.stats.totalFailed += failed;

      // Record metrics
      // NOTE(review): 'success' is recorded even when some items failed —
      // only a thrown transport error reaches requestErrors below.
      defaultMetrics.requestsTotal.inc({ operation: 'log_flush', result: 'success' });
      defaultMetrics.requestDuration.observe({ operation: 'log_flush' }, durationMs);

      if (failed > 0) {
        defaultLogger.warn('Some logs failed to index', {
          successful,
          failed,
          errors: errors.slice(0, 5), // Log first 5 errors
        });
      }

      span.setAttributes({
        'batch.successful': successful,
        'batch.failed': failed,
        'batch.duration_ms': durationMs,
      });
      span.end();

      return {
        successful,
        failed,
        total: batch.length,
        errors: errors.length > 0 ? errors : undefined,
        durationMs,
      };
    } catch (error) {
      this.stats.totalFailed += batch.length;
      defaultMetrics.requestErrors.inc({ operation: 'log_flush' });

      defaultLogger.error('Failed to flush logs', {
        error: error instanceof Error ? error.message : String(error),
        batchSize: batch.length,
      });

      span.recordException(error as Error);
      span.end();

      throw error;
    }
  }

  /**
   * Get a snapshot copy of destination statistics.
   */
  getStats(): LogDestinationStats {
    return { ...this.stats };
  }

  /**
   * Destroy the destination: stop the flush timer and flush pending logs.
   *
   * NOTE(review): flush() is called only once and drains at most one batch;
   * if the queue holds more than batchSize entries at shutdown the
   * remainder is not indexed — confirm whether a drain loop is intended.
   */
  async destroy(): Promise<void> {
    if (this.flushTimer) {
      clearInterval(this.flushTimer);
    }

    // Flush remaining logs
    if (this.queue.length > 0) {
      await this.flush();
    }

    this.initialized = false;
    defaultLogger.info('Log destination destroyed', {
      stats: this.stats,
    });
  }

  // ============================================================================
  // Private Methods
  // ============================================================================

  // Start the periodic background flush; errors are logged, not rethrown,
  // so one bad flush does not kill the timer.
  private startFlushTimer(): void {
    this.flushTimer = setInterval(async () => {
      if (this.queue.length > 0) {
        try {
          await this.flush();
        } catch (error) {
          defaultLogger.error('Flush timer error', {
            error: error instanceof Error ? error.message : String(error),
          });
        }
      }
    }, this.config.flushIntervalMs);
  }

  // Decide whether an entry passes the configured sampling strategy.
  // ERROR-level entries bypass sampling when alwaysSampleErrors is set.
  private shouldSample(entry: LogEntry): boolean {
    const sampling = this.config.sampling;

    // Always sample errors if configured
    if (sampling.alwaysSampleErrors && entry.level === 'ERROR') {
      return true;
    }

    switch (sampling.strategy) {
      case 'all':
        return true;

      case 'errors-only':
        return entry.level === 'ERROR';

      case 'percentage':
        // percentage is 0-100; missing percentage means "keep everything"
        return Math.random() * 100 < (sampling.percentage ?? 100);

      case 'rate-limit': {
        // Fixed one-second window, reset lazily on the first entry after it.
        const now = Date.now();
        if (now - this.lastRateLimitReset >= 1000) {
          this.lastRateLimitReset = now;
          this.rateLimitCounter = 0;
        }
        this.rateLimitCounter++;
        return this.rateLimitCounter <= (sampling.maxLogsPerSecond ?? 100);
      }

      default:
        return true;
    }
  }

  // Expand the limited date-math placeholders supported in indexPattern.
  // Only {now/d} (UTC date YYYY-MM-DD) and {now/M} (local YYYY.MM) are
  // handled here; full Elasticsearch date-math is NOT supported.
  private resolveIndexName(): string {
    // Support date math in index pattern
    const pattern = this.config.indexPattern;

    // Simple date math support for {now/d}
    if (pattern.includes('{now/d}')) {
      // NOTE(review): split('T')[0] is string|undefined under
      // noUncheckedIndexedAccess; also {now/d} uses UTC while {now/M}
      // below uses local time — confirm the intended timezone.
      const date = new Date().toISOString().split('T')[0];
      return pattern.replace('{now/d}', date);
    }

    // Support {now/M} for month
    if (pattern.includes('{now/M}')) {
      const date = new Date();
      const month = `${date.getFullYear()}.${String(date.getMonth() + 1).padStart(2, '0')}`;
      return pattern.replace('{now/M}', month);
    }

    return pattern;
  }

  // Fold configured field values from an entry into the shared metrics
  // registry. 'gauge' extraction is currently a no-op (see comment below).
  private extractMetrics(entry: LogEntry): void {
    for (const metric of this.config.metrics) {
      const value = this.getNestedValue(entry, metric.field);
      if (value === undefined) continue;

      const labels: Record<string, string> = {};
      if (metric.labels) {
        for (const labelField of metric.labels) {
          const labelValue = this.getNestedValue(entry, labelField);
          if (labelValue !== undefined) {
            labels[labelField] = String(labelValue);
          }
        }
      }

      switch (metric.type) {
        case 'counter':
          defaultMetrics.requestsTotal.inc({ ...labels, metric: metric.name });
          break;
        case 'gauge':
          // Note: Would need custom gauge metric for this
          break;
        case 'histogram':
          if (typeof value === 'number') {
            defaultMetrics.requestDuration.observe({ ...labels, metric: metric.name }, value);
          }
          break;
      }
    }
  }

  // Resolve a dot-separated path against an object; undefined when any
  // intermediate segment is missing or not an object.
  private getNestedValue(obj: unknown, path: string): unknown {
    const parts = path.split('.');
    let current = obj;

    for (const part of parts) {
      if (current === null || current === undefined || typeof current !== 'object') {
        return undefined;
      }
      current = (current as Record<string, unknown>)[part];
    }

    return current;
  }

  // Create (PUT) the ILM lifecycle policy from the config. Failures are
  // logged as warnings and swallowed — the policy may already exist.
  private async createILMPolicy(ilm: ILMPolicyConfig): Promise<void> {
    const client = ElasticsearchConnectionManager.getInstance().getClient();

    const policy = {
      policy: {
        phases: {
          ...(ilm.hotDuration && {
            hot: {
              actions: {
                ...(ilm.rollover && { rollover: ilm.rollover }),
              },
            },
          }),
          ...(ilm.warmDuration && {
            warm: {
              min_age: ilm.warmDuration,
              actions: {
                shrink: { number_of_shards: 1 },
                forcemerge: { max_num_segments: 1 },
              },
            },
          }),
          ...(ilm.coldDuration && {
            cold: {
              min_age: ilm.coldDuration,
              actions: {
                freeze: {},
              },
            },
          }),
          ...(ilm.deleteDuration && {
            delete: {
              min_age: ilm.deleteDuration,
              actions: {
                delete: {},
              },
            },
          }),
        },
      },
    };

    try {
      await client.ilm.putLifecycle({
        name: ilm.name,
        ...policy,
      });
      defaultLogger.info('ILM policy created', { policy: ilm.name });
    } catch (error) {
      defaultLogger.warn('Failed to create ILM policy (may already exist)', {
        policy: ilm.name,
        error: error instanceof Error ? error.message : String(error),
      });
    }
  }

  // Create (PUT) the index template matching the (wildcarded) index
  // pattern. Failures are logged as warnings and swallowed.
  private async createIndexTemplate(): Promise<void> {
    const client = ElasticsearchConnectionManager.getInstance().getClient();

    // Template name is derived from the second dash-separated segment of
    // the pattern; date-math placeholders become '*' in index_patterns.
    const templateName = `logs-${this.config.indexPattern.split('-')[1] || 'default'}-template`;
    const indexPattern = this.config.indexPattern.replace(/\{.*?\}/g, '*');

    const template = {
      index_patterns: [indexPattern],
      template: {
        settings: {
          number_of_shards: this.config.templateSettings.numberOfShards,
          number_of_replicas: this.config.templateSettings.numberOfReplicas,
          refresh_interval: this.config.templateSettings.refreshInterval,
          codec: this.config.templateSettings.codec,
          ...(this.config.ilm && {
            'index.lifecycle.name': this.config.ilm.name,
            // NOTE(review): rollover_alias is set to the wildcarded
            // pattern (contains '*'), which is not a valid alias name for
            // ILM rollover — confirm intended alias.
            'index.lifecycle.rollover_alias': indexPattern,
          }),
        },
        mappings: {
          properties: {
            timestamp: { type: 'date' },
            level: { type: 'keyword' },
            message: { type: 'text' },
            correlationId: { type: 'keyword' },
            service: { type: 'keyword' },
            version: { type: 'keyword' },
            host: { type: 'keyword' },
            environment: { type: 'keyword' },
            tags: { type: 'keyword' },
            metadata: { type: 'object', enabled: false },
            error: {
              properties: {
                name: { type: 'keyword' },
                message: { type: 'text' },
                stack: { type: 'text' },
                code: { type: 'keyword' },
              },
            },
            metrics: {
              properties: {
                duration: { type: 'long' },
                memory: { type: 'long' },
                cpu: { type: 'float' },
              },
            },
            ...this.config.templateMappings,
          },
        },
      },
    };

    try {
      await client.indices.putIndexTemplate({
        name: templateName,
        ...template,
      });
      defaultLogger.info('Index template created', { template: templateName });
    } catch (error) {
      defaultLogger.warn('Failed to create index template (may already exist)', {
        template: templateName,
        error: error instanceof Error ? error.message : String(error),
      });
    }
  }
}
|
||||
|
||||
/**
|
||||
* Create a new log destination
|
||||
*/
|
||||
export function createLogDestination(config: LogDestinationConfig): LogDestination {
|
||||
return new LogDestination(config);
|
||||
}
|
||||
221
ts/domain/logging/types.ts
Normal file
221
ts/domain/logging/types.ts
Normal file
@@ -0,0 +1,221 @@
|
||||
/**
|
||||
* Logging domain types for structured log ingestion into Elasticsearch
|
||||
*/
|
||||
|
||||
import type { LogLevel } from '../../core/observability/logger.js';
|
||||
|
||||
/**
 * Log entry structure — the document shape indexed into Elasticsearch.
 */
export interface LogEntry {
  /** ISO 8601 timestamp */
  timestamp: string;

  /** Log level */
  level: LogLevel;

  /** Log message */
  message: string;

  /** Optional correlation ID for request tracing */
  correlationId?: string;

  /** Service name */
  service?: string;

  /** Service version */
  version?: string;

  /** Hostname or container ID */
  host?: string;

  /** Environment (production, staging, development) */
  environment?: string;

  /** Additional structured data (mapped with enabled:false, i.e. stored but not indexed) */
  metadata?: Record<string, unknown>;

  /** Error details if log is error level */
  error?: {
    name: string;
    message: string;
    stack?: string;
    code?: string;
  };

  /** Performance metrics */
  metrics?: {
    /** Duration in milliseconds — TODO confirm unit with producers */
    duration?: number;
    /** Memory in bytes — TODO confirm unit with producers */
    memory?: number;
    cpu?: number;
  };

  /** Tags for categorization */
  tags?: string[];
}
|
||||
|
||||
/**
 * Log enrichment function: receives an entry and returns an enriched copy
 * (synchronously or asynchronously). Enrichers should not mutate the input.
 */
export type LogEnricher = (entry: LogEntry) => LogEntry | Promise<LogEntry>;
|
||||
|
||||
/**
 * Log sampling strategy: keep everything, only errors, a random
 * percentage, or at most N logs per second.
 */
export type SamplingStrategy = 'all' | 'errors-only' | 'percentage' | 'rate-limit';
|
||||
|
||||
/**
 * Sampling configuration.
 */
export interface SamplingConfig {
  /** Sampling strategy */
  strategy: SamplingStrategy;

  /** For 'percentage' strategy: 0-100 (missing means keep everything) */
  percentage?: number;

  /** For 'rate-limit' strategy: logs per second (default 100 in LogDestination) */
  maxLogsPerSecond?: number;

  /** Always sample ERROR-level entries regardless of strategy */
  alwaysSampleErrors?: boolean;
}
|
||||
|
||||
/**
 * ILM (Index Lifecycle Management) policy configuration.
 * Durations use Elasticsearch time-unit strings (e.g. "7d", "30d").
 */
export interface ILMPolicyConfig {
  /** Policy name */
  name: string;

  /** Hot phase: how long to keep in hot tier */
  hotDuration?: string; // e.g., "7d"

  /** Warm phase: move to warm tier after */
  warmDuration?: string; // e.g., "30d"

  /** Cold phase: move to cold tier after */
  coldDuration?: string; // e.g., "90d"

  /** Delete phase: delete after */
  deleteDuration?: string; // e.g., "365d"

  /** Rollover settings (applied in the hot phase) */
  rollover?: {
    maxSize?: string; // e.g., "50gb"
    maxAge?: string; // e.g., "1d"
    maxDocs?: number;
  };
}
|
||||
|
||||
/**
 * Metric extraction pattern: pull a value out of each log entry and fold it
 * into the metrics registry.
 */
export interface MetricExtraction {
  /** Metric name */
  name: string;

  /** Field path to extract (dot notation, e.g. "metrics.duration") */
  field: string;

  /** Metric type — note: 'gauge' is currently not emitted by LogDestination */
  type: 'counter' | 'gauge' | 'histogram';

  /** Optional label field paths to extract alongside the value */
  labels?: string[];
}
|
||||
|
||||
/**
 * Log destination configuration. Defaults shown are the ones applied by
 * the LogDestination constructor.
 */
export interface LogDestinationConfig {
  /** Index name pattern; supports limited date math ({now/d}, {now/M}) */
  indexPattern: string;

  /** Batch size for bulk operations (default 100) */
  batchSize?: number;

  /** Flush interval in milliseconds (default 5000) */
  flushIntervalMs?: number;

  /** Maximum queue size before dropping logs (default 10000) */
  maxQueueSize?: number;

  /** Enrichers to apply, in order, to every sampled entry */
  enrichers?: LogEnricher[];

  /** Sampling configuration (default: 'all' with alwaysSampleErrors) */
  sampling?: SamplingConfig;

  /** ILM policy to create on initialize() */
  ilm?: ILMPolicyConfig;

  /** Metric extractions applied to every sampled entry */
  metrics?: MetricExtraction[];

  /** Auto-create index template on initialize() (default true) */
  autoCreateTemplate?: boolean;

  /** Custom index template settings */
  templateSettings?: {
    numberOfShards?: number;
    numberOfReplicas?: number;
    refreshInterval?: string;
    codec?: 'default' | 'best_compression';
  };

  /** Custom index mappings merged into the template's properties */
  templateMappings?: Record<string, unknown>;
}
|
||||
|
||||
/**
 * Batch result for log ingestion (returned by LogDestination.flush()).
 */
export interface LogBatchResult {
  /** Number of successfully indexed logs */
  successful: number;

  /** Number of failed logs */
  failed: number;

  /** Total logs in batch (successful + failed) */
  total: number;

  /** Per-document errors encountered (absent when everything succeeded) */
  errors?: Array<{
    log: LogEntry;
    error: string;
  }>;

  /** Time taken in milliseconds */
  durationMs: number;
}
|
||||
|
||||
/**
 * Log destination statistics — cumulative counters since construction.
 */
export interface LogDestinationStats {
  /** Total logs submitted via send() */
  totalLogs: number;

  /** Total logs successfully indexed */
  totalSuccessful: number;

  /** Total logs failed (per-item bulk errors plus whole-batch transport failures) */
  totalFailed: number;

  /** Total logs sampled OUT (i.e. skipped by the sampling strategy) */
  totalSampled: number;

  /** Total logs dropped due to queue overflow */
  totalDropped: number;

  /** Current queue size */
  queueSize: number;

  /** Average flush duration in ms over a rolling window of recent batches */
  avgBatchDurationMs: number;

  /** Last flush timestamp (absent until the first flush) */
  lastFlushAt?: Date;
}
|
||||
324
ts/domain/query/aggregation-builder.ts
Normal file
324
ts/domain/query/aggregation-builder.ts
Normal file
@@ -0,0 +1,324 @@
|
||||
import type {
|
||||
AggregationDSL,
|
||||
TermsAggregation,
|
||||
MetricAggregation,
|
||||
StatsAggregation,
|
||||
ExtendedStatsAggregation,
|
||||
PercentilesAggregation,
|
||||
DateHistogramAggregation,
|
||||
HistogramAggregation,
|
||||
RangeAggregation,
|
||||
FilterAggregation,
|
||||
TopHitsAggregation,
|
||||
QueryDSL,
|
||||
SortOrder,
|
||||
SortField,
|
||||
} from './types.js';
|
||||
|
||||
/**
|
||||
* Fluent aggregation builder for type-safe Elasticsearch aggregations
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const query = new QueryBuilder<Product>('products')
|
||||
* .aggregations((agg) => {
|
||||
* agg.terms('categories', 'category.keyword', { size: 10 })
|
||||
* .subAggregation('avg_price', (sub) => sub.avg('price'));
|
||||
* });
|
||||
* ```
|
||||
*/
|
||||
export class AggregationBuilder {
|
||||
private aggregations: Record<string, AggregationDSL> = {};
|
||||
private currentAggName?: string;
|
||||
|
||||
/**
|
||||
* Add a terms aggregation
|
||||
*/
|
||||
terms(
|
||||
name: string,
|
||||
field: string,
|
||||
options?: {
|
||||
size?: number;
|
||||
order?: Record<string, SortOrder>;
|
||||
missing?: string | number;
|
||||
}
|
||||
): this {
|
||||
const termsAgg: TermsAggregation = {
|
||||
terms: {
|
||||
field,
|
||||
...options,
|
||||
},
|
||||
};
|
||||
this.aggregations[name] = termsAgg;
|
||||
this.currentAggName = name;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add an average metric aggregation
|
||||
*/
|
||||
avg(name: string, field: string, missing?: number): this {
|
||||
const avgAgg: MetricAggregation = {
|
||||
avg: {
|
||||
field,
|
||||
...(missing !== undefined && { missing }),
|
||||
},
|
||||
};
|
||||
this.aggregations[name] = avgAgg;
|
||||
this.currentAggName = name;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a sum metric aggregation
|
||||
*/
|
||||
sum(name: string, field: string, missing?: number): this {
|
||||
const sumAgg: MetricAggregation = {
|
||||
sum: {
|
||||
field,
|
||||
...(missing !== undefined && { missing }),
|
||||
},
|
||||
};
|
||||
this.aggregations[name] = sumAgg;
|
||||
this.currentAggName = name;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a min metric aggregation
|
||||
*/
|
||||
min(name: string, field: string, missing?: number): this {
|
||||
const minAgg: MetricAggregation = {
|
||||
min: {
|
||||
field,
|
||||
...(missing !== undefined && { missing }),
|
||||
},
|
||||
};
|
||||
this.aggregations[name] = minAgg;
|
||||
this.currentAggName = name;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a max metric aggregation
|
||||
*/
|
||||
max(name: string, field: string, missing?: number): this {
|
||||
const maxAgg: MetricAggregation = {
|
||||
max: {
|
||||
field,
|
||||
...(missing !== undefined && { missing }),
|
||||
},
|
||||
};
|
||||
this.aggregations[name] = maxAgg;
|
||||
this.currentAggName = name;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a cardinality metric aggregation
|
||||
*/
|
||||
cardinality(name: string, field: string): this {
|
||||
const cardinalityAgg: MetricAggregation = {
|
||||
cardinality: {
|
||||
field,
|
||||
},
|
||||
};
|
||||
this.aggregations[name] = cardinalityAgg;
|
||||
this.currentAggName = name;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a stats aggregation
|
||||
*/
|
||||
stats(name: string, field: string): this {
|
||||
const statsAgg: StatsAggregation = {
|
||||
stats: {
|
||||
field,
|
||||
},
|
||||
};
|
||||
this.aggregations[name] = statsAgg;
|
||||
this.currentAggName = name;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add an extended stats aggregation
|
||||
*/
|
||||
extendedStats(name: string, field: string): this {
|
||||
const extendedStatsAgg: ExtendedStatsAggregation = {
|
||||
extended_stats: {
|
||||
field,
|
||||
},
|
||||
};
|
||||
this.aggregations[name] = extendedStatsAgg;
|
||||
this.currentAggName = name;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a percentiles aggregation
|
||||
*/
|
||||
percentiles(name: string, field: string, percents?: number[]): this {
|
||||
const percentilesAgg: PercentilesAggregation = {
|
||||
percentiles: {
|
||||
field,
|
||||
...(percents && { percents }),
|
||||
},
|
||||
};
|
||||
this.aggregations[name] = percentilesAgg;
|
||||
this.currentAggName = name;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a date histogram aggregation
|
||||
*/
|
||||
dateHistogram(
|
||||
name: string,
|
||||
field: string,
|
||||
options: {
|
||||
calendar_interval?: string;
|
||||
fixed_interval?: string;
|
||||
format?: string;
|
||||
time_zone?: string;
|
||||
min_doc_count?: number;
|
||||
}
|
||||
): this {
|
||||
const dateHistogramAgg: DateHistogramAggregation = {
|
||||
date_histogram: {
|
||||
field,
|
||||
...options,
|
||||
},
|
||||
};
|
||||
this.aggregations[name] = dateHistogramAgg;
|
||||
this.currentAggName = name;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a histogram aggregation
|
||||
*/
|
||||
histogram(
|
||||
name: string,
|
||||
field: string,
|
||||
interval: number,
|
||||
options?: {
|
||||
min_doc_count?: number;
|
||||
}
|
||||
): this {
|
||||
const histogramAgg: HistogramAggregation = {
|
||||
histogram: {
|
||||
field,
|
||||
interval,
|
||||
...options,
|
||||
},
|
||||
};
|
||||
this.aggregations[name] = histogramAgg;
|
||||
this.currentAggName = name;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a range aggregation
|
||||
*/
|
||||
range(
|
||||
name: string,
|
||||
field: string,
|
||||
ranges: Array<{ from?: number; to?: number; key?: string }>
|
||||
): this {
|
||||
const rangeAgg: RangeAggregation = {
|
||||
range: {
|
||||
field,
|
||||
ranges,
|
||||
},
|
||||
};
|
||||
this.aggregations[name] = rangeAgg;
|
||||
this.currentAggName = name;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a filter aggregation
|
||||
*/
|
||||
filterAgg(name: string, filter: QueryDSL): this {
|
||||
const filterAgg: FilterAggregation = {
|
||||
filter,
|
||||
};
|
||||
this.aggregations[name] = filterAgg;
|
||||
this.currentAggName = name;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a top hits aggregation
|
||||
*/
|
||||
topHits(
|
||||
name: string,
|
||||
options?: {
|
||||
size?: number;
|
||||
sort?: Array<SortField | string>;
|
||||
_source?: boolean | { includes?: string[]; excludes?: string[] };
|
||||
}
|
||||
): this {
|
||||
const topHitsAgg: TopHitsAggregation = {
|
||||
top_hits: {
|
||||
...options,
|
||||
},
|
||||
};
|
||||
this.aggregations[name] = topHitsAgg;
|
||||
this.currentAggName = name;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a sub-aggregation to the last defined aggregation
|
||||
*/
|
||||
subAggregation(name: string, configure: (builder: AggregationBuilder) => void): this {
|
||||
if (!this.currentAggName) {
|
||||
throw new Error('Cannot add sub-aggregation: no parent aggregation defined');
|
||||
}
|
||||
|
||||
const parentAgg = this.aggregations[this.currentAggName];
|
||||
const subBuilder = new AggregationBuilder();
|
||||
configure(subBuilder);
|
||||
|
||||
// Add aggs field to parent aggregation
|
||||
if ('terms' in parentAgg) {
|
||||
(parentAgg as TermsAggregation).aggs = subBuilder.build();
|
||||
} else if ('date_histogram' in parentAgg) {
|
||||
(parentAgg as DateHistogramAggregation).aggs = subBuilder.build();
|
||||
} else if ('histogram' in parentAgg) {
|
||||
(parentAgg as HistogramAggregation).aggs = subBuilder.build();
|
||||
} else if ('range' in parentAgg) {
|
||||
(parentAgg as RangeAggregation).aggs = subBuilder.build();
|
||||
} else if ('filter' in parentAgg) {
|
||||
(parentAgg as FilterAggregation).aggs = subBuilder.build();
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a custom aggregation DSL
|
||||
*/
|
||||
custom(name: string, aggregation: AggregationDSL): this {
|
||||
this.aggregations[name] = aggregation;
|
||||
this.currentAggName = name;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Build the aggregations object
|
||||
*/
|
||||
build(): Record<string, AggregationDSL> {
|
||||
return this.aggregations;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new aggregation builder
|
||||
*/
|
||||
export function createAggregationBuilder(): AggregationBuilder {
|
||||
return new AggregationBuilder();
|
||||
}
|
||||
67
ts/domain/query/index.ts
Normal file
67
ts/domain/query/index.ts
Normal file
@@ -0,0 +1,67 @@
|
||||
/**
|
||||
* Query Builder Module
|
||||
*
|
||||
* Type-safe query construction for Elasticsearch
|
||||
*/
|
||||
|
||||
// Query Builder
|
||||
export { QueryBuilder, createQuery } from './query-builder.js';
|
||||
|
||||
// Aggregation Builder
|
||||
export { AggregationBuilder, createAggregationBuilder } from './aggregation-builder.js';
|
||||
|
||||
// Types
|
||||
export type {
|
||||
// Query types
|
||||
QueryType,
|
||||
QueryDSL,
|
||||
BoolClause,
|
||||
BoolQuery,
|
||||
MatchQuery,
|
||||
MatchPhraseQuery,
|
||||
MultiMatchQuery,
|
||||
TermQuery,
|
||||
TermsQuery,
|
||||
RangeQuery,
|
||||
ExistsQuery,
|
||||
PrefixQuery,
|
||||
WildcardQuery,
|
||||
RegexpQuery,
|
||||
FuzzyQuery,
|
||||
IdsQuery,
|
||||
MatchAllQuery,
|
||||
QueryStringQuery,
|
||||
SimpleQueryStringQuery,
|
||||
|
||||
// Options
|
||||
SearchOptions,
|
||||
SortOrder,
|
||||
SortField,
|
||||
MatchOperator,
|
||||
MultiMatchType,
|
||||
RangeBounds,
|
||||
|
||||
// Aggregation types
|
||||
AggregationType,
|
||||
AggregationDSL,
|
||||
TermsAggregation,
|
||||
MetricAggregation,
|
||||
StatsAggregation,
|
||||
ExtendedStatsAggregation,
|
||||
PercentilesAggregation,
|
||||
DateHistogramAggregation,
|
||||
HistogramAggregation,
|
||||
RangeAggregation,
|
||||
FilterAggregation,
|
||||
TopHitsAggregation,
|
||||
|
||||
// Results
|
||||
SearchResult,
|
||||
SearchHit,
|
||||
AggregationResult,
|
||||
AggregationBucket,
|
||||
TermsAggregationResult,
|
||||
MetricAggregationResult,
|
||||
StatsAggregationResult,
|
||||
PercentilesAggregationResult,
|
||||
} from './types.js';
|
||||
629
ts/domain/query/query-builder.ts
Normal file
629
ts/domain/query/query-builder.ts
Normal file
@@ -0,0 +1,629 @@
|
||||
import type {
|
||||
QueryDSL,
|
||||
BoolQuery,
|
||||
MatchQuery,
|
||||
MatchPhraseQuery,
|
||||
MultiMatchQuery,
|
||||
TermQuery,
|
||||
TermsQuery,
|
||||
RangeQuery,
|
||||
ExistsQuery,
|
||||
PrefixQuery,
|
||||
WildcardQuery,
|
||||
RegexpQuery,
|
||||
FuzzyQuery,
|
||||
IdsQuery,
|
||||
MatchAllQuery,
|
||||
QueryStringQuery,
|
||||
SimpleQueryStringQuery,
|
||||
SearchOptions,
|
||||
SearchResult,
|
||||
SortOrder,
|
||||
MatchOperator,
|
||||
MultiMatchType,
|
||||
RangeBounds,
|
||||
SortField,
|
||||
} from './types.js';
|
||||
import type { AggregationBuilder } from './aggregation-builder.js';
|
||||
import { createAggregationBuilder } from './aggregation-builder.js';
|
||||
import { ElasticsearchConnectionManager } from '../../core/connection/connection-manager.js';
|
||||
import { defaultLogger } from '../../core/observability/logger.js';
|
||||
import { defaultMetrics } from '../../core/observability/metrics.js';
|
||||
import { defaultTracing } from '../../core/observability/tracing.js';
|
||||
|
||||
/**
|
||||
* Fluent query builder for type-safe Elasticsearch queries
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const results = await new QueryBuilder<Product>('products')
|
||||
* .match('name', 'laptop')
|
||||
* .range('price', { gte: 100, lte: 1000 })
|
||||
* .sort('price', 'asc')
|
||||
* .size(20)
|
||||
* .execute();
|
||||
* ```
|
||||
*/
|
||||
export class QueryBuilder<T = unknown> {
|
||||
private index: string;
|
||||
private queryDSL: QueryDSL | null = null;
|
||||
private boolClauses: {
|
||||
must: QueryDSL[];
|
||||
should: QueryDSL[];
|
||||
must_not: QueryDSL[];
|
||||
filter: QueryDSL[];
|
||||
} = {
|
||||
must: [],
|
||||
should: [],
|
||||
must_not: [],
|
||||
filter: [],
|
||||
};
|
||||
private minimumShouldMatch?: number | string;
|
||||
private sortFields: Array<SortField | string> = [];
|
||||
private sourceFields?: string[];
|
||||
private excludeSourceFields?: string[];
|
||||
private resultSize: number = 10;
|
||||
private resultFrom: number = 0;
|
||||
private shouldTrackTotalHits: boolean | number = true;
|
||||
private searchTimeout?: string;
|
||||
private aggregationBuilder?: AggregationBuilder;
|
||||
private highlightConfig?: SearchOptions['highlight'];
|
||||
|
||||
constructor(index: string) {
|
||||
this.index = index;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new query builder instance
|
||||
*/
|
||||
static create<T>(index: string): QueryBuilder<T> {
|
||||
return new QueryBuilder<T>(index);
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Query Methods
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Add a match query
|
||||
*/
|
||||
match(field: string, query: string, options?: { operator?: MatchOperator; fuzziness?: number | 'AUTO'; boost?: number }): this {
|
||||
const matchQuery: MatchQuery = {
|
||||
match: {
|
||||
[field]: {
|
||||
query,
|
||||
...options,
|
||||
},
|
||||
},
|
||||
};
|
||||
this.boolClauses.must.push(matchQuery);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a match phrase query
|
||||
*/
|
||||
matchPhrase(field: string, query: string, options?: { slop?: number; boost?: number }): this {
|
||||
const matchPhraseQuery: MatchPhraseQuery = {
|
||||
match_phrase: {
|
||||
[field]: {
|
||||
query,
|
||||
...options,
|
||||
},
|
||||
},
|
||||
};
|
||||
this.boolClauses.must.push(matchPhraseQuery);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a multi-match query
|
||||
*/
|
||||
multiMatch(query: string, fields: string[], options?: { type?: MultiMatchType; operator?: MatchOperator; boost?: number }): this {
|
||||
const multiMatchQuery: MultiMatchQuery = {
|
||||
multi_match: {
|
||||
query,
|
||||
fields,
|
||||
...options,
|
||||
},
|
||||
};
|
||||
this.boolClauses.must.push(multiMatchQuery);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a term query (exact match)
|
||||
*/
|
||||
term(field: string, value: string | number | boolean, boost?: number): this {
|
||||
const termQuery: TermQuery = {
|
||||
term: {
|
||||
[field]: {
|
||||
value,
|
||||
...(boost && { boost }),
|
||||
},
|
||||
},
|
||||
};
|
||||
this.boolClauses.filter.push(termQuery);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a terms query (match any of the values)
|
||||
*/
|
||||
terms(field: string, values: Array<string | number | boolean>, boost?: number): this {
|
||||
const termsQuery: TermsQuery = {
|
||||
terms: {
|
||||
[field]: values,
|
||||
...(boost && { boost }),
|
||||
},
|
||||
};
|
||||
this.boolClauses.filter.push(termsQuery);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a range query
|
||||
*/
|
||||
range(field: string, bounds: RangeBounds, boost?: number): this {
|
||||
const rangeQuery: RangeQuery = {
|
||||
range: {
|
||||
[field]: {
|
||||
...bounds,
|
||||
...(boost && { boost }),
|
||||
},
|
||||
},
|
||||
};
|
||||
this.boolClauses.filter.push(rangeQuery);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add an exists query (field must exist)
|
||||
*/
|
||||
exists(field: string): this {
|
||||
const existsQuery: ExistsQuery = {
|
||||
exists: {
|
||||
field,
|
||||
},
|
||||
};
|
||||
this.boolClauses.filter.push(existsQuery);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a prefix query
|
||||
*/
|
||||
prefix(field: string, value: string, boost?: number): this {
|
||||
const prefixQuery: PrefixQuery = {
|
||||
prefix: {
|
||||
[field]: {
|
||||
value,
|
||||
...(boost && { boost }),
|
||||
},
|
||||
},
|
||||
};
|
||||
this.boolClauses.must.push(prefixQuery);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a wildcard query
|
||||
*/
|
||||
wildcard(field: string, value: string, boost?: number): this {
|
||||
const wildcardQuery: WildcardQuery = {
|
||||
wildcard: {
|
||||
[field]: {
|
||||
value,
|
||||
...(boost && { boost }),
|
||||
},
|
||||
},
|
||||
};
|
||||
this.boolClauses.must.push(wildcardQuery);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a regexp query
|
||||
*/
|
||||
regexp(field: string, value: string, options?: { flags?: string; boost?: number }): this {
|
||||
const regexpQuery: RegexpQuery = {
|
||||
regexp: {
|
||||
[field]: {
|
||||
value,
|
||||
...options,
|
||||
},
|
||||
},
|
||||
};
|
||||
this.boolClauses.must.push(regexpQuery);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a fuzzy query
|
||||
*/
|
||||
fuzzy(field: string, value: string, options?: { fuzziness?: number | 'AUTO'; boost?: number }): this {
|
||||
const fuzzyQuery: FuzzyQuery = {
|
||||
fuzzy: {
|
||||
[field]: {
|
||||
value,
|
||||
...options,
|
||||
},
|
||||
},
|
||||
};
|
||||
this.boolClauses.must.push(fuzzyQuery);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add an IDs query
|
||||
*/
|
||||
ids(values: string[]): this {
|
||||
const idsQuery: IdsQuery = {
|
||||
ids: {
|
||||
values,
|
||||
},
|
||||
};
|
||||
this.boolClauses.filter.push(idsQuery);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a query string query
|
||||
*/
|
||||
queryString(query: string, options?: { default_field?: string; fields?: string[]; default_operator?: MatchOperator; boost?: number }): this {
|
||||
const queryStringQuery: QueryStringQuery = {
|
||||
query_string: {
|
||||
query,
|
||||
...options,
|
||||
},
|
||||
};
|
||||
this.boolClauses.must.push(queryStringQuery);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a simple query string query
|
||||
*/
|
||||
simpleQueryString(query: string, options?: { fields?: string[]; default_operator?: MatchOperator; boost?: number }): this {
|
||||
const simpleQueryStringQuery: SimpleQueryStringQuery = {
|
||||
simple_query_string: {
|
||||
query,
|
||||
...options,
|
||||
},
|
||||
};
|
||||
this.boolClauses.must.push(simpleQueryStringQuery);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Match all documents
|
||||
*/
|
||||
matchAll(boost?: number): this {
|
||||
const matchAllQuery: MatchAllQuery = {
|
||||
match_all: {
|
||||
...(boost && { boost }),
|
||||
},
|
||||
};
|
||||
this.queryDSL = matchAllQuery;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a custom query to the must clause
|
||||
*/
|
||||
must(query: QueryDSL): this {
|
||||
this.boolClauses.must.push(query);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a custom query to the should clause
|
||||
*/
|
||||
should(query: QueryDSL): this {
|
||||
this.boolClauses.should.push(query);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a custom query to the must_not clause
|
||||
*/
|
||||
mustNot(query: QueryDSL): this {
|
||||
this.boolClauses.must_not.push(query);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a custom query to the filter clause
|
||||
*/
|
||||
filter(query: QueryDSL): this {
|
||||
this.boolClauses.filter.push(query);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set minimum_should_match for boolean queries
|
||||
*/
|
||||
minimumMatch(value: number | string): this {
|
||||
this.minimumShouldMatch = value;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set a custom query DSL (replaces builder queries)
|
||||
*/
|
||||
customQuery(query: QueryDSL): this {
|
||||
this.queryDSL = query;
|
||||
return this;
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Result Shaping Methods
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Add sorting
|
||||
*/
|
||||
sort(field: string, order: SortOrder = 'asc'): this {
|
||||
this.sortFields.push({ [field]: { order } });
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add custom sort configuration
|
||||
*/
|
||||
customSort(sort: SortField | string): this {
|
||||
this.sortFields.push(sort);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify fields to include in results (source filtering)
|
||||
*/
|
||||
fields(fields: string[]): this {
|
||||
this.sourceFields = fields;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify fields to exclude from results
|
||||
*/
|
||||
exclude(fields: string[]): this {
|
||||
this.excludeSourceFields = fields;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set number of results to return
|
||||
*/
|
||||
size(size: number): this {
|
||||
this.resultSize = size;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set offset for pagination
|
||||
*/
|
||||
from(from: number): this {
|
||||
this.resultFrom = from;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set pagination (convenience method)
|
||||
*/
|
||||
paginate(page: number, pageSize: number): this {
|
||||
this.resultFrom = (page - 1) * pageSize;
|
||||
this.resultSize = pageSize;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set whether to track total hits
|
||||
*/
|
||||
trackTotalHits(track: boolean | number): this {
|
||||
this.shouldTrackTotalHits = track;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set search timeout
|
||||
*/
|
||||
timeout(timeout: string): this {
|
||||
this.searchTimeout = timeout;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configure highlighting
|
||||
*/
|
||||
highlight(config: SearchOptions['highlight']): this {
|
||||
this.highlightConfig = config;
|
||||
return this;
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Aggregation Methods
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Get aggregation builder
|
||||
*/
|
||||
aggregations(configure: (builder: AggregationBuilder) => void): this {
|
||||
if (!this.aggregationBuilder) {
|
||||
this.aggregationBuilder = createAggregationBuilder();
|
||||
}
|
||||
configure(this.aggregationBuilder);
|
||||
return this;
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Build & Execute
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Build the final query DSL
|
||||
*/
|
||||
build(): SearchOptions {
|
||||
let finalQuery: QueryDSL | undefined;
|
||||
|
||||
// If custom query was set, use it
|
||||
if (this.queryDSL) {
|
||||
finalQuery = this.queryDSL;
|
||||
} else {
|
||||
// Otherwise, build from bool clauses
|
||||
const hasAnyClauses =
|
||||
this.boolClauses.must.length > 0 ||
|
||||
this.boolClauses.should.length > 0 ||
|
||||
this.boolClauses.must_not.length > 0 ||
|
||||
this.boolClauses.filter.length > 0;
|
||||
|
||||
if (hasAnyClauses) {
|
||||
const boolQuery: BoolQuery = {
|
||||
bool: {},
|
||||
};
|
||||
|
||||
if (this.boolClauses.must.length > 0) {
|
||||
boolQuery.bool.must = this.boolClauses.must;
|
||||
}
|
||||
if (this.boolClauses.should.length > 0) {
|
||||
boolQuery.bool.should = this.boolClauses.should;
|
||||
}
|
||||
if (this.boolClauses.must_not.length > 0) {
|
||||
boolQuery.bool.must_not = this.boolClauses.must_not;
|
||||
}
|
||||
if (this.boolClauses.filter.length > 0) {
|
||||
boolQuery.bool.filter = this.boolClauses.filter;
|
||||
}
|
||||
if (this.minimumShouldMatch !== undefined) {
|
||||
boolQuery.bool.minimum_should_match = this.minimumShouldMatch;
|
||||
}
|
||||
|
||||
finalQuery = boolQuery;
|
||||
}
|
||||
}
|
||||
|
||||
const searchOptions: SearchOptions = {
|
||||
...(finalQuery && { query: finalQuery }),
|
||||
...(this.sourceFields && { fields: this.sourceFields }),
|
||||
...(this.excludeSourceFields && { excludeFields: this.excludeSourceFields }),
|
||||
size: this.resultSize,
|
||||
from: this.resultFrom,
|
||||
...(this.sortFields.length > 0 && { sort: this.sortFields }),
|
||||
trackTotalHits: this.shouldTrackTotalHits,
|
||||
...(this.searchTimeout && { timeout: this.searchTimeout }),
|
||||
...(this.highlightConfig && { highlight: this.highlightConfig }),
|
||||
...(this.aggregationBuilder && { aggregations: this.aggregationBuilder.build() }),
|
||||
};
|
||||
|
||||
return searchOptions;
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute the query and return results
|
||||
*/
|
||||
async execute(): Promise<SearchResult<T>> {
|
||||
const span = defaultTracing.createSpan('query.execute', {
|
||||
'db.system': 'elasticsearch',
|
||||
'db.operation': 'search',
|
||||
'db.elasticsearch.index': this.index,
|
||||
});
|
||||
|
||||
try {
|
||||
const client = ElasticsearchConnectionManager.getInstance().getClient();
|
||||
const searchOptions = this.build();
|
||||
|
||||
defaultLogger.debug('Executing query', {
|
||||
index: this.index,
|
||||
query: searchOptions.query,
|
||||
size: searchOptions.size,
|
||||
from: searchOptions.from,
|
||||
});
|
||||
|
||||
const startTime = Date.now();
|
||||
|
||||
// Execute search
|
||||
const result = await client.search<T>({
|
||||
index: this.index,
|
||||
...searchOptions,
|
||||
});
|
||||
|
||||
const duration = Date.now() - startTime;
|
||||
|
||||
// Record metrics
|
||||
defaultMetrics.requestsTotal.inc({ operation: 'search', index: this.index });
|
||||
defaultMetrics.requestDuration.observe({ operation: 'search', index: this.index }, duration);
|
||||
|
||||
defaultLogger.info('Query executed successfully', {
|
||||
index: this.index,
|
||||
took: result.took,
|
||||
hits: result.hits.total,
|
||||
duration,
|
||||
});
|
||||
|
||||
span.setAttributes({
|
||||
'db.elasticsearch.took': result.took,
|
||||
'db.elasticsearch.hits': typeof result.hits.total === 'object' ? result.hits.total.value : result.hits.total,
|
||||
});
|
||||
span.end();
|
||||
|
||||
return result as SearchResult<T>;
|
||||
} catch (error) {
|
||||
defaultMetrics.requestErrors.inc({ operation: 'search', index: this.index });
|
||||
defaultLogger.error('Query execution failed', { index: this.index, error: error instanceof Error ? error.message : String(error) });
|
||||
span.recordException(error as Error);
|
||||
span.end();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute query and return only the hits
|
||||
*/
|
||||
async executeAndGetHits(): Promise<SearchResult<T>['hits']['hits']> {
|
||||
const result = await this.execute();
|
||||
return result.hits.hits;
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute query and return only the source documents
|
||||
*/
|
||||
async executeAndGetSources(): Promise<T[]> {
|
||||
const hits = await this.executeAndGetHits();
|
||||
return hits.map((hit) => hit._source);
|
||||
}
|
||||
|
||||
/**
|
||||
* Count documents matching the query
|
||||
*/
|
||||
async count(): Promise<number> {
|
||||
const span = defaultTracing.createSpan('query.count', {
|
||||
'db.system': 'elasticsearch',
|
||||
'db.operation': 'count',
|
||||
'db.elasticsearch.index': this.index,
|
||||
});
|
||||
|
||||
try {
|
||||
const client = ElasticsearchConnectionManager.getInstance().getClient();
|
||||
const searchOptions = this.build();
|
||||
|
||||
const result = await client.count({
|
||||
index: this.index,
|
||||
...(searchOptions.query && { query: searchOptions.query }),
|
||||
});
|
||||
|
||||
span.end();
|
||||
return result.count;
|
||||
} catch (error) {
|
||||
span.recordException(error as Error);
|
||||
span.end();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new query builder instance
|
||||
*/
|
||||
export function createQuery<T>(index: string): QueryBuilder<T> {
|
||||
return new QueryBuilder<T>(index);
|
||||
}
|
||||
563
ts/domain/query/types.ts
Normal file
563
ts/domain/query/types.ts
Normal file
@@ -0,0 +1,563 @@
|
||||
/**
 * Query DSL type definitions for type-safe Elasticsearch queries
 */

/**
 * Elasticsearch query types supported by this module
 */
export type QueryType =
  | 'match'
  | 'match_phrase'
  | 'multi_match'
  | 'term'
  | 'terms'
  | 'range'
  | 'exists'
  | 'prefix'
  | 'wildcard'
  | 'regexp'
  | 'fuzzy'
  | 'ids'
  | 'bool'
  | 'match_all'
  | 'query_string'
  | 'simple_query_string';

/**
 * Boolean query clause types (the four occurrence slots of a `bool` query)
 */
export type BoolClause = 'must' | 'should' | 'must_not' | 'filter';

/**
 * Sort order
 */
export type SortOrder = 'asc' | 'desc';

/**
 * Match query operator: whether all ('and') or any ('or') analyzed terms must match
 */
export type MatchOperator = 'or' | 'and';

/**
 * Multi-match type (controls how per-field scores are combined)
 */
export type MultiMatchType =
  | 'best_fields'
  | 'most_fields'
  | 'cross_fields'
  | 'phrase'
  | 'phrase_prefix'
  | 'bool_prefix';

/**
 * Range query bounds. The union admits numbers, Date objects, and strings
 * (e.g. ISO dates or date-math expressions).
 */
export interface RangeBounds {
  gt?: number | string | Date;
  gte?: number | string | Date;
  lt?: number | string | Date;
  lte?: number | string | Date;
}
|
||||
|
||||
/**
 * Match query definition (analyzed full-text match on a single field)
 */
export interface MatchQuery {
  match: {
    [field: string]: {
      query: string;
      operator?: MatchOperator;
      fuzziness?: number | 'AUTO';
      boost?: number;
    };
  };
}

/**
 * Match phrase query definition (terms must appear in order; `slop` allows a
 * bounded number of position moves)
 */
export interface MatchPhraseQuery {
  match_phrase: {
    [field: string]: {
      query: string;
      slop?: number;
      boost?: number;
    };
  };
}

/**
 * Multi-match query definition (one query string matched against several fields)
 */
export interface MultiMatchQuery {
  multi_match: {
    query: string;
    fields: string[];
    type?: MultiMatchType;
    operator?: MatchOperator;
    boost?: number;
  };
}

/**
 * Term query definition (exact, non-analyzed match on a single field)
 */
export interface TermQuery {
  term: {
    [field: string]: {
      value: string | number | boolean;
      boost?: number;
    };
  };
}
|
||||
|
||||
/**
|
||||
* Terms query definition
|
||||
*/
|
||||
export interface TermsQuery {
|
||||
terms: {
|
||||
[field: string]: Array<string | number | boolean>;
|
||||
boost?: number;
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Range query definition (numeric/date bounds on a single field)
 */
export interface RangeQuery {
  range: {
    [field: string]: RangeBounds & {
      boost?: number;
    };
  };
}

/**
 * Exists query definition (matches documents where the field has any value)
 */
export interface ExistsQuery {
  exists: {
    field: string;
  };
}

/**
 * Prefix query definition (non-analyzed prefix match)
 */
export interface PrefixQuery {
  prefix: {
    [field: string]: {
      value: string;
      boost?: number;
    };
  };
}

/**
 * Wildcard query definition (`*` and `?` wildcards in `value`)
 */
export interface WildcardQuery {
  wildcard: {
    [field: string]: {
      value: string;
      boost?: number;
    };
  };
}

/**
 * Regexp query definition
 */
export interface RegexpQuery {
  regexp: {
    [field: string]: {
      value: string;
      flags?: string;
      boost?: number;
    };
  };
}

/**
 * Fuzzy query definition (edit-distance match controlled by `fuzziness`)
 */
export interface FuzzyQuery {
  fuzzy: {
    [field: string]: {
      value: string;
      fuzziness?: number | 'AUTO';
      boost?: number;
    };
  };
}

/**
 * IDs query definition (match documents by their `_id` values)
 */
export interface IdsQuery {
  ids: {
    values: string[];
  };
}

/**
 * Match all query definition
 */
export interface MatchAllQuery {
  match_all: {
    boost?: number;
  };
}

/**
 * Query string query definition (full Lucene query syntax)
 */
export interface QueryStringQuery {
  query_string: {
    query: string;
    default_field?: string;
    fields?: string[];
    default_operator?: MatchOperator;
    boost?: number;
  };
}

/**
 * Simple query string query definition (error-tolerant subset of query_string)
 */
export interface SimpleQueryStringQuery {
  simple_query_string: {
    query: string;
    fields?: string[];
    default_operator?: MatchOperator;
    boost?: number;
  };
}
|
||||
|
||||
/**
 * Boolean query definition (compound query combining sub-queries per clause)
 */
export interface BoolQuery {
  bool: {
    must?: QueryDSL[];
    should?: QueryDSL[];
    must_not?: QueryDSL[];
    filter?: QueryDSL[];
    minimum_should_match?: number | string;
    boost?: number;
  };
}

/**
 * Union of all query types accepted wherever a query is expected
 */
export type QueryDSL =
  | MatchQuery
  | MatchPhraseQuery
  | MultiMatchQuery
  | TermQuery
  | TermsQuery
  | RangeQuery
  | ExistsQuery
  | PrefixQuery
  | WildcardQuery
  | RegexpQuery
  | FuzzyQuery
  | IdsQuery
  | MatchAllQuery
  | QueryStringQuery
  | SimpleQueryStringQuery
  | BoolQuery;
|
||||
|
||||
/**
|
||||
* Sort field definition
|
||||
*/
|
||||
export interface SortField {
|
||||
[field: string]: {
|
||||
order?: SortOrder;
|
||||
mode?: 'min' | 'max' | 'sum' | 'avg' | 'median';
|
||||
missing?: '_first' | '_last' | string;
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Search request options.
 *
 * Declarative description of one search call: query, pagination,
 * sorting, source filtering, highlighting and aggregations.
 */
export interface SearchOptions {
  /** Query to execute; when omitted, behavior is server-defined (presumably match-all — confirm against the executor). */
  query?: QueryDSL;

  /** Fields to return (source filtering) */
  fields?: string[];

  /** Exclude source fields */
  excludeFields?: string[];

  /** Number of results to return */
  size?: number;

  /** Offset for pagination */
  from?: number;

  /** Sort order; strings name a field, objects give per-field options. */
  sort?: Array<SortField | string>;

  /** Track total hits: `true`/`false`, or a numeric accuracy threshold. */
  trackTotalHits?: boolean | number;

  /** Search timeout (Elasticsearch duration string, e.g. '5s'). */
  timeout?: string;

  /** Highlight configuration, keyed by field to highlight. */
  highlight?: {
    fields: {
      [field: string]: {
        /** Markup inserted before each highlighted fragment. */
        pre_tags?: string[];
        /** Markup inserted after each highlighted fragment. */
        post_tags?: string[];
        /** Fragment length in characters. */
        fragment_size?: number;
        /** Maximum fragments returned per field. */
        number_of_fragments?: number;
      };
    };
  };

  /** Aggregations to compute alongside the hits, keyed by result name. */
  aggregations?: Record<string, AggregationDSL>;
}
|
||||
|
||||
/**
 * Names of the aggregation kinds supported by this client.
 *
 * Bucket aggregations (terms, histograms, range, filter(s), nested)
 * and metric aggregations (avg, sum, stats, percentiles, ...).
 */
export type AggregationType =
  | 'terms'
  | 'avg'
  | 'sum'
  | 'min'
  | 'max'
  | 'cardinality'
  | 'stats'
  | 'extended_stats'
  | 'percentiles'
  | 'date_histogram'
  | 'histogram'
  | 'range'
  | 'filter'
  | 'filters'
  | 'nested'
  | 'reverse_nested'
  | 'top_hits';
|
||||
|
||||
/**
 * Terms (bucket) aggregation: one bucket per distinct field value.
 */
export interface TermsAggregation {
  terms: {
    /** Field whose values form the buckets. */
    field: string;
    /** Maximum number of buckets returned. */
    size?: number;
    /** Bucket ordering, e.g. `{ _count: 'desc' }`. */
    order?: Record<string, SortOrder>;
    /** Substitute value for documents missing the field. */
    missing?: string | number;
  };
  /** Sub-aggregations computed per bucket. */
  aggs?: Record<string, AggregationDSL>;
}
|
||||
|
||||
/**
 * Metric aggregations (avg, sum, min, max, cardinality).
 *
 * Exactly one of the optional members is expected to be set per
 * aggregation; the key names the metric to compute.
 */
export interface MetricAggregation {
  avg?: { field: string; missing?: number };
  sum?: { field: string; missing?: number };
  min?: { field: string; missing?: number };
  max?: { field: string; missing?: number };
  /** Approximate distinct-value count. */
  cardinality?: { field: string };
}
|
||||
|
||||
/**
 * Stats aggregation: count, min, max, avg and sum in one pass.
 */
export interface StatsAggregation {
  stats: {
    /** Numeric field to compute statistics over. */
    field: string;
  };
}
|
||||
|
||||
/**
 * Extended stats aggregation: the `stats` metrics plus variance,
 * standard deviation and related measures.
 */
export interface ExtendedStatsAggregation {
  extended_stats: {
    /** Numeric field to compute statistics over. */
    field: string;
  };
}
|
||||
|
||||
/**
 * Percentiles aggregation.
 */
export interface PercentilesAggregation {
  percentiles: {
    /** Numeric field to compute percentiles over. */
    field: string;
    /** Percentile ranks to report (server default applies when omitted). */
    percents?: number[];
  };
}
|
||||
|
||||
/**
 * Date histogram (bucket) aggregation: one bucket per time interval.
 *
 * Use exactly one of `calendar_interval` (e.g. '1M', respects calendar
 * boundaries) or `fixed_interval` (e.g. '30d', fixed-length spans).
 */
export interface DateHistogramAggregation {
  date_histogram: {
    /** Date field to bucket on. */
    field: string;
    calendar_interval?: string;
    fixed_interval?: string;
    /** Date format for bucket keys. */
    format?: string;
    /** Time zone used for bucketing. */
    time_zone?: string;
    /** Drop buckets with fewer documents than this. */
    min_doc_count?: number;
  };
  /** Sub-aggregations computed per bucket. */
  aggs?: Record<string, AggregationDSL>;
}
|
||||
|
||||
/**
 * Numeric histogram (bucket) aggregation: fixed-width numeric buckets.
 */
export interface HistogramAggregation {
  histogram: {
    /** Numeric field to bucket on. */
    field: string;
    /** Bucket width. */
    interval: number;
    /** Drop buckets with fewer documents than this. */
    min_doc_count?: number;
  };
  /** Sub-aggregations computed per bucket. */
  aggs?: Record<string, AggregationDSL>;
}
|
||||
|
||||
/**
 * Range (bucket) aggregation: explicit numeric ranges.
 */
export interface RangeAggregation {
  range: {
    /** Numeric field to bucket on. */
    field: string;
    /** Ranges; `from` is inclusive, `to` exclusive; `key` labels the bucket. */
    ranges: Array<{ from?: number; to?: number; key?: string }>;
  };
  /** Sub-aggregations computed per bucket. */
  aggs?: Record<string, AggregationDSL>;
}
|
||||
|
||||
/**
 * Filter (bucket) aggregation: a single bucket containing the
 * documents matching `filter`.
 */
export interface FilterAggregation {
  filter: QueryDSL;
  /** Sub-aggregations computed over the filtered bucket. */
  aggs?: Record<string, AggregationDSL>;
}
|
||||
|
||||
/**
 * Top-hits aggregation: returns the best-matching documents per bucket
 * (typically used as a sub-aggregation).
 */
export interface TopHitsAggregation {
  top_hits: {
    /** Number of hits to return per bucket. */
    size?: number;
    /** Sort applied before selecting the top hits. */
    sort?: Array<SortField | string>;
    /** Source filtering for the returned hits. */
    _source?: boolean | { includes?: string[]; excludes?: string[] };
  };
}
|
||||
|
||||
/**
 * Union of all aggregation request types.
 *
 * Each variant declares its aggregation kind as its top-level key.
 */
export type AggregationDSL =
  | TermsAggregation
  | MetricAggregation
  | StatsAggregation
  | ExtendedStatsAggregation
  | PercentilesAggregation
  | DateHistogramAggregation
  | HistogramAggregation
  | RangeAggregation
  | FilterAggregation
  | TopHitsAggregation;
|
||||
|
||||
/**
 * One search result hit.
 *
 * @typeParam T - Shape of the document `_source`.
 */
export interface SearchHit<T> {
  _index: string;
  _id: string;
  /** Relevance score; `null` when scoring is disabled (e.g. sorted searches). */
  _score: number | null;
  _source: T;
  /** Doc values / stored fields requested via `fields`. */
  fields?: Record<string, unknown[]>;
  /** Highlighted fragments, keyed by field. */
  highlight?: Record<string, string[]>;
  /** Sort values for this hit (usable as `search_after` cursor). */
  sort?: Array<string | number>;
}
|
||||
|
||||
/**
 * One bucket of a bucket aggregation result.
 *
 * The index signature carries any nested sub-aggregation results,
 * keyed by the sub-aggregation's name.
 */
export interface AggregationBucket {
  key: string | number;
  /** Formatted key, e.g. for date histogram buckets. */
  key_as_string?: string;
  doc_count: number;
  [aggName: string]: unknown;
}
|
||||
|
||||
/**
 * Terms aggregation result.
 */
export interface TermsAggregationResult {
  /** Upper bound of the per-bucket count error. */
  doc_count_error_upper_bound: number;
  /** Documents in buckets beyond the returned `size`. */
  sum_other_doc_count: number;
  buckets: AggregationBucket[];
}
|
||||
|
||||
/**
 * Single-value metric aggregation result (avg, sum, min, max,
 * cardinality).
 */
export interface MetricAggregationResult {
  value: number;
}
|
||||
|
||||
/**
 * Stats aggregation result.
 */
export interface StatsAggregationResult {
  count: number;
  min: number;
  max: number;
  avg: number;
  sum: number;
}
|
||||
|
||||
/**
 * Percentiles aggregation result.
 *
 * Keys are percentile ranks as strings (e.g. '50.0', '99.0').
 */
export interface PercentilesAggregationResult {
  values: Record<string, number>;
}
|
||||
|
||||
/**
 * Generic aggregation result.
 *
 * NOTE: the trailing `Record<string, unknown>` member subsumes the
 * others for assignability, so consumers must narrow structurally
 * (e.g. check for `buckets` / `value`) rather than rely on the union.
 */
export type AggregationResult =
  | TermsAggregationResult
  | MetricAggregationResult
  | StatsAggregationResult
  | PercentilesAggregationResult
  | { buckets: AggregationBucket[] }
  | { value: number }
  | Record<string, unknown>;
|
||||
|
||||
/**
 * Full search response.
 *
 * @typeParam T - Shape of each hit's `_source`.
 */
export interface SearchResult<T> {
  /** Server-side execution time in milliseconds. */
  took: number;
  timed_out: boolean;
  /** Shard-level execution summary. */
  _shards: {
    total: number;
    successful: number;
    skipped: number;
    failed: number;
  };
  hits: {
    total: {
      value: number;
      /** 'eq' = exact count; 'gte' = lower bound (see `trackTotalHits`). */
      relation: 'eq' | 'gte';
    };
    max_score: number | null;
    hits: SearchHit<T>[];
  };
  /** Present only when the request included aggregations. */
  aggregations?: Record<string, AggregationResult>;
}
|
||||
31
ts/domain/transactions/index.ts
Normal file
31
ts/domain/transactions/index.ts
Normal file
@@ -0,0 +1,31 @@
|
||||
/**
 * Transaction Module
 *
 * Distributed transactions with ACID-like semantics, built on
 * optimistic concurrency control and compensation-based rollback
 * (see ./types.js for the caveats — Elasticsearch has no native
 * multi-document transactions).
 */

// Main classes
export {
  TransactionManager,
  Transaction,
  createTransactionManager,
} from './transaction-manager.js';

// Types (type-only re-exports; erased at compile time)
export type {
  TransactionIsolationLevel,
  TransactionState,
  LockingStrategy,
  TransactionOperationType,
  TransactionOperation,
  TransactionConfig,
  TransactionContext,
  TransactionResult,
  TransactionStats,
  LockInfo,
  ConflictResolutionStrategy,
  ConflictInfo,
  TransactionManagerConfig,
  Savepoint,
  TransactionCallbacks,
} from './types.js';
|
||||
859
ts/domain/transactions/transaction-manager.ts
Normal file
859
ts/domain/transactions/transaction-manager.ts
Normal file
@@ -0,0 +1,859 @@
|
||||
/**
|
||||
* Transaction Manager
|
||||
*
|
||||
* Manages distributed transactions with ACID-like semantics
|
||||
*/
|
||||
|
||||
import { ElasticsearchConnectionManager } from '../../core/connection/connection-manager.js';
|
||||
import { Logger, defaultLogger } from '../../core/observability/logger.js';
|
||||
import { MetricsCollector, defaultMetricsCollector } from '../../core/observability/metrics.js';
|
||||
import { DocumentConflictError } from '../../core/errors/index.js';
|
||||
import type {
|
||||
TransactionConfig,
|
||||
TransactionContext,
|
||||
TransactionOperation,
|
||||
TransactionResult,
|
||||
TransactionStats,
|
||||
TransactionState,
|
||||
TransactionManagerConfig,
|
||||
TransactionCallbacks,
|
||||
ConflictInfo,
|
||||
ConflictResolutionStrategy,
|
||||
Savepoint,
|
||||
} from './types.js';
|
||||
|
||||
/**
 * Default configuration, merged under user-supplied overrides in the
 * TransactionManager constructor.
 */
const DEFAULT_CONFIG: Required<TransactionManagerConfig> = {
  defaultIsolationLevel: 'read_committed',
  defaultLockingStrategy: 'optimistic',
  defaultTimeout: 30000, // 30 seconds per transaction before cleanup rolls it back
  enableCleanup: true,
  cleanupInterval: 60000, // 1 minute between expiry sweeps
  maxConcurrentTransactions: 1000,
  conflictResolution: 'retry',
  enableLogging: true,
  enableMetrics: true,
};
|
||||
|
||||
/**
 * Transaction Manager.
 *
 * Tracks transaction contexts in memory, executes their operations
 * against Elasticsearch with seq_no/primary_term optimistic locking,
 * and rolls back via per-operation compensation operations executed in
 * reverse order. A background timer rolls back transactions that
 * exceed their configured timeout.
 *
 * NOTE(review): state is held in this process only — not safe across
 * multiple application instances without external coordination.
 */
export class TransactionManager {
  private config: Required<TransactionManagerConfig>;
  /** Active (not yet committed/rolled back) contexts, keyed by transaction ID. */
  private transactions: Map<string, TransactionContext> = new Map();
  private stats: TransactionStats;
  private cleanupTimer?: NodeJS.Timeout;
  private logger: Logger;
  private metrics: MetricsCollector;
  /** Monotonic suffix for generated transaction IDs. */
  private transactionCounter = 0;

  constructor(config: TransactionManagerConfig = {}) {
    // User overrides win over DEFAULT_CONFIG.
    this.config = { ...DEFAULT_CONFIG, ...config };
    this.logger = defaultLogger;
    this.metrics = defaultMetricsCollector;

    this.stats = {
      totalStarted: 0,
      totalCommitted: 0,
      totalRolledBack: 0,
      totalFailed: 0,
      totalOperations: 0,
      totalConflicts: 0,
      totalRetries: 0,
      avgDuration: 0,
      avgOperationsPerTransaction: 0,
      successRate: 0,
      activeTransactions: 0,
    };
  }

  /**
   * Initialize transaction manager: starts the expiry-cleanup timer
   * when enabled. Safe to call once before use.
   */
  async initialize(): Promise<void> {
    if (this.config.enableCleanup) {
      this.startCleanupTimer();
    }

    this.logger.info('TransactionManager initialized', {
      defaultIsolationLevel: this.config.defaultIsolationLevel,
      defaultLockingStrategy: this.config.defaultLockingStrategy,
      maxConcurrentTransactions: this.config.maxConcurrentTransactions,
    });
  }

  /**
   * Begin a new transaction.
   *
   * @param config - Per-transaction overrides; unset fields fall back
   *   to the manager defaults.
   * @param callbacks - Lifecycle hooks invoked at begin/commit/rollback
   *   and around each operation.
   * @returns A {@link Transaction} handle bound to the new context.
   * @throws Error when the concurrent-transaction limit is reached.
   */
  async begin(
    config: TransactionConfig = {},
    callbacks?: TransactionCallbacks
  ): Promise<Transaction> {
    // Check concurrent transaction limit
    if (this.transactions.size >= this.config.maxConcurrentTransactions) {
      throw new Error(
        `Maximum concurrent transactions limit reached (${this.config.maxConcurrentTransactions})`
      );
    }

    // Generate transaction ID (caller-supplied ID wins; '' would also
    // be replaced since || treats it as falsy)
    const transactionId = config.id || this.generateTransactionId();

    // Create transaction context with all config fields resolved
    const context: TransactionContext = {
      id: transactionId,
      state: 'active',
      config: {
        id: transactionId,
        isolationLevel: config.isolationLevel ?? this.config.defaultIsolationLevel,
        lockingStrategy: config.lockingStrategy ?? this.config.defaultLockingStrategy,
        timeout: config.timeout ?? this.config.defaultTimeout,
        autoRollback: config.autoRollback ?? true,
        maxRetries: config.maxRetries ?? 3,
        retryDelay: config.retryDelay ?? 100,
        strictOrdering: config.strictOrdering ?? false,
        metadata: config.metadata ?? {},
      },
      operations: [],
      readSet: new Map(),
      writeSet: new Set(),
      startTime: new Date(),
      retryAttempts: 0,
    };

    this.transactions.set(transactionId, context);
    this.stats.totalStarted++;
    this.stats.activeTransactions++;

    if (this.config.enableLogging) {
      this.logger.info('Transaction started', {
        transactionId,
        isolationLevel: context.config.isolationLevel,
        lockingStrategy: context.config.lockingStrategy,
      });
    }

    if (this.config.enableMetrics) {
      this.metrics.recordCounter('transactions.started', 1);
      this.metrics.recordGauge('transactions.active', this.stats.activeTransactions);
    }

    // Call onBegin callback
    if (callbacks?.onBegin) {
      await callbacks.onBegin(context);
    }

    return new Transaction(this, context, callbacks);
  }

  /**
   * Get the live context of an active transaction, or undefined if it
   * has already completed (contexts are removed on commit/rollback).
   */
  getTransaction(transactionId: string): TransactionContext | undefined {
    return this.transactions.get(transactionId);
  }

  /**
   * Commit a transaction: executes any not-yet-executed operations,
   * marks all of them committed, updates stats and removes the context.
   *
   * On failure the transaction is auto-rolled back when
   * `config.autoRollback` is set; otherwise the error is rethrown.
   *
   * NOTE(review): on the failure path with autoRollback=false, neither
   * `totalFailed` nor `activeTransactions` is adjusted here — confirm
   * whether that is intentional.
   *
   * @throws Error when the transaction is unknown or not in an
   *   'active'/'prepared' state.
   */
  async commit(transactionId: string, callbacks?: TransactionCallbacks): Promise<TransactionResult> {
    const context = this.transactions.get(transactionId);

    if (!context) {
      throw new Error(`Transaction ${transactionId} not found`);
    }

    if (context.state !== 'active' && context.state !== 'prepared') {
      throw new Error(`Cannot commit transaction in state: ${context.state}`);
    }

    const startTime = Date.now();

    try {
      // Call onBeforeCommit callback
      if (callbacks?.onBeforeCommit) {
        await callbacks.onBeforeCommit(context);
      }

      context.state = 'committing';

      // NOTE(review): `client` is never used in this method — executeOperation
      // fetches its own client. Candidate for removal.
      const client = ElasticsearchConnectionManager.getInstance().getClient();

      // Execute and commit all operations
      let committed = 0;
      for (const operation of context.operations) {
        if (operation.committed) {
          committed++;
          continue;
        }

        // Execute operation if not yet executed (deferred writes run here)
        if (!operation.executed) {
          await this.executeOperation(context, operation, callbacks);
        }

        // Mark as committed
        operation.committed = true;
        committed++;
      }

      context.state = 'committed';
      context.endTime = new Date();

      const duration = Date.now() - startTime;

      this.stats.totalCommitted++;
      this.stats.activeTransactions--;
      this.updateAverages(duration, context.operations.length);

      const result: TransactionResult = {
        success: true,
        transactionId,
        state: 'committed',
        operationsExecuted: context.operations.filter((op) => op.executed).length,
        operationsCommitted: committed,
        operationsRolledBack: 0,
        duration,
        metadata: context.config.metadata,
      };

      if (this.config.enableLogging) {
        this.logger.info('Transaction committed', {
          transactionId,
          operations: committed,
          duration,
        });
      }

      if (this.config.enableMetrics) {
        this.metrics.recordCounter('transactions.committed', 1);
        this.metrics.recordHistogram('transactions.duration', duration);
        this.metrics.recordGauge('transactions.active', this.stats.activeTransactions);
      }

      // Call onAfterCommit callback
      if (callbacks?.onAfterCommit) {
        await callbacks.onAfterCommit(result);
      }

      // Cleanup transaction
      this.transactions.delete(transactionId);

      return result;
    } catch (error: any) { // NOTE(review): strict TS would prefer `unknown` here
      context.state = 'failed';
      context.error = error;

      if (this.config.enableLogging) {
        this.logger.error('Transaction commit failed', {
          transactionId,
          error: error.message,
        });
      }

      // Auto-rollback if enabled
      if (context.config.autoRollback) {
        return await this.rollback(transactionId, callbacks);
      }

      throw error;
    }
  }

  /**
   * Rollback a transaction: runs each executed operation's compensation
   * in reverse order, updates stats and removes the context.
   *
   * Individual compensation failures are logged and skipped so the
   * remaining compensations still run (best effort).
   *
   * NOTE(review): unlike commit(), this does not guard on the current
   * state — confirm whether rolling back e.g. a 'committed' context
   * should be rejected.
   *
   * @throws Error when the transaction is unknown, or when the rollback
   *   machinery itself fails.
   */
  async rollback(
    transactionId: string,
    callbacks?: TransactionCallbacks
  ): Promise<TransactionResult> {
    const context = this.transactions.get(transactionId);

    if (!context) {
      throw new Error(`Transaction ${transactionId} not found`);
    }

    const startTime = Date.now();

    try {
      // Call onBeforeRollback callback
      if (callbacks?.onBeforeRollback) {
        await callbacks.onBeforeRollback(context);
      }

      context.state = 'rolling_back';

      // NOTE(review): `client` is never used in this method — candidate
      // for removal.
      const client = ElasticsearchConnectionManager.getInstance().getClient();

      // Execute compensation operations in reverse order (undo newest first)
      let rolledBack = 0;
      for (let i = context.operations.length - 1; i >= 0; i--) {
        const operation = context.operations[i];

        // Only executed operations that recorded a compensation can be undone
        if (!operation.executed || !operation.compensation) {
          continue;
        }

        try {
          await this.executeOperation(context, operation.compensation);
          rolledBack++;
        } catch (error: any) {
          // Best effort: log and continue with the remaining compensations
          this.logger.error('Compensation operation failed', {
            transactionId,
            operation: operation.type,
            index: operation.index,
            id: operation.id,
            error: error.message,
          });
        }
      }

      context.state = 'rolled_back';
      context.endTime = new Date();

      const duration = Date.now() - startTime;

      this.stats.totalRolledBack++;
      this.stats.activeTransactions--;

      const result: TransactionResult = {
        success: false,
        transactionId,
        state: 'rolled_back',
        operationsExecuted: context.operations.filter((op) => op.executed).length,
        operationsCommitted: 0,
        operationsRolledBack: rolledBack,
        duration,
        error: context.error
          ? {
              message: context.error.message,
              type: context.error.name,
            }
          : undefined,
        metadata: context.config.metadata,
      };

      if (this.config.enableLogging) {
        this.logger.info('Transaction rolled back', {
          transactionId,
          rolledBack,
          duration,
        });
      }

      if (this.config.enableMetrics) {
        this.metrics.recordCounter('transactions.rolled_back', 1);
        this.metrics.recordGauge('transactions.active', this.stats.activeTransactions);
      }

      // Call onAfterRollback callback
      if (callbacks?.onAfterRollback) {
        await callbacks.onAfterRollback(result);
      }

      // Cleanup transaction
      this.transactions.delete(transactionId);

      return result;
    } catch (error: any) {
      context.state = 'failed';
      context.error = error;
      this.stats.totalFailed++;

      if (this.config.enableLogging) {
        this.logger.error('Transaction rollback failed', {
          transactionId,
          error: error.message,
        });
      }

      throw error;
    }
  }

  /**
   * Get a shallow snapshot of the transaction statistics.
   */
  getStats(): TransactionStats {
    return { ...this.stats };
  }

  /**
   * Destroy transaction manager: stops the cleanup timer and
   * best-effort rolls back every still-active transaction.
   */
  async destroy(): Promise<void> {
    if (this.cleanupTimer) {
      clearInterval(this.cleanupTimer);
    }

    // Rollback all active transactions (snapshot keys first, since
    // rollback mutates the map)
    const activeTransactions = Array.from(this.transactions.keys());
    for (const transactionId of activeTransactions) {
      try {
        await this.rollback(transactionId);
      } catch (error) {
        // Ignore errors during cleanup
      }
    }

    this.transactions.clear();
  }

  // ============================================================================
  // Internal Methods
  // ============================================================================

  /**
   * Add operation to transaction: appends to the operation log and
   * tracks the key in the read set (reads with a known version) or
   * write set (all mutations).
   */
  addOperation(context: TransactionContext, operation: TransactionOperation): void {
    context.operations.push(operation);
    this.stats.totalOperations++;

    const key = `${operation.index}:${operation.id}`;

    if (operation.type === 'read') {
      // Add to read set for repeatable read
      if (operation.version) {
        context.readSet.set(key, operation.version);
      }
    } else {
      // Add to write set
      context.writeSet.add(key);
    }
  }

  /**
   * Execute an operation against Elasticsearch.
   *
   * Records the resulting seq_no/primary_term on the operation, and for
   * mutations builds the compensation operation used on rollback
   * (create -> delete; update/delete -> restore originalDocument).
   * Version conflicts (HTTP 409) are routed to handleConflict; all
   * other errors propagate.
   */
  private async executeOperation(
    context: TransactionContext,
    operation: TransactionOperation,
    callbacks?: TransactionCallbacks
  ): Promise<void> {
    // Call onBeforeOperation callback
    if (callbacks?.onBeforeOperation) {
      await callbacks.onBeforeOperation(operation);
    }

    const client = ElasticsearchConnectionManager.getInstance().getClient();

    try {
      switch (operation.type) {
        case 'read': {
          const result = await client.get({
            index: operation.index,
            id: operation.id,
          });

          operation.version = {
            seqNo: result._seq_no!,
            primaryTerm: result._primary_term!,
          };

          operation.originalDocument = result._source;
          break;
        }

        case 'create': {
          // op_type 'create' makes Elasticsearch fail with 409 if the
          // document already exists
          const result = await client.index({
            index: operation.index,
            id: operation.id,
            document: operation.document,
            op_type: 'create',
          });

          operation.version = {
            seqNo: result._seq_no,
            primaryTerm: result._primary_term,
          };

          // Create compensation (delete)
          operation.compensation = {
            type: 'delete',
            index: operation.index,
            id: operation.id,
            timestamp: new Date(),
            executed: false,
            committed: false,
          };
          break;
        }

        case 'update': {
          const updateRequest: any = {
            index: operation.index,
            id: operation.id,
            document: operation.document,
          };

          // Add version for optimistic locking (rejects with 409 if the
          // document changed since it was read)
          if (operation.version) {
            updateRequest.if_seq_no = operation.version.seqNo;
            updateRequest.if_primary_term = operation.version.primaryTerm;
          }

          const result = await client.index(updateRequest);

          operation.version = {
            seqNo: result._seq_no,
            primaryTerm: result._primary_term,
          };

          // Create compensation (restore original)
          if (operation.originalDocument) {
            operation.compensation = {
              type: 'update',
              index: operation.index,
              id: operation.id,
              document: operation.originalDocument,
              timestamp: new Date(),
              executed: false,
              committed: false,
            };
          }
          break;
        }

        case 'delete': {
          const deleteRequest: any = {
            index: operation.index,
            id: operation.id,
          };

          // Add version for optimistic locking
          if (operation.version) {
            deleteRequest.if_seq_no = operation.version.seqNo;
            deleteRequest.if_primary_term = operation.version.primaryTerm;
          }

          await client.delete(deleteRequest);

          // Create compensation (restore document)
          if (operation.originalDocument) {
            operation.compensation = {
              type: 'create',
              index: operation.index,
              id: operation.id,
              document: operation.originalDocument,
              timestamp: new Date(),
              executed: false,
              committed: false,
            };
          }
          break;
        }
      }

      operation.executed = true;

      // Call onAfterOperation callback
      if (callbacks?.onAfterOperation) {
        await callbacks.onAfterOperation(operation);
      }
    } catch (error: any) {
      // Handle version conflict (Elasticsearch reports optimistic-lock
      // failures as HTTP 409)
      if (error.name === 'ResponseError' && error.meta?.statusCode === 409) {
        await this.handleConflict(context, operation, error, callbacks);
      } else {
        throw error;
      }
    }
  }

  /**
   * Handle version conflict according to the resolution strategy
   * (manager default, overridable per-conflict via the onConflict
   * callback):
   *  - abort: throw DocumentConflictError
   *  - retry: re-execute after retryDelay, up to maxRetries (counted
   *    per transaction, not per operation)
   *  - skip:  leave the operation unexecuted
   *  - force: drop the version check and re-execute (last write wins)
   *  - merge: unsupported, throws
   */
  private async handleConflict(
    context: TransactionContext,
    operation: TransactionOperation,
    error: Error,
    callbacks?: TransactionCallbacks
  ): Promise<void> {
    this.stats.totalConflicts++;

    const conflict: ConflictInfo = {
      operation,
      expectedVersion: operation.version,
      detectedAt: new Date(),
    };

    if (this.config.enableMetrics) {
      this.metrics.recordCounter('transactions.conflicts', 1);
    }

    // Call onConflict callback (may override the configured strategy)
    let strategy: ConflictResolutionStrategy = this.config.conflictResolution;
    if (callbacks?.onConflict) {
      strategy = await callbacks.onConflict(conflict);
    }

    switch (strategy) {
      case 'abort':
        throw new DocumentConflictError(
          `Version conflict for ${operation.index}/${operation.id}`,
          { index: operation.index, id: operation.id }
        );

      case 'retry':
        if (context.retryAttempts >= context.config.maxRetries) {
          throw new DocumentConflictError(
            `Max retries exceeded for ${operation.index}/${operation.id}`,
            { index: operation.index, id: operation.id }
          );
        }

        context.retryAttempts++;
        this.stats.totalRetries++;

        // Wait before retry (fixed delay; no exponential backoff here)
        await new Promise((resolve) => setTimeout(resolve, context.config.retryDelay));

        // Retry operation (still carries the stale version — the retry
        // will 409 again unless the version was refreshed upstream)
        await this.executeOperation(context, operation, callbacks);
        break;

      case 'skip':
        // Skip this operation
        operation.executed = false;
        break;

      case 'force':
        // Force update without version check
        delete operation.version;
        await this.executeOperation(context, operation, callbacks);
        break;

      case 'merge':
        // Not implemented - requires custom merge logic
        throw new Error('Merge conflict resolution not implemented');
    }
  }

  /**
   * Generate transaction ID: timestamp plus a process-local counter.
   * Unique within this process only.
   */
  private generateTransactionId(): string {
    this.transactionCounter++;
    return `txn-${Date.now()}-${this.transactionCounter}`;
  }

  /**
   * Start cleanup timer for expired transactions.
   */
  private startCleanupTimer(): void {
    this.cleanupTimer = setInterval(() => {
      this.cleanupExpiredTransactions();
    }, this.config.cleanupInterval);
  }

  /**
   * Cleanup expired transactions: fire-and-forget rollback of any
   * context older than its configured timeout.
   */
  private cleanupExpiredTransactions(): void {
    const now = Date.now();

    for (const [transactionId, context] of this.transactions) {
      const elapsed = now - context.startTime.getTime();

      if (elapsed > context.config.timeout) {
        this.logger.warn('Transaction timeout, rolling back', { transactionId });

        this.rollback(transactionId).catch((error) => {
          this.logger.error('Failed to rollback expired transaction', {
            transactionId,
            error,
          });
        });
      }
    }
  }

  /**
   * Update average statistics using a running (incremental) mean over
   * all completed (committed + rolled back) transactions. Called only
   * from commit(), after totalCommitted is incremented, so `total` >= 1.
   */
  private updateAverages(duration: number, operations: number): void {
    const total = this.stats.totalCommitted + this.stats.totalRolledBack;

    this.stats.avgDuration =
      (this.stats.avgDuration * (total - 1) + duration) / total;

    this.stats.avgOperationsPerTransaction =
      (this.stats.avgOperationsPerTransaction * (total - 1) + operations) / total;

    this.stats.successRate =
      this.stats.totalCommitted / (this.stats.totalCommitted + this.stats.totalRolledBack + this.stats.totalFailed);
  }
}
|
||||
|
||||
/**
|
||||
* Transaction class for fluent API
|
||||
*/
|
||||
export class Transaction {
|
||||
private savepoints: Map<string, Savepoint> = new Map();
|
||||
|
||||
constructor(
|
||||
private manager: TransactionManager,
|
||||
private context: TransactionContext,
|
||||
private callbacks?: TransactionCallbacks
|
||||
) {}
|
||||
|
||||
/**
|
||||
* Get transaction ID
|
||||
*/
|
||||
getId(): string {
|
||||
return this.context.id;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get transaction state
|
||||
*/
|
||||
getState(): TransactionState {
|
||||
return this.context.state;
|
||||
}
|
||||
|
||||
/**
|
||||
* Read a document
|
||||
*/
|
||||
async read<T>(index: string, id: string): Promise<T | null> {
|
||||
const operation: TransactionOperation<T> = {
|
||||
type: 'read',
|
||||
index,
|
||||
id,
|
||||
timestamp: new Date(),
|
||||
executed: false,
|
||||
committed: false,
|
||||
};
|
||||
|
||||
this.manager.addOperation(this.context, operation);
|
||||
|
||||
const client = ElasticsearchConnectionManager.getInstance().getClient();
|
||||
|
||||
try {
|
||||
const result = await client.get({ index, id });
|
||||
|
||||
operation.version = {
|
||||
seqNo: result._seq_no!,
|
||||
primaryTerm: result._primary_term!,
|
||||
};
|
||||
|
||||
operation.originalDocument = result._source as T;
|
||||
operation.executed = true;
|
||||
|
||||
return result._source as T;
|
||||
} catch (error: any) {
|
||||
if (error.name === 'ResponseError' && error.meta?.statusCode === 404) {
|
||||
return null;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a document
|
||||
*/
|
||||
async create<T>(index: string, id: string, document: T): Promise<void> {
|
||||
const operation: TransactionOperation<T> = {
|
||||
type: 'create',
|
||||
index,
|
||||
id,
|
||||
document,
|
||||
timestamp: new Date(),
|
||||
executed: false,
|
||||
committed: false,
|
||||
};
|
||||
|
||||
this.manager.addOperation(this.context, operation);
|
||||
}
|
||||
|
||||
/**
|
||||
* Update a document
|
||||
*/
|
||||
async update<T>(index: string, id: string, document: Partial<T>): Promise<void> {
|
||||
// First read the current version
|
||||
const current = await this.read<T>(index, id);
|
||||
|
||||
const operation: TransactionOperation<T> = {
|
||||
type: 'update',
|
||||
index,
|
||||
id,
|
||||
document: { ...current, ...document } as T,
|
||||
originalDocument: current ?? undefined,
|
||||
timestamp: new Date(),
|
||||
executed: false,
|
||||
committed: false,
|
||||
};
|
||||
|
||||
this.manager.addOperation(this.context, operation);
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a document
|
||||
*/
|
||||
async delete(index: string, id: string): Promise<void> {
|
||||
// First read the current version
|
||||
const current = await this.read(index, id);
|
||||
|
||||
const operation: TransactionOperation = {
|
||||
type: 'delete',
|
||||
index,
|
||||
id,
|
||||
originalDocument: current ?? undefined,
|
||||
timestamp: new Date(),
|
||||
executed: false,
|
||||
committed: false,
|
||||
};
|
||||
|
||||
this.manager.addOperation(this.context, operation);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a savepoint
|
||||
*/
|
||||
savepoint(name: string): void {
|
||||
this.savepoints.set(name, {
|
||||
name,
|
||||
operationsCount: this.context.operations.length,
|
||||
createdAt: new Date(),
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Rollback to savepoint
|
||||
*/
|
||||
rollbackTo(name: string): void {
|
||||
const savepoint = this.savepoints.get(name);
|
||||
|
||||
if (!savepoint) {
|
||||
throw new Error(`Savepoint '${name}' not found`);
|
||||
}
|
||||
|
||||
// Remove operations after savepoint
|
||||
this.context.operations.splice(savepoint.operationsCount);
|
||||
}
|
||||
|
||||
/**
|
||||
* Commit the transaction
|
||||
*/
|
||||
async commit(): Promise<TransactionResult> {
|
||||
return await this.manager.commit(this.context.id, this.callbacks);
|
||||
}
|
||||
|
||||
/**
|
||||
* Rollback the transaction
|
||||
*/
|
||||
async rollback(): Promise<TransactionResult> {
|
||||
return await this.manager.rollback(this.context.id, this.callbacks);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a transaction manager
|
||||
*/
|
||||
export function createTransactionManager(
|
||||
config?: TransactionManagerConfig
|
||||
): TransactionManager {
|
||||
return new TransactionManager(config);
|
||||
}
|
||||
361
ts/domain/transactions/types.ts
Normal file
361
ts/domain/transactions/types.ts
Normal file
@@ -0,0 +1,361 @@
|
||||
/**
|
||||
* Transaction types for distributed ACID-like operations
|
||||
*
|
||||
* Note: Elasticsearch doesn't natively support ACID transactions across multiple
|
||||
* documents. This implementation provides transaction-like semantics using:
|
||||
* - Optimistic concurrency control (seq_no/primary_term)
|
||||
* - Two-phase operations (prepare/commit)
|
||||
* - Compensation-based rollback
|
||||
* - Transaction state tracking
|
||||
*/
|
||||
|
||||
/**
 * Transaction isolation level (mirrors the classic SQL isolation levels).
 */
export type TransactionIsolationLevel =
  | 'read_uncommitted'
  | 'read_committed'
  | 'repeatable_read'
  | 'serializable';

/**
 * Lifecycle state of a transaction, from `active` through either
 * `committed` or `rolled_back` (or `failed`).
 */
export type TransactionState =
  | 'active'
  | 'preparing'
  | 'prepared'
  | 'committing'
  | 'committed'
  | 'rolling_back'
  | 'rolled_back'
  | 'failed';

/**
 * Concurrency-control strategy for a transaction.
 */
export type LockingStrategy = 'optimistic' | 'pessimistic';

/**
 * Kind of document operation recorded inside a transaction.
 */
export type TransactionOperationType = 'read' | 'create' | 'update' | 'delete';
|
||||
|
||||
/**
 * A single document-level operation recorded inside a transaction.
 *
 * @typeParam T - Shape of the document being read or written
 */
export interface TransactionOperation<T = unknown> {
  /** Operation type */
  type: TransactionOperationType;

  /** Target index */
  index: string;

  /** Document ID */
  id: string;

  /** Document data (for create/update) */
  document?: T;

  /** Snapshot of the original document, kept so a rollback can restore it */
  originalDocument?: T;

  /** Version info for optimistic locking (Elasticsearch seq_no/primary_term) */
  version?: {
    seqNo: number;
    primaryTerm: number;
  };

  /** Timestamp when the operation was added to the transaction */
  timestamp: Date;

  /** Whether the operation has been executed */
  executed: boolean;

  /** Whether the operation has been committed */
  committed: boolean;

  /** Compensation operation used to undo this one on rollback */
  compensation?: TransactionOperation;
}
|
||||
|
||||
/**
 * Per-transaction configuration. All fields are optional; defaults are
 * applied by the manager (the context carries a `Required<TransactionConfig>`).
 */
export interface TransactionConfig {
  /** Transaction ID (auto-generated if not provided) */
  id?: string;

  /** Isolation level */
  isolationLevel?: TransactionIsolationLevel;

  /** Locking strategy */
  lockingStrategy?: LockingStrategy;

  /** Transaction timeout in milliseconds */
  timeout?: number;

  /** Enable automatic rollback on error */
  autoRollback?: boolean;

  /** Maximum retry attempts for conflicts */
  maxRetries?: number;

  /** Delay between retries, in milliseconds */
  retryDelay?: number;

  /** Enable strict ordering of operations */
  strictOrdering?: boolean;

  /** Arbitrary metadata for tracking/auditing */
  metadata?: Record<string, unknown>;
}
|
||||
|
||||
/**
 * Mutable runtime state of one in-flight transaction.
 */
export interface TransactionContext {
  /** Transaction ID */
  id: string;

  /** Current lifecycle state */
  state: TransactionState;

  /** Fully-resolved configuration (all defaults applied) */
  config: Required<TransactionConfig>;

  /** Operations recorded in this transaction */
  operations: TransactionOperation[];

  /** Read set: observed document versions (for repeatable-read isolation) */
  readSet: Map<string, { seqNo: number; primaryTerm: number }>;

  /** Write set: keys written by this transaction (for conflict detection) */
  writeSet: Set<string>;

  /** Transaction start time */
  startTime: Date;

  /** Transaction end time, once it has finished */
  endTime?: Date;

  /** Error if the transaction failed */
  error?: Error;

  /** Number of retry attempts performed so far */
  retryAttempts: number;
}
|
||||
|
||||
/**
 * Outcome of a completed (committed, rolled-back, or failed) transaction.
 */
export interface TransactionResult {
  /** Whether the transaction succeeded */
  success: boolean;

  /** Transaction ID */
  transactionId: string;

  /** Final lifecycle state */
  state: TransactionState;

  /** Number of operations executed */
  operationsExecuted: number;

  /** Number of operations committed */
  operationsCommitted: number;

  /** Number of operations rolled back */
  operationsRolledBack: number;

  /** Transaction duration in milliseconds */
  duration: number;

  /** Error details if the transaction failed */
  error?: {
    message: string;
    type: string;
    /** Operation that triggered the failure, when attributable */
    operation?: TransactionOperation;
  };

  /** Arbitrary metadata */
  metadata?: Record<string, unknown>;
}
|
||||
|
||||
/**
 * Aggregate statistics across transactions handled by a manager.
 */
export interface TransactionStats {
  /** Total transactions started */
  totalStarted: number;

  /** Total transactions committed */
  totalCommitted: number;

  /** Total transactions rolled back */
  totalRolledBack: number;

  /** Total transactions failed */
  totalFailed: number;

  /** Total operations executed */
  totalOperations: number;

  /** Total conflicts encountered */
  totalConflicts: number;

  /** Total retries performed */
  totalRetries: number;

  /** Average transaction duration in milliseconds */
  avgDuration: number;

  /** Average operations per transaction */
  avgOperationsPerTransaction: number;

  /** Success rate — NOTE(review): presumably committed/started; confirm in the manager */
  successRate: number;

  /** Number of currently active transactions */
  activeTransactions: number;
}
|
||||
|
||||
/**
 * Metadata about a lock held on a single document.
 */
export interface LockInfo {
  /** Document key in the form `index:id` */
  key: string;

  /** ID of the transaction holding the lock */
  transactionId: string;

  /** Lock type */
  type: 'read' | 'write';

  /** When the lock was acquired */
  acquiredAt: Date;

  /** When the lock expires */
  expiresAt: Date;

  /** Arbitrary lock metadata */
  metadata?: Record<string, unknown>;
}
|
||||
|
||||
/**
 * Strategy applied when a version/write conflict is detected.
 */
export type ConflictResolutionStrategy =
  | 'abort'   // Abort the transaction
  | 'retry'   // Retry the operation
  | 'skip'    // Skip the conflicting operation
  | 'force'   // Force the operation (last write wins)
  | 'merge';  // Attempt to merge changes
|
||||
|
||||
/**
 * Details about a detected conflict.
 */
export interface ConflictInfo {
  /** Operation that conflicted */
  operation: TransactionOperation;

  /** ID of the conflicting transaction, when known */
  conflictingTransactionId?: string;

  /** Version the transaction expected (seq_no/primary_term) */
  expectedVersion?: {
    seqNo: number;
    primaryTerm: number;
  };

  /** Version actually found */
  actualVersion?: {
    seqNo: number;
    primaryTerm: number;
  };

  /** When the conflict was detected */
  detectedAt: Date;
}
|
||||
|
||||
/**
 * Configuration for the transaction manager itself. Values here act as
 * defaults for individual transactions unless overridden per transaction.
 */
export interface TransactionManagerConfig {
  /** Default isolation level */
  defaultIsolationLevel?: TransactionIsolationLevel;

  /** Default locking strategy */
  defaultLockingStrategy?: LockingStrategy;

  /** Default transaction timeout in milliseconds */
  defaultTimeout?: number;

  /** Enable automatic cleanup of expired transactions */
  enableCleanup?: boolean;

  /** Cleanup interval in milliseconds */
  cleanupInterval?: number;

  /** Maximum number of concurrently active transactions */
  maxConcurrentTransactions?: number;

  /** Conflict resolution strategy */
  conflictResolution?: ConflictResolutionStrategy;

  /** Enable transaction logging */
  enableLogging?: boolean;

  /** Enable transaction metrics */
  enableMetrics?: boolean;
}
|
||||
|
||||
/**
 * A named marker inside a transaction that later operations can be rolled
 * back to (nested-transaction semantics).
 */
export interface Savepoint {
  /** Savepoint name */
  name: string;

  /** Number of operations recorded at the time the savepoint was taken */
  operationsCount: number;

  /** When the savepoint was created */
  createdAt: Date;

  /** Arbitrary metadata */
  metadata?: Record<string, unknown>;
}
|
||||
|
||||
/**
 * Optional lifecycle hooks invoked around transaction processing.
 * Each hook may be synchronous or return a promise.
 */
export interface TransactionCallbacks {
  /** Called before the transaction begins */
  onBegin?: (context: TransactionContext) => Promise<void> | void;

  /** Called before an operation executes */
  onBeforeOperation?: (operation: TransactionOperation) => Promise<void> | void;

  /** Called after an operation executes */
  onAfterOperation?: (operation: TransactionOperation) => Promise<void> | void;

  /** Called on a conflict; returns the strategy used to resolve it */
  onConflict?: (conflict: ConflictInfo) => Promise<ConflictResolutionStrategy> | ConflictResolutionStrategy;

  /** Called before commit */
  onBeforeCommit?: (context: TransactionContext) => Promise<void> | void;

  /** Called after commit */
  onAfterCommit?: (result: TransactionResult) => Promise<void> | void;

  /** Called before rollback */
  onBeforeRollback?: (context: TransactionContext) => Promise<void> | void;

  /** Called after rollback */
  onAfterRollback?: (result: TransactionResult) => Promise<void> | void;

  /** Called when the transaction errors */
  onError?: (error: Error, context: TransactionContext) => Promise<void> | void;
}
|
||||
Reference in New Issue
Block a user