feat(tsmdb): implement TsmDB Mongo-wire-compatible server, add storage/engine modules and reorganize exports
This commit is contained in:
283
ts/ts_tsmdb/engine/AggregationEngine.ts
Normal file
283
ts/ts_tsmdb/engine/AggregationEngine.ts
Normal file
@@ -0,0 +1,283 @@
|
||||
import * as plugins from '../plugins.js';
|
||||
import type { Document, IStoredDocument, IAggregateOptions } from '../types/interfaces.js';
|
||||
|
||||
// Import mingo Aggregator
|
||||
import { Aggregator } from 'mingo';
|
||||
|
||||
/**
|
||||
* Aggregation engine using mingo for MongoDB-compatible aggregation pipeline execution
|
||||
*/
|
||||
export class AggregationEngine {
|
||||
/**
|
||||
* Execute an aggregation pipeline on a collection of documents
|
||||
*/
|
||||
static aggregate(
|
||||
documents: IStoredDocument[],
|
||||
pipeline: Document[],
|
||||
options?: IAggregateOptions
|
||||
): Document[] {
|
||||
if (!pipeline || pipeline.length === 0) {
|
||||
return documents;
|
||||
}
|
||||
|
||||
// Create mingo aggregator with the pipeline
|
||||
const aggregator = new Aggregator(pipeline, {
|
||||
collation: options?.collation as any,
|
||||
});
|
||||
|
||||
// Run the aggregation
|
||||
const result = aggregator.run(documents);
|
||||
|
||||
return Array.isArray(result) ? result : [];
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute aggregation and return an iterator for lazy evaluation
|
||||
*/
|
||||
static *aggregateIterator(
|
||||
documents: IStoredDocument[],
|
||||
pipeline: Document[],
|
||||
options?: IAggregateOptions
|
||||
): Generator<Document> {
|
||||
const aggregator = new Aggregator(pipeline, {
|
||||
collation: options?.collation as any,
|
||||
});
|
||||
|
||||
// Get the cursor from mingo
|
||||
const cursor = aggregator.stream(documents);
|
||||
|
||||
for (const doc of cursor) {
|
||||
yield doc;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a $lookup stage manually (for cross-collection lookups)
|
||||
* This is used when the lookup references another collection in the same database
|
||||
*/
|
||||
static executeLookup(
|
||||
documents: IStoredDocument[],
|
||||
lookupSpec: {
|
||||
from: string;
|
||||
localField: string;
|
||||
foreignField: string;
|
||||
as: string;
|
||||
},
|
||||
foreignCollection: IStoredDocument[]
|
||||
): Document[] {
|
||||
const { localField, foreignField, as } = lookupSpec;
|
||||
|
||||
return documents.map(doc => {
|
||||
const localValue = this.getNestedValue(doc, localField);
|
||||
const matches = foreignCollection.filter(foreignDoc => {
|
||||
const foreignValue = this.getNestedValue(foreignDoc, foreignField);
|
||||
return this.valuesMatch(localValue, foreignValue);
|
||||
});
|
||||
|
||||
return {
|
||||
...doc,
|
||||
[as]: matches,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a $graphLookup stage manually
|
||||
*/
|
||||
static executeGraphLookup(
|
||||
documents: IStoredDocument[],
|
||||
graphLookupSpec: {
|
||||
from: string;
|
||||
startWith: string | Document;
|
||||
connectFromField: string;
|
||||
connectToField: string;
|
||||
as: string;
|
||||
maxDepth?: number;
|
||||
depthField?: string;
|
||||
restrictSearchWithMatch?: Document;
|
||||
},
|
||||
foreignCollection: IStoredDocument[]
|
||||
): Document[] {
|
||||
const {
|
||||
startWith,
|
||||
connectFromField,
|
||||
connectToField,
|
||||
as,
|
||||
maxDepth = 10,
|
||||
depthField,
|
||||
restrictSearchWithMatch,
|
||||
} = graphLookupSpec;
|
||||
|
||||
return documents.map(doc => {
|
||||
const startValue = typeof startWith === 'string' && startWith.startsWith('$')
|
||||
? this.getNestedValue(doc, startWith.slice(1))
|
||||
: startWith;
|
||||
|
||||
const results: Document[] = [];
|
||||
const visited = new Set<string>();
|
||||
const queue: Array<{ value: any; depth: number }> = [];
|
||||
|
||||
// Initialize with start value(s)
|
||||
const startValues = Array.isArray(startValue) ? startValue : [startValue];
|
||||
for (const val of startValues) {
|
||||
queue.push({ value: val, depth: 0 });
|
||||
}
|
||||
|
||||
while (queue.length > 0) {
|
||||
const { value, depth } = queue.shift()!;
|
||||
if (depth > maxDepth) continue;
|
||||
|
||||
const valueKey = JSON.stringify(value);
|
||||
if (visited.has(valueKey)) continue;
|
||||
visited.add(valueKey);
|
||||
|
||||
// Find matching documents
|
||||
for (const foreignDoc of foreignCollection) {
|
||||
const foreignValue = this.getNestedValue(foreignDoc, connectToField);
|
||||
|
||||
if (this.valuesMatch(value, foreignValue)) {
|
||||
// Check restrictSearchWithMatch
|
||||
if (restrictSearchWithMatch) {
|
||||
const matchQuery = new plugins.mingo.Query(restrictSearchWithMatch);
|
||||
if (!matchQuery.test(foreignDoc)) continue;
|
||||
}
|
||||
|
||||
const resultDoc = depthField
|
||||
? { ...foreignDoc, [depthField]: depth }
|
||||
: { ...foreignDoc };
|
||||
|
||||
// Avoid duplicates in results
|
||||
const docKey = foreignDoc._id.toHexString();
|
||||
if (!results.some(r => r._id?.toHexString?.() === docKey)) {
|
||||
results.push(resultDoc);
|
||||
|
||||
// Add connected values to queue
|
||||
const nextValue = this.getNestedValue(foreignDoc, connectFromField);
|
||||
if (nextValue !== undefined) {
|
||||
const nextValues = Array.isArray(nextValue) ? nextValue : [nextValue];
|
||||
for (const nv of nextValues) {
|
||||
queue.push({ value: nv, depth: depth + 1 });
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
...doc,
|
||||
[as]: results,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a $facet stage manually
|
||||
*/
|
||||
static executeFacet(
|
||||
documents: IStoredDocument[],
|
||||
facetSpec: Record<string, Document[]>
|
||||
): Document {
|
||||
const result: Document = {};
|
||||
|
||||
for (const [facetName, pipeline] of Object.entries(facetSpec)) {
|
||||
result[facetName] = this.aggregate(documents, pipeline);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a $unionWith stage
|
||||
*/
|
||||
static executeUnionWith(
|
||||
documents: IStoredDocument[],
|
||||
otherDocuments: IStoredDocument[],
|
||||
pipeline?: Document[]
|
||||
): Document[] {
|
||||
let unionDocs: Document[] = otherDocuments;
|
||||
if (pipeline && pipeline.length > 0) {
|
||||
unionDocs = this.aggregate(otherDocuments, pipeline);
|
||||
}
|
||||
return [...documents, ...unionDocs];
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a $merge stage (output to another collection)
|
||||
* Returns the documents that would be inserted/updated
|
||||
*/
|
||||
static prepareMerge(
|
||||
documents: Document[],
|
||||
mergeSpec: {
|
||||
into: string;
|
||||
on?: string | string[];
|
||||
whenMatched?: 'replace' | 'keepExisting' | 'merge' | 'fail' | Document[];
|
||||
whenNotMatched?: 'insert' | 'discard' | 'fail';
|
||||
}
|
||||
): {
|
||||
toInsert: Document[];
|
||||
toUpdate: Array<{ filter: Document; update: Document }>;
|
||||
onField: string | string[];
|
||||
whenMatched: string | Document[];
|
||||
whenNotMatched: string;
|
||||
} {
|
||||
const onField = mergeSpec.on || '_id';
|
||||
const whenMatched = mergeSpec.whenMatched || 'merge';
|
||||
const whenNotMatched = mergeSpec.whenNotMatched || 'insert';
|
||||
|
||||
return {
|
||||
toInsert: [],
|
||||
toUpdate: [],
|
||||
onField,
|
||||
whenMatched,
|
||||
whenNotMatched,
|
||||
};
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Helper Methods
|
||||
// ============================================================================
|
||||
|
||||
private static getNestedValue(obj: any, path: string): any {
|
||||
const parts = path.split('.');
|
||||
let current = obj;
|
||||
|
||||
for (const part of parts) {
|
||||
if (current === null || current === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
current = current[part];
|
||||
}
|
||||
|
||||
return current;
|
||||
}
|
||||
|
||||
private static valuesMatch(a: any, b: any): boolean {
|
||||
if (a === b) return true;
|
||||
|
||||
// Handle ObjectId comparison
|
||||
if (a instanceof plugins.bson.ObjectId && b instanceof plugins.bson.ObjectId) {
|
||||
return a.equals(b);
|
||||
}
|
||||
|
||||
// Handle array contains check
|
||||
if (Array.isArray(a)) {
|
||||
return a.some(item => this.valuesMatch(item, b));
|
||||
}
|
||||
if (Array.isArray(b)) {
|
||||
return b.some(item => this.valuesMatch(a, item));
|
||||
}
|
||||
|
||||
// Handle Date comparison
|
||||
if (a instanceof Date && b instanceof Date) {
|
||||
return a.getTime() === b.getTime();
|
||||
}
|
||||
|
||||
// Handle object comparison
|
||||
if (typeof a === 'object' && typeof b === 'object' && a !== null && b !== null) {
|
||||
return JSON.stringify(a) === JSON.stringify(b);
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
}
|
||||
798
ts/ts_tsmdb/engine/IndexEngine.ts
Normal file
798
ts/ts_tsmdb/engine/IndexEngine.ts
Normal file
@@ -0,0 +1,798 @@
|
||||
import * as plugins from '../plugins.js';
|
||||
import type { IStorageAdapter } from '../storage/IStorageAdapter.js';
|
||||
|
||||
// Simple B-Tree implementation for range queries
|
||||
// Since sorted-btree has ESM/CJS interop issues, we use a simple custom implementation
|
||||
class SimpleBTree<K, V> {
|
||||
private entries: Map<string, { key: K; value: V }> = new Map();
|
||||
private sortedKeys: K[] = [];
|
||||
private comparator: (a: K, b: K) => number;
|
||||
|
||||
constructor(_unused?: undefined, comparator?: (a: K, b: K) => number) {
|
||||
this.comparator = comparator || ((a: K, b: K) => {
|
||||
if (a < b) return -1;
|
||||
if (a > b) return 1;
|
||||
return 0;
|
||||
});
|
||||
}
|
||||
|
||||
private keyToString(key: K): string {
|
||||
return JSON.stringify(key);
|
||||
}
|
||||
|
||||
set(key: K, value: V): boolean {
|
||||
const keyStr = this.keyToString(key);
|
||||
const existed = this.entries.has(keyStr);
|
||||
this.entries.set(keyStr, { key, value });
|
||||
|
||||
if (!existed) {
|
||||
// Insert in sorted order
|
||||
const idx = this.sortedKeys.findIndex(k => this.comparator(k, key) > 0);
|
||||
if (idx === -1) {
|
||||
this.sortedKeys.push(key);
|
||||
} else {
|
||||
this.sortedKeys.splice(idx, 0, key);
|
||||
}
|
||||
}
|
||||
return !existed;
|
||||
}
|
||||
|
||||
get(key: K): V | undefined {
|
||||
const entry = this.entries.get(this.keyToString(key));
|
||||
return entry?.value;
|
||||
}
|
||||
|
||||
delete(key: K): boolean {
|
||||
const keyStr = this.keyToString(key);
|
||||
if (this.entries.has(keyStr)) {
|
||||
this.entries.delete(keyStr);
|
||||
const idx = this.sortedKeys.findIndex(k => this.comparator(k, key) === 0);
|
||||
if (idx !== -1) {
|
||||
this.sortedKeys.splice(idx, 1);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
forRange(
|
||||
lowKey: K | undefined,
|
||||
highKey: K | undefined,
|
||||
lowInclusive: boolean,
|
||||
highInclusive: boolean,
|
||||
callback: (value: V, key: K) => void
|
||||
): void {
|
||||
for (const key of this.sortedKeys) {
|
||||
// Check low bound
|
||||
if (lowKey !== undefined) {
|
||||
const cmp = this.comparator(key, lowKey);
|
||||
if (cmp < 0) continue;
|
||||
if (cmp === 0 && !lowInclusive) continue;
|
||||
}
|
||||
|
||||
// Check high bound
|
||||
if (highKey !== undefined) {
|
||||
const cmp = this.comparator(key, highKey);
|
||||
if (cmp > 0) break;
|
||||
if (cmp === 0 && !highInclusive) break;
|
||||
}
|
||||
|
||||
const entry = this.entries.get(this.keyToString(key));
|
||||
if (entry) {
|
||||
callback(entry.value, key);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
import type {
|
||||
Document,
|
||||
IStoredDocument,
|
||||
IIndexSpecification,
|
||||
IIndexInfo,
|
||||
ICreateIndexOptions,
|
||||
} from '../types/interfaces.js';
|
||||
import { TsmdbDuplicateKeyError, TsmdbIndexError } from '../errors/TsmdbErrors.js';
|
||||
import { QueryEngine } from './QueryEngine.js';
|
||||
|
||||
/**
|
||||
* Comparator for B-Tree that handles mixed types consistently
|
||||
*/
|
||||
function indexKeyComparator(a: any, b: any): number {
|
||||
// Handle null/undefined
|
||||
if (a === null || a === undefined) {
|
||||
if (b === null || b === undefined) return 0;
|
||||
return -1;
|
||||
}
|
||||
if (b === null || b === undefined) return 1;
|
||||
|
||||
// Handle arrays (compound keys)
|
||||
if (Array.isArray(a) && Array.isArray(b)) {
|
||||
for (let i = 0; i < Math.max(a.length, b.length); i++) {
|
||||
const cmp = indexKeyComparator(a[i], b[i]);
|
||||
if (cmp !== 0) return cmp;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Handle ObjectId
|
||||
if (a instanceof plugins.bson.ObjectId && b instanceof plugins.bson.ObjectId) {
|
||||
return a.toHexString().localeCompare(b.toHexString());
|
||||
}
|
||||
|
||||
// Handle Date
|
||||
if (a instanceof Date && b instanceof Date) {
|
||||
return a.getTime() - b.getTime();
|
||||
}
|
||||
|
||||
// Handle different types - use type ordering (null < number < string < object)
|
||||
const typeOrder = (v: any): number => {
|
||||
if (v === null || v === undefined) return 0;
|
||||
if (typeof v === 'number') return 1;
|
||||
if (typeof v === 'string') return 2;
|
||||
if (typeof v === 'boolean') return 3;
|
||||
if (v instanceof Date) return 4;
|
||||
if (v instanceof plugins.bson.ObjectId) return 5;
|
||||
return 6;
|
||||
};
|
||||
|
||||
const typeA = typeOrder(a);
|
||||
const typeB = typeOrder(b);
|
||||
if (typeA !== typeB) return typeA - typeB;
|
||||
|
||||
// Same type comparison
|
||||
if (typeof a === 'number') return a - b;
|
||||
if (typeof a === 'string') return a.localeCompare(b);
|
||||
if (typeof a === 'boolean') return (a ? 1 : 0) - (b ? 1 : 0);
|
||||
|
||||
// Fallback to string comparison
|
||||
return String(a).localeCompare(String(b));
|
||||
}
|
||||
|
||||
/**
|
||||
* Index data structure using B-Tree for range queries
|
||||
*/
|
||||
/**
 * In-memory data for a single index. Each index maintains two mirrored views
 * of the same entries — a B-Tree for ordered/range scans and a hash map for
 * O(1) equality lookups — both mapping an index key to the set of matching
 * document `_id` hex strings.
 */
interface IIndexData {
  name: string;
  // Field -> direction (1 / -1) or special index type string; key insertion
  // order defines the compound-key element order.
  key: Record<string, 1 | -1 | string>;
  unique: boolean;
  sparse: boolean;
  // TTL in seconds when this is a TTL index; undefined otherwise.
  expireAfterSeconds?: number;
  // B-Tree for ordered index lookups (supports range queries)
  btree: SimpleBTree<any, Set<string>>;
  // Hash map for fast equality lookups
  hashMap: Map<string, Set<string>>;
}
|
||||
|
||||
/**
|
||||
* Index engine for managing indexes and query optimization
|
||||
*/
|
||||
/**
 * Index engine for managing indexes and query optimization.
 *
 * Maintains, per collection, a set of in-memory indexes (see IIndexData) that
 * mirror documents held by the storage adapter. Callers are expected to invoke
 * onInsert/onUpdate/onDelete alongside the corresponding storage mutations so
 * the in-memory structures stay in sync.
 */
export class IndexEngine {
  private dbName: string;
  private collName: string;
  private storage: IStorageAdapter;
  // Index name -> in-memory index data.
  private indexes: Map<string, IIndexData> = new Map();
  // Guards against rebuilding indexes from storage more than once.
  private initialized = false;

  constructor(dbName: string, collName: string, storage: IStorageAdapter) {
    this.dbName = dbName;
    this.collName = collName;
    this.storage = storage;
  }

  /**
   * Initialize indexes from storage.
   * Loads the persisted index specs and rebuilds the hashMap/btree entries by
   * scanning every document in the collection. Idempotent (no-op after the
   * first call).
   */
  async initialize(): Promise<void> {
    if (this.initialized) return;

    const storedIndexes = await this.storage.getIndexes(this.dbName, this.collName);
    const documents = await this.storage.findAll(this.dbName, this.collName);

    for (const indexSpec of storedIndexes) {
      const indexData: IIndexData = {
        name: indexSpec.name,
        key: indexSpec.key,
        unique: indexSpec.unique || false,
        sparse: indexSpec.sparse || false,
        expireAfterSeconds: indexSpec.expireAfterSeconds,
        btree: new SimpleBTree<any, Set<string>>(undefined, indexKeyComparator),
        hashMap: new Map(),
      };

      // Build index entries
      for (const doc of documents) {
        const keyValue = this.extractKeyValue(doc, indexSpec.key);
        // Sparse indexes skip documents with a null key; everything else is indexed.
        if (keyValue !== null || !indexData.sparse) {
          const keyStr = JSON.stringify(keyValue);

          // Add to hash map
          if (!indexData.hashMap.has(keyStr)) {
            indexData.hashMap.set(keyStr, new Set());
          }
          indexData.hashMap.get(keyStr)!.add(doc._id.toHexString());

          // Add to B-tree
          const existing = indexData.btree.get(keyValue);
          if (existing) {
            existing.add(doc._id.toHexString());
          } else {
            indexData.btree.set(keyValue, new Set([doc._id.toHexString()]));
          }
        }
      }

      this.indexes.set(indexSpec.name, indexData);
    }

    this.initialized = true;
  }

  /**
   * Create a new index.
   *
   * Builds the index from all existing documents and persists the spec via the
   * storage adapter. Returns the index name. If an index with the same name
   * already exists this is a no-op (existing options are NOT compared).
   * Throws TsmdbDuplicateKeyError if a unique index would contain duplicate keys.
   */
  async createIndex(
    key: Record<string, 1 | -1 | 'text' | '2dsphere'>,
    options?: ICreateIndexOptions
  ): Promise<string> {
    await this.initialize();

    // Generate index name if not provided
    const name = options?.name || this.generateIndexName(key);

    // Check if index already exists
    if (this.indexes.has(name)) {
      return name;
    }

    // Create index data structure
    const indexData: IIndexData = {
      name,
      key: key as Record<string, 1 | -1 | string>,
      unique: options?.unique || false,
      sparse: options?.sparse || false,
      expireAfterSeconds: options?.expireAfterSeconds,
      btree: new SimpleBTree<any, Set<string>>(undefined, indexKeyComparator),
      hashMap: new Map(),
    };

    // Build index from existing documents
    const documents = await this.storage.findAll(this.dbName, this.collName);

    for (const doc of documents) {
      const keyValue = this.extractKeyValue(doc, key);

      if (keyValue === null && indexData.sparse) {
        continue;
      }

      const keyStr = JSON.stringify(keyValue);

      // Unique violation aborts the build before the index is registered or persisted.
      if (indexData.unique && indexData.hashMap.has(keyStr)) {
        throw new TsmdbDuplicateKeyError(
          `E11000 duplicate key error index: ${this.dbName}.${this.collName}.$${name}`,
          key as Record<string, 1>,
          keyValue
        );
      }

      // Add to hash map
      if (!indexData.hashMap.has(keyStr)) {
        indexData.hashMap.set(keyStr, new Set());
      }
      indexData.hashMap.get(keyStr)!.add(doc._id.toHexString());

      // Add to B-tree
      const existing = indexData.btree.get(keyValue);
      if (existing) {
        existing.add(doc._id.toHexString());
      } else {
        indexData.btree.set(keyValue, new Set([doc._id.toHexString()]));
      }
    }

    // Store index
    this.indexes.set(name, indexData);
    await this.storage.saveIndex(this.dbName, this.collName, name, {
      key,
      unique: options?.unique,
      sparse: options?.sparse,
      expireAfterSeconds: options?.expireAfterSeconds,
    });

    return name;
  }

  /**
   * Drop an index.
   * Throws TsmdbIndexError for the _id index or an unknown index name.
   */
  async dropIndex(name: string): Promise<void> {
    await this.initialize();

    if (name === '_id_') {
      throw new TsmdbIndexError('cannot drop _id index');
    }

    if (!this.indexes.has(name)) {
      throw new TsmdbIndexError(`index not found: ${name}`);
    }

    this.indexes.delete(name);
    await this.storage.dropIndex(this.dbName, this.collName, name);
  }

  /**
   * Drop all indexes except _id
   */
  async dropAllIndexes(): Promise<void> {
    await this.initialize();

    const names = Array.from(this.indexes.keys()).filter(n => n !== '_id_');
    for (const name of names) {
      this.indexes.delete(name);
      await this.storage.dropIndex(this.dbName, this.collName, name);
    }
  }

  /**
   * List all indexes in MongoDB listIndexes output shape (v: 2 documents).
   */
  async listIndexes(): Promise<IIndexInfo[]> {
    await this.initialize();

    return Array.from(this.indexes.values()).map(idx => ({
      v: 2,
      key: idx.key,
      name: idx.name,
      // false flags are reported as undefined so they serialize away.
      unique: idx.unique || undefined,
      sparse: idx.sparse || undefined,
      expireAfterSeconds: idx.expireAfterSeconds,
    }));
  }

  /**
   * Check if an index exists
   */
  async indexExists(name: string): Promise<boolean> {
    await this.initialize();
    return this.indexes.has(name);
  }

  /**
   * Update index entries after document insert.
   *
   * Throws TsmdbDuplicateKeyError on a unique-constraint violation.
   * NOTE(review): indexes are updated in iteration order; if a later index's
   * unique check throws, entries already added to earlier indexes for this
   * document are not rolled back — confirm callers treat this as fatal or
   * compensate.
   */
  async onInsert(doc: IStoredDocument): Promise<void> {
    await this.initialize();

    for (const [name, indexData] of this.indexes) {
      const keyValue = this.extractKeyValue(doc, indexData.key);

      if (keyValue === null && indexData.sparse) {
        continue;
      }

      const keyStr = JSON.stringify(keyValue);

      // Check unique constraint
      if (indexData.unique) {
        const existing = indexData.hashMap.get(keyStr);
        if (existing && existing.size > 0) {
          throw new TsmdbDuplicateKeyError(
            `E11000 duplicate key error collection: ${this.dbName}.${this.collName} index: ${name}`,
            indexData.key as Record<string, 1>,
            keyValue
          );
        }
      }

      // Add to hash map
      if (!indexData.hashMap.has(keyStr)) {
        indexData.hashMap.set(keyStr, new Set());
      }
      indexData.hashMap.get(keyStr)!.add(doc._id.toHexString());

      // Add to B-tree
      const btreeSet = indexData.btree.get(keyValue);
      if (btreeSet) {
        btreeSet.add(doc._id.toHexString());
      } else {
        indexData.btree.set(keyValue, new Set([doc._id.toHexString()]));
      }
    }
  }

  /**
   * Update index entries after document update.
   *
   * Only indexes whose key value actually changed (by JSON comparison) are
   * touched: the old entry is removed, then the new entry added.
   * NOTE(review): the unique-constraint check for the new key runs AFTER the
   * old entry has already been removed, so a thrown TsmdbDuplicateKeyError
   * leaves that index (and any earlier-updated indexes) partially mutated —
   * verify callers roll back or rebuild on failure.
   */
  async onUpdate(oldDoc: IStoredDocument, newDoc: IStoredDocument): Promise<void> {
    await this.initialize();

    for (const [name, indexData] of this.indexes) {
      const oldKeyValue = this.extractKeyValue(oldDoc, indexData.key);
      const newKeyValue = this.extractKeyValue(newDoc, indexData.key);
      const oldKeyStr = JSON.stringify(oldKeyValue);
      const newKeyStr = JSON.stringify(newKeyValue);

      // Remove old entry if key changed
      if (oldKeyStr !== newKeyStr) {
        if (oldKeyValue !== null || !indexData.sparse) {
          // Remove from hash map
          const oldHashSet = indexData.hashMap.get(oldKeyStr);
          if (oldHashSet) {
            oldHashSet.delete(oldDoc._id.toHexString());
            if (oldHashSet.size === 0) {
              indexData.hashMap.delete(oldKeyStr);
            }
          }

          // Remove from B-tree
          const oldBtreeSet = indexData.btree.get(oldKeyValue);
          if (oldBtreeSet) {
            oldBtreeSet.delete(oldDoc._id.toHexString());
            if (oldBtreeSet.size === 0) {
              indexData.btree.delete(oldKeyValue);
            }
          }
        }

        // Add new entry
        if (newKeyValue !== null || !indexData.sparse) {
          // Check unique constraint
          if (indexData.unique) {
            const existing = indexData.hashMap.get(newKeyStr);
            if (existing && existing.size > 0) {
              throw new TsmdbDuplicateKeyError(
                `E11000 duplicate key error collection: ${this.dbName}.${this.collName} index: ${name}`,
                indexData.key as Record<string, 1>,
                newKeyValue
              );
            }
          }

          // Add to hash map
          if (!indexData.hashMap.has(newKeyStr)) {
            indexData.hashMap.set(newKeyStr, new Set());
          }
          indexData.hashMap.get(newKeyStr)!.add(newDoc._id.toHexString());

          // Add to B-tree
          const newBtreeSet = indexData.btree.get(newKeyValue);
          if (newBtreeSet) {
            newBtreeSet.add(newDoc._id.toHexString());
          } else {
            indexData.btree.set(newKeyValue, new Set([newDoc._id.toHexString()]));
          }
        }
      }
    }
  }

  /**
   * Update index entries after document delete.
   * Removes the document's id from both the hash map and the B-tree of every
   * index, pruning now-empty key buckets.
   */
  async onDelete(doc: IStoredDocument): Promise<void> {
    await this.initialize();

    for (const indexData of this.indexes.values()) {
      const keyValue = this.extractKeyValue(doc, indexData.key);

      if (keyValue === null && indexData.sparse) {
        continue;
      }

      const keyStr = JSON.stringify(keyValue);

      // Remove from hash map
      const hashSet = indexData.hashMap.get(keyStr);
      if (hashSet) {
        hashSet.delete(doc._id.toHexString());
        if (hashSet.size === 0) {
          indexData.hashMap.delete(keyStr);
        }
      }

      // Remove from B-tree
      const btreeSet = indexData.btree.get(keyValue);
      if (btreeSet) {
        btreeSet.delete(doc._id.toHexString());
        if (btreeSet.size === 0) {
          indexData.btree.delete(keyValue);
        }
      }
    }
  }

  /**
   * Find the best index for a query.
   *
   * Scores each index by how many of its leading fields the filter constrains
   * (equality 2, $in 1.5, range 1, +0.5 for unique) and returns the highest
   * scorer, or null when no index is usable. Synchronous — assumes
   * initialize() has already run (callers go through findCandidateIds).
   */
  selectIndex(filter: Document): { name: string; data: IIndexData } | null {
    if (!filter || Object.keys(filter).length === 0) {
      return null;
    }

    // Get filter fields and operators
    const filterInfo = this.analyzeFilter(filter);

    // Score each index
    let bestIndex: { name: string; data: IIndexData } | null = null;
    let bestScore = 0;

    for (const [name, indexData] of this.indexes) {
      const indexFields = Object.keys(indexData.key);
      let score = 0;

      // Count how many index fields can be used
      for (const field of indexFields) {
        const info = filterInfo.get(field);
        // A compound index prefix must be contiguous: stop at the first
        // unconstrained field.
        if (!info) break;

        // Equality is best
        if (info.equality) {
          score += 2;
        } else if (info.range) {
          // Range queries can use B-tree
          score += 1;
        } else if (info.in) {
          score += 1.5;
        } else {
          break;
        }
      }

      // Prefer unique indexes
      if (indexData.unique && score > 0) {
        score += 0.5;
      }

      if (score > bestScore) {
        bestScore = score;
        bestIndex = { name, data: indexData };
      }
    }

    return bestIndex;
  }

  /**
   * Analyze filter to extract field operators.
   * Maps each non-operator top-level field to which operator classes
   * (equality, range, $in) constrain it, plus the operand values. Logical
   * operators ($and/$or/...) at the top level are skipped here.
   */
  private analyzeFilter(filter: Document): Map<string, { equality: boolean; range: boolean; in: boolean; ops: Record<string, any> }> {
    const result = new Map<string, { equality: boolean; range: boolean; in: boolean; ops: Record<string, any> }>();

    for (const [key, value] of Object.entries(filter)) {
      if (key.startsWith('$')) continue;

      const info = { equality: false, range: false, in: false, ops: {} as Record<string, any> };

      // Plain values (and ObjectId/Date instances) are implicit $eq; objects
      // are treated as operator documents.
      if (typeof value !== 'object' || value === null || value instanceof plugins.bson.ObjectId || value instanceof Date) {
        info.equality = true;
        info.ops['$eq'] = value;
      } else {
        const ops = value as Record<string, any>;
        if (ops.$eq !== undefined) {
          info.equality = true;
          info.ops['$eq'] = ops.$eq;
        }
        if (ops.$in !== undefined) {
          info.in = true;
          info.ops['$in'] = ops.$in;
        }
        if (ops.$gt !== undefined || ops.$gte !== undefined || ops.$lt !== undefined || ops.$lte !== undefined) {
          info.range = true;
          if (ops.$gt !== undefined) info.ops['$gt'] = ops.$gt;
          if (ops.$gte !== undefined) info.ops['$gte'] = ops.$gte;
          if (ops.$lt !== undefined) info.ops['$lt'] = ops.$lt;
          if (ops.$lte !== undefined) info.ops['$lte'] = ops.$lte;
        }
      }

      result.set(key, info);
    }

    return result;
  }

  /**
   * Use index to find candidate document IDs (supports range queries with B-tree).
   *
   * Returns the set of _id hex strings that may match the filter (a superset —
   * callers still apply the full filter), or null when no index applies and a
   * full collection scan is required.
   */
  async findCandidateIds(filter: Document): Promise<Set<string> | null> {
    await this.initialize();

    const index = this.selectIndex(filter);
    if (!index) return null;

    const filterInfo = this.analyzeFilter(filter);
    const indexFields = Object.keys(index.data.key);

    // For single-field indexes with range queries, use B-tree
    if (indexFields.length === 1) {
      const field = indexFields[0];
      const info = filterInfo.get(field);

      if (info) {
        // Handle equality using hash map (faster)
        if (info.equality) {
          const keyStr = JSON.stringify(info.ops['$eq']);
          return index.data.hashMap.get(keyStr) || new Set();
        }

        // Handle $in using hash map
        if (info.in) {
          const results = new Set<string>();
          for (const val of info.ops['$in']) {
            const keyStr = JSON.stringify(val);
            const ids = index.data.hashMap.get(keyStr);
            if (ids) {
              for (const id of ids) {
                results.add(id);
              }
            }
          }
          return results;
        }

        // Handle range queries using B-tree
        if (info.range) {
          return this.findRangeCandidates(index.data, info.ops);
        }
      }
    } else {
      // For compound indexes, use hash map with partial key matching
      const equalityValues: Record<string, any> = {};

      for (const field of indexFields) {
        const info = filterInfo.get(field);
        if (!info) break;

        if (info.equality) {
          equalityValues[field] = info.ops['$eq'];
        } else if (info.in) {
          // Handle $in with multiple lookups.
          // NOTE(review): this requires the FULL compound key — fields after
          // the $in field are looked up as null — so a partial prefix ending
          // in $in only matches documents where the remaining fields are null;
          // confirm this is intended.
          const results = new Set<string>();
          for (const val of info.ops['$in']) {
            equalityValues[field] = val;
            const keyStr = JSON.stringify(this.buildKeyValue(equalityValues, index.data.key));
            const ids = index.data.hashMap.get(keyStr);
            if (ids) {
              for (const id of ids) {
                results.add(id);
              }
            }
          }
          return results;
        } else {
          break; // Non-equality/in operator, stop here
        }
      }

      if (Object.keys(equalityValues).length > 0) {
        const keyStr = JSON.stringify(this.buildKeyValue(equalityValues, index.data.key));
        return index.data.hashMap.get(keyStr) || new Set();
      }
    }

    return null;
  }

  /**
   * Find candidates using B-tree range scan.
   * Translates $gt/$gte/$lt/$lte operands into B-tree bounds and collects all
   * document ids in the range.
   */
  private findRangeCandidates(indexData: IIndexData, ops: Record<string, any>): Set<string> {
    const results = new Set<string>();

    let lowKey: any = undefined;
    let highKey: any = undefined;
    let lowInclusive = true;
    let highInclusive = true;

    // $gte/$lte take precedence when both forms are present (applied last).
    if (ops['$gt'] !== undefined) {
      lowKey = ops['$gt'];
      lowInclusive = false;
    }
    if (ops['$gte'] !== undefined) {
      lowKey = ops['$gte'];
      lowInclusive = true;
    }
    if (ops['$lt'] !== undefined) {
      highKey = ops['$lt'];
      highInclusive = false;
    }
    if (ops['$lte'] !== undefined) {
      highKey = ops['$lte'];
      highInclusive = true;
    }

    // Use B-tree range iteration
    indexData.btree.forRange(lowKey, highKey, lowInclusive, highInclusive, (value, key) => {
      if (value) {
        for (const id of value) {
          results.add(id);
        }
      }
    });

    return results;
  }

  // ============================================================================
  // Helper Methods
  // ============================================================================

  // Default MongoDB-style index name: "field1_1_field2_-1".
  private generateIndexName(key: Record<string, any>): string {
    return Object.entries(key)
      .map(([field, dir]) => `${field}_${dir}`)
      .join('_');
  }

  // Extract the index key value from a document: the raw field value for a
  // single-field index, an array of values (missing => null) for compound keys.
  private extractKeyValue(doc: Document, key: Record<string, any>): any {
    const values: any[] = [];

    for (const field of Object.keys(key)) {
      const value = QueryEngine.getNestedValue(doc, field);
      values.push(value === undefined ? null : value);
    }

    // For single-field index, return the value directly
    if (values.length === 1) {
      return values[0];
    }

    return values;
  }

  // Build an index key from per-field values (missing fields => null), shaped
  // to match extractKeyValue's output.
  private buildKeyValue(values: Record<string, any>, key: Record<string, any>): any {
    const result: any[] = [];

    for (const field of Object.keys(key)) {
      result.push(values[field] !== undefined ? values[field] : null);
    }

    if (result.length === 1) {
      return result[0];
    }

    return result;
  }

  // Collect all field paths referenced by a filter, descending into
  // $and/$or/$nor arrays and nested sub-documents.
  private getFilterFields(filter: Document, prefix = ''): string[] {
    const fields: string[] = [];

    for (const [key, value] of Object.entries(filter)) {
      if (key.startsWith('$')) {
        // Logical operator
        if (key === '$and' || key === '$or' || key === '$nor') {
          for (const subFilter of value as Document[]) {
            fields.push(...this.getFilterFields(subFilter, prefix));
          }
        }
      } else {
        const fullKey = prefix ? `${prefix}.${key}` : key;
        fields.push(fullKey);

        // Check for nested filters
        if (typeof value === 'object' && value !== null && !Array.isArray(value)) {
          const subKeys = Object.keys(value);
          if (subKeys.length > 0 && !subKeys[0].startsWith('$')) {
            fields.push(...this.getFilterFields(value, fullKey));
          }
        }
      }
    }

    return fields;
  }

  // Resolve a dot-separated field path against the filter document itself;
  // undefined on any missing hop.
  private getFilterValue(filter: Document, field: string): any {
    // Handle dot notation
    const parts = field.split('.');
    let current: any = filter;

    for (const part of parts) {
      if (current === null || current === undefined) {
        return undefined;
      }
      current = current[part];
    }

    return current;
  }
}
|
||||
301
ts/ts_tsmdb/engine/QueryEngine.ts
Normal file
301
ts/ts_tsmdb/engine/QueryEngine.ts
Normal file
@@ -0,0 +1,301 @@
|
||||
import * as plugins from '../plugins.js';
|
||||
import type { Document, IStoredDocument, ISortSpecification, ISortDirection } from '../types/interfaces.js';
|
||||
|
||||
// Import mingo Query class
|
||||
import { Query } from 'mingo';
|
||||
|
||||
/**
|
||||
* Query engine using mingo for MongoDB-compatible query matching
|
||||
*/
|
||||
export class QueryEngine {
|
||||
/**
|
||||
* Filter documents by a MongoDB query filter
|
||||
*/
|
||||
static filter(documents: IStoredDocument[], filter: Document): IStoredDocument[] {
|
||||
if (!filter || Object.keys(filter).length === 0) {
|
||||
return documents;
|
||||
}
|
||||
|
||||
const query = new Query(filter);
|
||||
return documents.filter(doc => query.test(doc));
|
||||
}
|
||||
|
||||
/**
|
||||
* Test if a single document matches a filter
|
||||
*/
|
||||
static matches(document: Document, filter: Document): boolean {
|
||||
if (!filter || Object.keys(filter).length === 0) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const query = new Query(filter);
|
||||
return query.test(document);
|
||||
}
|
||||
|
||||
/**
|
||||
* Find a single document matching the filter
|
||||
*/
|
||||
static findOne(documents: IStoredDocument[], filter: Document): IStoredDocument | null {
|
||||
if (!filter || Object.keys(filter).length === 0) {
|
||||
return documents[0] || null;
|
||||
}
|
||||
|
||||
const query = new Query(filter);
|
||||
for (const doc of documents) {
|
||||
if (query.test(doc)) {
|
||||
return doc;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sort documents by a sort specification
|
||||
*/
|
||||
static sort(documents: IStoredDocument[], sort: ISortSpecification): IStoredDocument[] {
|
||||
if (!sort) {
|
||||
return documents;
|
||||
}
|
||||
|
||||
// Normalize sort specification to array of [field, direction] pairs
|
||||
const sortFields: Array<[string, number]> = [];
|
||||
|
||||
if (Array.isArray(sort)) {
|
||||
for (const [field, direction] of sort) {
|
||||
sortFields.push([field, this.normalizeDirection(direction)]);
|
||||
}
|
||||
} else {
|
||||
for (const [field, direction] of Object.entries(sort)) {
|
||||
sortFields.push([field, this.normalizeDirection(direction)]);
|
||||
}
|
||||
}
|
||||
|
||||
return [...documents].sort((a, b) => {
|
||||
for (const [field, direction] of sortFields) {
|
||||
const aVal = this.getNestedValue(a, field);
|
||||
const bVal = this.getNestedValue(b, field);
|
||||
|
||||
const comparison = this.compareValues(aVal, bVal);
|
||||
if (comparison !== 0) {
|
||||
return comparison * direction;
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply projection to documents
|
||||
*/
|
||||
static project(documents: IStoredDocument[], projection: Document): Document[] {
|
||||
if (!projection || Object.keys(projection).length === 0) {
|
||||
return documents;
|
||||
}
|
||||
|
||||
// Determine if this is inclusion or exclusion projection
|
||||
const keys = Object.keys(projection);
|
||||
const hasInclusion = keys.some(k => k !== '_id' && projection[k] === 1);
|
||||
const hasExclusion = keys.some(k => k !== '_id' && projection[k] === 0);
|
||||
|
||||
// Can't mix inclusion and exclusion (except for _id)
|
||||
if (hasInclusion && hasExclusion) {
|
||||
throw new Error('Cannot mix inclusion and exclusion in projection');
|
||||
}
|
||||
|
||||
return documents.map(doc => {
|
||||
if (hasInclusion) {
|
||||
// Inclusion projection
|
||||
const result: Document = {};
|
||||
|
||||
// Handle _id
|
||||
if (projection._id !== 0 && projection._id !== false) {
|
||||
result._id = doc._id;
|
||||
}
|
||||
|
||||
for (const key of keys) {
|
||||
if (key === '_id') continue;
|
||||
if (projection[key] === 1 || projection[key] === true) {
|
||||
const value = this.getNestedValue(doc, key);
|
||||
if (value !== undefined) {
|
||||
this.setNestedValue(result, key, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
} else {
|
||||
// Exclusion projection - start with copy and remove fields
|
||||
const result = { ...doc };
|
||||
|
||||
for (const key of keys) {
|
||||
if (projection[key] === 0 || projection[key] === false) {
|
||||
this.deleteNestedValue(result, key);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get distinct values for a field
|
||||
*/
|
||||
static distinct(documents: IStoredDocument[], field: string, filter?: Document): any[] {
|
||||
let docs = documents;
|
||||
if (filter && Object.keys(filter).length > 0) {
|
||||
docs = this.filter(documents, filter);
|
||||
}
|
||||
|
||||
const values = new Set<any>();
|
||||
for (const doc of docs) {
|
||||
const value = this.getNestedValue(doc, field);
|
||||
if (value !== undefined) {
|
||||
if (Array.isArray(value)) {
|
||||
// For arrays, add each element
|
||||
for (const v of value) {
|
||||
values.add(this.toComparable(v));
|
||||
}
|
||||
} else {
|
||||
values.add(this.toComparable(value));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return Array.from(values);
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalize sort direction to 1 or -1
|
||||
*/
|
||||
private static normalizeDirection(direction: ISortDirection): number {
|
||||
if (typeof direction === 'number') {
|
||||
return direction > 0 ? 1 : -1;
|
||||
}
|
||||
if (direction === 'asc' || direction === 'ascending') {
|
||||
return 1;
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a nested value from an object using dot notation
|
||||
*/
|
||||
static getNestedValue(obj: any, path: string): any {
|
||||
const parts = path.split('.');
|
||||
let current = obj;
|
||||
|
||||
for (const part of parts) {
|
||||
if (current === null || current === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
if (Array.isArray(current)) {
|
||||
// Handle array access
|
||||
const index = parseInt(part, 10);
|
||||
if (!isNaN(index)) {
|
||||
current = current[index];
|
||||
} else {
|
||||
// Get the field from all array elements
|
||||
return current.map(item => this.getNestedValue(item, part)).flat();
|
||||
}
|
||||
} else {
|
||||
current = current[part];
|
||||
}
|
||||
}
|
||||
|
||||
return current;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set a nested value in an object using dot notation
|
||||
*/
|
||||
private static setNestedValue(obj: any, path: string, value: any): void {
|
||||
const parts = path.split('.');
|
||||
let current = obj;
|
||||
|
||||
for (let i = 0; i < parts.length - 1; i++) {
|
||||
const part = parts[i];
|
||||
if (!(part in current)) {
|
||||
current[part] = {};
|
||||
}
|
||||
current = current[part];
|
||||
}
|
||||
|
||||
current[parts[parts.length - 1]] = value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a nested value from an object using dot notation
|
||||
*/
|
||||
private static deleteNestedValue(obj: any, path: string): void {
|
||||
const parts = path.split('.');
|
||||
let current = obj;
|
||||
|
||||
for (let i = 0; i < parts.length - 1; i++) {
|
||||
const part = parts[i];
|
||||
if (!(part in current)) {
|
||||
return;
|
||||
}
|
||||
current = current[part];
|
||||
}
|
||||
|
||||
delete current[parts[parts.length - 1]];
|
||||
}
|
||||
|
||||
/**
|
||||
* Compare two values for sorting
|
||||
*/
|
||||
private static compareValues(a: any, b: any): number {
|
||||
// Handle undefined/null
|
||||
if (a === undefined && b === undefined) return 0;
|
||||
if (a === undefined) return -1;
|
||||
if (b === undefined) return 1;
|
||||
if (a === null && b === null) return 0;
|
||||
if (a === null) return -1;
|
||||
if (b === null) return 1;
|
||||
|
||||
// Handle ObjectId
|
||||
if (a instanceof plugins.bson.ObjectId && b instanceof plugins.bson.ObjectId) {
|
||||
return a.toHexString().localeCompare(b.toHexString());
|
||||
}
|
||||
|
||||
// Handle dates
|
||||
if (a instanceof Date && b instanceof Date) {
|
||||
return a.getTime() - b.getTime();
|
||||
}
|
||||
|
||||
// Handle numbers
|
||||
if (typeof a === 'number' && typeof b === 'number') {
|
||||
return a - b;
|
||||
}
|
||||
|
||||
// Handle strings
|
||||
if (typeof a === 'string' && typeof b === 'string') {
|
||||
return a.localeCompare(b);
|
||||
}
|
||||
|
||||
// Handle booleans
|
||||
if (typeof a === 'boolean' && typeof b === 'boolean') {
|
||||
return (a ? 1 : 0) - (b ? 1 : 0);
|
||||
}
|
||||
|
||||
// Fall back to string comparison
|
||||
return String(a).localeCompare(String(b));
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert a value to a comparable form (for distinct)
|
||||
*/
|
||||
private static toComparable(value: any): any {
|
||||
if (value instanceof plugins.bson.ObjectId) {
|
||||
return value.toHexString();
|
||||
}
|
||||
if (value instanceof Date) {
|
||||
return value.toISOString();
|
||||
}
|
||||
if (typeof value === 'object' && value !== null) {
|
||||
return JSON.stringify(value);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
}
|
||||
393
ts/ts_tsmdb/engine/QueryPlanner.ts
Normal file
393
ts/ts_tsmdb/engine/QueryPlanner.ts
Normal file
@@ -0,0 +1,393 @@
|
||||
import * as plugins from '../plugins.js';
|
||||
import type { Document, IStoredDocument } from '../types/interfaces.js';
|
||||
import { IndexEngine } from './IndexEngine.js';
|
||||
|
||||
/**
|
||||
* Query execution plan types
|
||||
*/
|
||||
export type TQueryPlanType = 'IXSCAN' | 'COLLSCAN' | 'FETCH' | 'IXSCAN_RANGE';
|
||||
|
||||
/**
|
||||
* Represents a query execution plan
|
||||
*/
|
||||
export interface IQueryPlan {
|
||||
/** The type of scan used */
|
||||
type: TQueryPlanType;
|
||||
/** Index name if using an index */
|
||||
indexName?: string;
|
||||
/** Index key specification */
|
||||
indexKey?: Record<string, 1 | -1 | string>;
|
||||
/** Whether the query can be fully satisfied by the index */
|
||||
indexCovering: boolean;
|
||||
/** Estimated selectivity (0-1, lower is more selective) */
|
||||
selectivity: number;
|
||||
/** Whether range operators are used */
|
||||
usesRange: boolean;
|
||||
/** Fields used from the index */
|
||||
indexFieldsUsed: string[];
|
||||
/** Filter conditions that must be applied post-index lookup */
|
||||
residualFilter?: Document;
|
||||
/** Explanation for debugging */
|
||||
explanation: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Filter operator analysis
|
||||
*/
|
||||
interface IFilterOperatorInfo {
|
||||
field: string;
|
||||
operators: string[];
|
||||
equality: boolean;
|
||||
range: boolean;
|
||||
in: boolean;
|
||||
exists: boolean;
|
||||
regex: boolean;
|
||||
values: Record<string, any>;
|
||||
}
|
||||
|
||||
/**
|
||||
* QueryPlanner - Analyzes queries and selects optimal execution plans
|
||||
*/
|
||||
export class QueryPlanner {
|
||||
private indexEngine: IndexEngine;
|
||||
|
||||
constructor(indexEngine: IndexEngine) {
|
||||
this.indexEngine = indexEngine;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate an execution plan for a query filter
|
||||
*/
|
||||
async plan(filter: Document): Promise<IQueryPlan> {
|
||||
await this.indexEngine['initialize']();
|
||||
|
||||
// Empty filter = full collection scan
|
||||
if (!filter || Object.keys(filter).length === 0) {
|
||||
return {
|
||||
type: 'COLLSCAN',
|
||||
indexCovering: false,
|
||||
selectivity: 1.0,
|
||||
usesRange: false,
|
||||
indexFieldsUsed: [],
|
||||
explanation: 'No filter specified, full collection scan required',
|
||||
};
|
||||
}
|
||||
|
||||
// Analyze the filter
|
||||
const operatorInfo = this.analyzeFilter(filter);
|
||||
|
||||
// Get available indexes
|
||||
const indexes = await this.indexEngine.listIndexes();
|
||||
|
||||
// Score each index
|
||||
let bestPlan: IQueryPlan | null = null;
|
||||
let bestScore = -1;
|
||||
|
||||
for (const index of indexes) {
|
||||
const plan = this.scoreIndex(index, operatorInfo, filter);
|
||||
if (plan.selectivity < 1.0) {
|
||||
const score = this.calculateScore(plan);
|
||||
if (score > bestScore) {
|
||||
bestScore = score;
|
||||
bestPlan = plan;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If no suitable index found, fall back to collection scan
|
||||
if (!bestPlan || bestScore <= 0) {
|
||||
return {
|
||||
type: 'COLLSCAN',
|
||||
indexCovering: false,
|
||||
selectivity: 1.0,
|
||||
usesRange: false,
|
||||
indexFieldsUsed: [],
|
||||
explanation: 'No suitable index found for this query',
|
||||
};
|
||||
}
|
||||
|
||||
return bestPlan;
|
||||
}
|
||||
|
||||
/**
|
||||
* Analyze filter to extract operator information per field
|
||||
*/
|
||||
private analyzeFilter(filter: Document, prefix = ''): Map<string, IFilterOperatorInfo> {
|
||||
const result = new Map<string, IFilterOperatorInfo>();
|
||||
|
||||
for (const [key, value] of Object.entries(filter)) {
|
||||
// Skip logical operators at the top level
|
||||
if (key.startsWith('$')) {
|
||||
if (key === '$and' && Array.isArray(value)) {
|
||||
// Merge $and conditions
|
||||
for (const subFilter of value) {
|
||||
const subInfo = this.analyzeFilter(subFilter, prefix);
|
||||
for (const [field, info] of subInfo) {
|
||||
if (result.has(field)) {
|
||||
// Merge operators
|
||||
const existing = result.get(field)!;
|
||||
existing.operators.push(...info.operators);
|
||||
existing.equality = existing.equality || info.equality;
|
||||
existing.range = existing.range || info.range;
|
||||
existing.in = existing.in || info.in;
|
||||
Object.assign(existing.values, info.values);
|
||||
} else {
|
||||
result.set(field, info);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
const fullKey = prefix ? `${prefix}.${key}` : key;
|
||||
const info: IFilterOperatorInfo = {
|
||||
field: fullKey,
|
||||
operators: [],
|
||||
equality: false,
|
||||
range: false,
|
||||
in: false,
|
||||
exists: false,
|
||||
regex: false,
|
||||
values: {},
|
||||
};
|
||||
|
||||
if (typeof value !== 'object' || value === null || value instanceof plugins.bson.ObjectId || value instanceof Date) {
|
||||
// Direct equality
|
||||
info.equality = true;
|
||||
info.operators.push('$eq');
|
||||
info.values['$eq'] = value;
|
||||
} else if (Array.isArray(value)) {
|
||||
// Array equality (rare, but possible)
|
||||
info.equality = true;
|
||||
info.operators.push('$eq');
|
||||
info.values['$eq'] = value;
|
||||
} else {
|
||||
// Operator object
|
||||
for (const [op, opValue] of Object.entries(value)) {
|
||||
if (op.startsWith('$')) {
|
||||
info.operators.push(op);
|
||||
info.values[op] = opValue;
|
||||
|
||||
switch (op) {
|
||||
case '$eq':
|
||||
info.equality = true;
|
||||
break;
|
||||
case '$ne':
|
||||
case '$not':
|
||||
// These can use indexes but with low selectivity
|
||||
break;
|
||||
case '$in':
|
||||
info.in = true;
|
||||
break;
|
||||
case '$nin':
|
||||
// Can't efficiently use indexes
|
||||
break;
|
||||
case '$gt':
|
||||
case '$gte':
|
||||
case '$lt':
|
||||
case '$lte':
|
||||
info.range = true;
|
||||
break;
|
||||
case '$exists':
|
||||
info.exists = true;
|
||||
break;
|
||||
case '$regex':
|
||||
info.regex = true;
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
// Nested object - recurse
|
||||
const nestedInfo = this.analyzeFilter({ [op]: opValue }, fullKey);
|
||||
for (const [nestedField, nestedFieldInfo] of nestedInfo) {
|
||||
result.set(nestedField, nestedFieldInfo);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (info.operators.length > 0) {
|
||||
result.set(fullKey, info);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Score an index for the given filter
|
||||
*/
|
||||
private scoreIndex(
|
||||
index: { name: string; key: Record<string, any>; unique?: boolean; sparse?: boolean },
|
||||
operatorInfo: Map<string, IFilterOperatorInfo>,
|
||||
filter: Document
|
||||
): IQueryPlan {
|
||||
const indexFields = Object.keys(index.key);
|
||||
const usedFields: string[] = [];
|
||||
let usesRange = false;
|
||||
let canUseIndex = true;
|
||||
let selectivity = 1.0;
|
||||
let residualFilter: Document | undefined;
|
||||
|
||||
// Check each index field in order
|
||||
for (const field of indexFields) {
|
||||
const info = operatorInfo.get(field);
|
||||
if (!info) {
|
||||
// Index field not in filter - stop here
|
||||
break;
|
||||
}
|
||||
|
||||
usedFields.push(field);
|
||||
|
||||
// Calculate selectivity based on operator
|
||||
if (info.equality) {
|
||||
// Equality has high selectivity
|
||||
selectivity *= 0.01; // Assume 1% match
|
||||
} else if (info.in) {
|
||||
// $in selectivity depends on array size
|
||||
const inValues = info.values['$in'];
|
||||
if (Array.isArray(inValues)) {
|
||||
selectivity *= Math.min(0.5, inValues.length * 0.01);
|
||||
} else {
|
||||
selectivity *= 0.1;
|
||||
}
|
||||
} else if (info.range) {
|
||||
// Range queries have moderate selectivity
|
||||
selectivity *= 0.25;
|
||||
usesRange = true;
|
||||
// After range, can't use more index fields efficiently
|
||||
break;
|
||||
} else if (info.exists) {
|
||||
// $exists can use sparse indexes
|
||||
selectivity *= 0.5;
|
||||
} else {
|
||||
// Other operators may not be indexable
|
||||
canUseIndex = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (!canUseIndex || usedFields.length === 0) {
|
||||
return {
|
||||
type: 'COLLSCAN',
|
||||
indexCovering: false,
|
||||
selectivity: 1.0,
|
||||
usesRange: false,
|
||||
indexFieldsUsed: [],
|
||||
explanation: `Index ${index.name} cannot be used for this query`,
|
||||
};
|
||||
}
|
||||
|
||||
// Build residual filter for conditions not covered by index
|
||||
const coveredFields = new Set(usedFields);
|
||||
const residualConditions: Record<string, any> = {};
|
||||
for (const [field, info] of operatorInfo) {
|
||||
if (!coveredFields.has(field)) {
|
||||
// This field isn't covered by the index
|
||||
if (info.equality) {
|
||||
residualConditions[field] = info.values['$eq'];
|
||||
} else {
|
||||
residualConditions[field] = info.values;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (Object.keys(residualConditions).length > 0) {
|
||||
residualFilter = residualConditions;
|
||||
}
|
||||
|
||||
// Unique indexes have better selectivity for equality
|
||||
if (index.unique && usedFields.length === indexFields.length) {
|
||||
selectivity = Math.min(selectivity, 0.001); // At most 1 document
|
||||
}
|
||||
|
||||
return {
|
||||
type: usesRange ? 'IXSCAN_RANGE' : 'IXSCAN',
|
||||
indexName: index.name,
|
||||
indexKey: index.key,
|
||||
indexCovering: Object.keys(residualConditions).length === 0,
|
||||
selectivity,
|
||||
usesRange,
|
||||
indexFieldsUsed: usedFields,
|
||||
residualFilter,
|
||||
explanation: `Using index ${index.name} on fields [${usedFields.join(', ')}]`,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate overall score for a plan (higher is better)
|
||||
*/
|
||||
private calculateScore(plan: IQueryPlan): number {
|
||||
let score = 0;
|
||||
|
||||
// Lower selectivity is better (fewer documents to fetch)
|
||||
score += (1 - plan.selectivity) * 100;
|
||||
|
||||
// Index covering queries are best
|
||||
if (plan.indexCovering) {
|
||||
score += 50;
|
||||
}
|
||||
|
||||
// More index fields used is better
|
||||
score += plan.indexFieldsUsed.length * 10;
|
||||
|
||||
// Equality scans are better than range scans
|
||||
if (!plan.usesRange) {
|
||||
score += 20;
|
||||
}
|
||||
|
||||
return score;
|
||||
}
|
||||
|
||||
/**
|
||||
* Explain a query - returns detailed plan information
|
||||
*/
|
||||
async explain(filter: Document): Promise<{
|
||||
queryPlanner: {
|
||||
plannerVersion: number;
|
||||
namespace: string;
|
||||
indexFilterSet: boolean;
|
||||
winningPlan: IQueryPlan;
|
||||
rejectedPlans: IQueryPlan[];
|
||||
};
|
||||
}> {
|
||||
await this.indexEngine['initialize']();
|
||||
|
||||
// Analyze the filter
|
||||
const operatorInfo = this.analyzeFilter(filter);
|
||||
|
||||
// Get available indexes
|
||||
const indexes = await this.indexEngine.listIndexes();
|
||||
|
||||
// Score all indexes
|
||||
const plans: IQueryPlan[] = [];
|
||||
|
||||
for (const index of indexes) {
|
||||
const plan = this.scoreIndex(index, operatorInfo, filter);
|
||||
plans.push(plan);
|
||||
}
|
||||
|
||||
// Add collection scan as fallback
|
||||
plans.push({
|
||||
type: 'COLLSCAN',
|
||||
indexCovering: false,
|
||||
selectivity: 1.0,
|
||||
usesRange: false,
|
||||
indexFieldsUsed: [],
|
||||
explanation: 'Full collection scan',
|
||||
});
|
||||
|
||||
// Sort by score (best first)
|
||||
plans.sort((a, b) => this.calculateScore(b) - this.calculateScore(a));
|
||||
|
||||
return {
|
||||
queryPlanner: {
|
||||
plannerVersion: 1,
|
||||
namespace: `${this.indexEngine['dbName']}.${this.indexEngine['collName']}`,
|
||||
indexFilterSet: false,
|
||||
winningPlan: plans[0],
|
||||
rejectedPlans: plans.slice(1),
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
292
ts/ts_tsmdb/engine/SessionEngine.ts
Normal file
292
ts/ts_tsmdb/engine/SessionEngine.ts
Normal file
@@ -0,0 +1,292 @@
|
||||
import * as plugins from '../plugins.js';
|
||||
import type { TransactionEngine } from './TransactionEngine.js';
|
||||
|
||||
/**
|
||||
* Session state
|
||||
*/
|
||||
export interface ISession {
|
||||
/** Session ID (UUID) */
|
||||
id: string;
|
||||
/** Timestamp when the session was created */
|
||||
createdAt: number;
|
||||
/** Timestamp of the last activity */
|
||||
lastActivityAt: number;
|
||||
/** Current transaction ID if any */
|
||||
txnId?: string;
|
||||
/** Transaction number for ordering */
|
||||
txnNumber?: number;
|
||||
/** Whether the session is in a transaction */
|
||||
inTransaction: boolean;
|
||||
/** Session metadata */
|
||||
metadata?: Record<string, any>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Session engine options
|
||||
*/
|
||||
export interface ISessionEngineOptions {
|
||||
/** Session timeout in milliseconds (default: 30 minutes) */
|
||||
sessionTimeoutMs?: number;
|
||||
/** Interval to check for expired sessions in ms (default: 60 seconds) */
|
||||
cleanupIntervalMs?: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Session engine for managing client sessions
|
||||
* - Tracks session lifecycle (create, touch, end)
|
||||
* - Links sessions to transactions
|
||||
* - Auto-aborts transactions on session expiry
|
||||
*/
|
||||
export class SessionEngine {
|
||||
private sessions: Map<string, ISession> = new Map();
|
||||
private sessionTimeoutMs: number;
|
||||
private cleanupInterval?: ReturnType<typeof setInterval>;
|
||||
private transactionEngine?: TransactionEngine;
|
||||
|
||||
constructor(options?: ISessionEngineOptions) {
|
||||
this.sessionTimeoutMs = options?.sessionTimeoutMs ?? 30 * 60 * 1000; // 30 minutes default
|
||||
const cleanupIntervalMs = options?.cleanupIntervalMs ?? 60 * 1000; // 1 minute default
|
||||
|
||||
// Start cleanup interval
|
||||
this.cleanupInterval = setInterval(() => {
|
||||
this.cleanupExpiredSessions();
|
||||
}, cleanupIntervalMs);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the transaction engine to use for auto-abort
|
||||
*/
|
||||
setTransactionEngine(engine: TransactionEngine): void {
|
||||
this.transactionEngine = engine;
|
||||
}
|
||||
|
||||
/**
|
||||
* Start a new session
|
||||
*/
|
||||
startSession(sessionId?: string, metadata?: Record<string, any>): ISession {
|
||||
const id = sessionId ?? new plugins.bson.UUID().toHexString();
|
||||
const now = Date.now();
|
||||
|
||||
const session: ISession = {
|
||||
id,
|
||||
createdAt: now,
|
||||
lastActivityAt: now,
|
||||
inTransaction: false,
|
||||
metadata,
|
||||
};
|
||||
|
||||
this.sessions.set(id, session);
|
||||
return session;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a session by ID
|
||||
*/
|
||||
getSession(sessionId: string): ISession | undefined {
|
||||
const session = this.sessions.get(sessionId);
|
||||
if (session && this.isSessionExpired(session)) {
|
||||
// Session expired, clean it up
|
||||
this.endSession(sessionId);
|
||||
return undefined;
|
||||
}
|
||||
return session;
|
||||
}
|
||||
|
||||
/**
|
||||
* Touch a session to update last activity time
|
||||
*/
|
||||
touchSession(sessionId: string): boolean {
|
||||
const session = this.sessions.get(sessionId);
|
||||
if (!session) return false;
|
||||
|
||||
if (this.isSessionExpired(session)) {
|
||||
this.endSession(sessionId);
|
||||
return false;
|
||||
}
|
||||
|
||||
session.lastActivityAt = Date.now();
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* End a session explicitly
|
||||
* This will also abort any active transaction
|
||||
*/
|
||||
async endSession(sessionId: string): Promise<boolean> {
|
||||
const session = this.sessions.get(sessionId);
|
||||
if (!session) return false;
|
||||
|
||||
// If session has an active transaction, abort it
|
||||
if (session.inTransaction && session.txnId && this.transactionEngine) {
|
||||
try {
|
||||
await this.transactionEngine.abortTransaction(session.txnId);
|
||||
} catch (e) {
|
||||
// Ignore abort errors during cleanup
|
||||
}
|
||||
}
|
||||
|
||||
this.sessions.delete(sessionId);
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Start a transaction in a session
|
||||
*/
|
||||
startTransaction(sessionId: string, txnId: string, txnNumber?: number): boolean {
|
||||
const session = this.sessions.get(sessionId);
|
||||
if (!session) return false;
|
||||
|
||||
if (this.isSessionExpired(session)) {
|
||||
this.endSession(sessionId);
|
||||
return false;
|
||||
}
|
||||
|
||||
session.txnId = txnId;
|
||||
session.txnNumber = txnNumber;
|
||||
session.inTransaction = true;
|
||||
session.lastActivityAt = Date.now();
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* End a transaction in a session (commit or abort)
|
||||
*/
|
||||
endTransaction(sessionId: string): boolean {
|
||||
const session = this.sessions.get(sessionId);
|
||||
if (!session) return false;
|
||||
|
||||
session.txnId = undefined;
|
||||
session.txnNumber = undefined;
|
||||
session.inTransaction = false;
|
||||
session.lastActivityAt = Date.now();
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get transaction ID for a session
|
||||
*/
|
||||
getTransactionId(sessionId: string): string | undefined {
|
||||
const session = this.sessions.get(sessionId);
|
||||
return session?.txnId;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if session is in a transaction
|
||||
*/
|
||||
isInTransaction(sessionId: string): boolean {
|
||||
const session = this.sessions.get(sessionId);
|
||||
return session?.inTransaction ?? false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a session is expired
|
||||
*/
|
||||
isSessionExpired(session: ISession): boolean {
|
||||
return Date.now() - session.lastActivityAt > this.sessionTimeoutMs;
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleanup expired sessions
|
||||
* This is called periodically by the cleanup interval
|
||||
*/
|
||||
private async cleanupExpiredSessions(): Promise<void> {
|
||||
const expiredSessions: string[] = [];
|
||||
|
||||
for (const [id, session] of this.sessions) {
|
||||
if (this.isSessionExpired(session)) {
|
||||
expiredSessions.push(id);
|
||||
}
|
||||
}
|
||||
|
||||
// End all expired sessions (this will also abort their transactions)
|
||||
for (const sessionId of expiredSessions) {
|
||||
await this.endSession(sessionId);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all active sessions
|
||||
*/
|
||||
listSessions(): ISession[] {
|
||||
const activeSessions: ISession[] = [];
|
||||
for (const session of this.sessions.values()) {
|
||||
if (!this.isSessionExpired(session)) {
|
||||
activeSessions.push(session);
|
||||
}
|
||||
}
|
||||
return activeSessions;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get session count
|
||||
*/
|
||||
getSessionCount(): number {
|
||||
return this.sessions.size;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get sessions with active transactions
|
||||
*/
|
||||
getSessionsWithTransactions(): ISession[] {
|
||||
return this.listSessions().filter(s => s.inTransaction);
|
||||
}
|
||||
|
||||
/**
|
||||
* Refresh session timeout
|
||||
*/
|
||||
refreshSession(sessionId: string): boolean {
|
||||
return this.touchSession(sessionId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Close the session engine and cleanup
|
||||
*/
|
||||
close(): void {
|
||||
if (this.cleanupInterval) {
|
||||
clearInterval(this.cleanupInterval);
|
||||
this.cleanupInterval = undefined;
|
||||
}
|
||||
|
||||
// Clear all sessions
|
||||
this.sessions.clear();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get or create a session for a given session ID
|
||||
* Useful for handling MongoDB driver session requests
|
||||
*/
|
||||
getOrCreateSession(sessionId: string): ISession {
|
||||
let session = this.getSession(sessionId);
|
||||
if (!session) {
|
||||
session = this.startSession(sessionId);
|
||||
} else {
|
||||
this.touchSession(sessionId);
|
||||
}
|
||||
return session;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract session ID from MongoDB lsid (logical session ID)
|
||||
*/
|
||||
static extractSessionId(lsid: any): string | undefined {
|
||||
if (!lsid) return undefined;
|
||||
|
||||
// MongoDB session ID format: { id: UUID }
|
||||
if (lsid.id) {
|
||||
if (lsid.id instanceof plugins.bson.UUID) {
|
||||
return lsid.id.toHexString();
|
||||
}
|
||||
if (typeof lsid.id === 'string') {
|
||||
return lsid.id;
|
||||
}
|
||||
if (lsid.id.$binary?.base64) {
|
||||
// Binary UUID format
|
||||
return Buffer.from(lsid.id.$binary.base64, 'base64').toString('hex');
|
||||
}
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
351
ts/ts_tsmdb/engine/TransactionEngine.ts
Normal file
351
ts/ts_tsmdb/engine/TransactionEngine.ts
Normal file
@@ -0,0 +1,351 @@
|
||||
import * as plugins from '../plugins.js';
|
||||
import type { IStorageAdapter } from '../storage/IStorageAdapter.js';
|
||||
import type { Document, IStoredDocument, ITransactionOptions } from '../types/interfaces.js';
|
||||
import { TsmdbTransactionError, TsmdbWriteConflictError } from '../errors/TsmdbErrors.js';
|
||||
|
||||
/**
 * In-memory bookkeeping for a single multi-statement transaction.
 *
 * Namespaces ("ns") below are "<db>.<collection>" strings. Reads and writes
 * are buffered here until commit; snapshots provide a stable read view.
 */
export interface ITransactionState {
  // Unique transaction identifier, e.g. "txn_<sessionId>_<counter>".
  id: string;
  // Logical session that owns this transaction.
  sessionId: string;
  // BSON timestamp captured at transaction start; used for conflict detection.
  startTime: plugins.bson.Timestamp;
  // Lifecycle: transitions active -> committed or active -> aborted, once.
  status: 'active' | 'committed' | 'aborted';
  readSet: Map<string, Set<string>>; // ns -> document _ids read
  writeSet: Map<string, Map<string, { op: 'insert' | 'update' | 'delete'; doc?: IStoredDocument; originalDoc?: IStoredDocument }>>; // ns -> _id -> operation
  snapshots: Map<string, IStoredDocument[]>; // ns -> snapshot of documents
}
|
||||
|
||||
/**
|
||||
* Transaction engine for ACID transaction support
|
||||
*/
|
||||
export class TransactionEngine {
|
||||
private storage: IStorageAdapter;
|
||||
private transactions: Map<string, ITransactionState> = new Map();
|
||||
private txnCounter = 0;
|
||||
|
||||
constructor(storage: IStorageAdapter) {
|
||||
this.storage = storage;
|
||||
}
|
||||
|
||||
/**
|
||||
* Start a new transaction
|
||||
*/
|
||||
startTransaction(sessionId: string, options?: ITransactionOptions): string {
|
||||
this.txnCounter++;
|
||||
const txnId = `txn_${sessionId}_${this.txnCounter}`;
|
||||
|
||||
const transaction: ITransactionState = {
|
||||
id: txnId,
|
||||
sessionId,
|
||||
startTime: new plugins.bson.Timestamp({ t: Math.floor(Date.now() / 1000), i: this.txnCounter }),
|
||||
status: 'active',
|
||||
readSet: new Map(),
|
||||
writeSet: new Map(),
|
||||
snapshots: new Map(),
|
||||
};
|
||||
|
||||
this.transactions.set(txnId, transaction);
|
||||
return txnId;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a transaction by ID
|
||||
*/
|
||||
getTransaction(txnId: string): ITransactionState | undefined {
|
||||
return this.transactions.get(txnId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a transaction is active
|
||||
*/
|
||||
isActive(txnId: string): boolean {
|
||||
const txn = this.transactions.get(txnId);
|
||||
return txn?.status === 'active';
|
||||
}
|
||||
|
||||
/**
|
||||
* Get or create a snapshot for a namespace
|
||||
*/
|
||||
async getSnapshot(txnId: string, dbName: string, collName: string): Promise<IStoredDocument[]> {
|
||||
const txn = this.transactions.get(txnId);
|
||||
if (!txn || txn.status !== 'active') {
|
||||
throw new TsmdbTransactionError('Transaction is not active');
|
||||
}
|
||||
|
||||
const ns = `${dbName}.${collName}`;
|
||||
if (!txn.snapshots.has(ns)) {
|
||||
const snapshot = await this.storage.createSnapshot(dbName, collName);
|
||||
txn.snapshots.set(ns, snapshot);
|
||||
}
|
||||
|
||||
// Apply transaction writes to snapshot
|
||||
const snapshot = txn.snapshots.get(ns)!;
|
||||
const writes = txn.writeSet.get(ns);
|
||||
|
||||
if (!writes) {
|
||||
return snapshot;
|
||||
}
|
||||
|
||||
// Create a modified view of the snapshot
|
||||
const result: IStoredDocument[] = [];
|
||||
const deletedIds = new Set<string>();
|
||||
const modifiedDocs = new Map<string, IStoredDocument>();
|
||||
|
||||
for (const [idStr, write] of writes) {
|
||||
if (write.op === 'delete') {
|
||||
deletedIds.add(idStr);
|
||||
} else if (write.op === 'update' || write.op === 'insert') {
|
||||
if (write.doc) {
|
||||
modifiedDocs.set(idStr, write.doc);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add existing documents (not deleted, possibly modified)
|
||||
for (const doc of snapshot) {
|
||||
const idStr = doc._id.toHexString();
|
||||
if (deletedIds.has(idStr)) {
|
||||
continue;
|
||||
}
|
||||
if (modifiedDocs.has(idStr)) {
|
||||
result.push(modifiedDocs.get(idStr)!);
|
||||
modifiedDocs.delete(idStr);
|
||||
} else {
|
||||
result.push(doc);
|
||||
}
|
||||
}
|
||||
|
||||
// Add new documents (inserts)
|
||||
for (const doc of modifiedDocs.values()) {
|
||||
result.push(doc);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Record a read operation
|
||||
*/
|
||||
recordRead(txnId: string, dbName: string, collName: string, docIds: string[]): void {
|
||||
const txn = this.transactions.get(txnId);
|
||||
if (!txn || txn.status !== 'active') return;
|
||||
|
||||
const ns = `${dbName}.${collName}`;
|
||||
if (!txn.readSet.has(ns)) {
|
||||
txn.readSet.set(ns, new Set());
|
||||
}
|
||||
|
||||
const readSet = txn.readSet.get(ns)!;
|
||||
for (const id of docIds) {
|
||||
readSet.add(id);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Record a write operation (insert)
|
||||
*/
|
||||
recordInsert(txnId: string, dbName: string, collName: string, doc: IStoredDocument): void {
|
||||
const txn = this.transactions.get(txnId);
|
||||
if (!txn || txn.status !== 'active') {
|
||||
throw new TsmdbTransactionError('Transaction is not active');
|
||||
}
|
||||
|
||||
const ns = `${dbName}.${collName}`;
|
||||
if (!txn.writeSet.has(ns)) {
|
||||
txn.writeSet.set(ns, new Map());
|
||||
}
|
||||
|
||||
txn.writeSet.get(ns)!.set(doc._id.toHexString(), {
|
||||
op: 'insert',
|
||||
doc,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Record a write operation (update)
|
||||
*/
|
||||
recordUpdate(
|
||||
txnId: string,
|
||||
dbName: string,
|
||||
collName: string,
|
||||
originalDoc: IStoredDocument,
|
||||
updatedDoc: IStoredDocument
|
||||
): void {
|
||||
const txn = this.transactions.get(txnId);
|
||||
if (!txn || txn.status !== 'active') {
|
||||
throw new TsmdbTransactionError('Transaction is not active');
|
||||
}
|
||||
|
||||
const ns = `${dbName}.${collName}`;
|
||||
if (!txn.writeSet.has(ns)) {
|
||||
txn.writeSet.set(ns, new Map());
|
||||
}
|
||||
|
||||
const idStr = originalDoc._id.toHexString();
|
||||
const existing = txn.writeSet.get(ns)!.get(idStr);
|
||||
|
||||
// If we already have a write for this document, update it
|
||||
if (existing) {
|
||||
existing.doc = updatedDoc;
|
||||
} else {
|
||||
txn.writeSet.get(ns)!.set(idStr, {
|
||||
op: 'update',
|
||||
doc: updatedDoc,
|
||||
originalDoc,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Record a write operation (delete)
|
||||
*/
|
||||
recordDelete(txnId: string, dbName: string, collName: string, doc: IStoredDocument): void {
|
||||
const txn = this.transactions.get(txnId);
|
||||
if (!txn || txn.status !== 'active') {
|
||||
throw new TsmdbTransactionError('Transaction is not active');
|
||||
}
|
||||
|
||||
const ns = `${dbName}.${collName}`;
|
||||
if (!txn.writeSet.has(ns)) {
|
||||
txn.writeSet.set(ns, new Map());
|
||||
}
|
||||
|
||||
const idStr = doc._id.toHexString();
|
||||
const existing = txn.writeSet.get(ns)!.get(idStr);
|
||||
|
||||
if (existing && existing.op === 'insert') {
|
||||
// If we inserted and then deleted, just remove the write
|
||||
txn.writeSet.get(ns)!.delete(idStr);
|
||||
} else {
|
||||
txn.writeSet.get(ns)!.set(idStr, {
|
||||
op: 'delete',
|
||||
originalDoc: doc,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Commit a transaction
|
||||
*/
|
||||
async commitTransaction(txnId: string): Promise<void> {
|
||||
const txn = this.transactions.get(txnId);
|
||||
if (!txn) {
|
||||
throw new TsmdbTransactionError('Transaction not found');
|
||||
}
|
||||
if (txn.status !== 'active') {
|
||||
throw new TsmdbTransactionError(`Cannot commit transaction in state: ${txn.status}`);
|
||||
}
|
||||
|
||||
// Check for write conflicts
|
||||
for (const [ns, writes] of txn.writeSet) {
|
||||
const [dbName, collName] = ns.split('.');
|
||||
const ids = Array.from(writes.keys()).map(id => new plugins.bson.ObjectId(id));
|
||||
|
||||
const hasConflicts = await this.storage.hasConflicts(dbName, collName, ids, txn.startTime);
|
||||
if (hasConflicts) {
|
||||
txn.status = 'aborted';
|
||||
throw new TsmdbWriteConflictError();
|
||||
}
|
||||
}
|
||||
|
||||
// Apply all writes
|
||||
for (const [ns, writes] of txn.writeSet) {
|
||||
const [dbName, collName] = ns.split('.');
|
||||
|
||||
for (const [idStr, write] of writes) {
|
||||
switch (write.op) {
|
||||
case 'insert':
|
||||
if (write.doc) {
|
||||
await this.storage.insertOne(dbName, collName, write.doc);
|
||||
}
|
||||
break;
|
||||
case 'update':
|
||||
if (write.doc) {
|
||||
await this.storage.updateById(dbName, collName, new plugins.bson.ObjectId(idStr), write.doc);
|
||||
}
|
||||
break;
|
||||
case 'delete':
|
||||
await this.storage.deleteById(dbName, collName, new plugins.bson.ObjectId(idStr));
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
txn.status = 'committed';
|
||||
}
|
||||
|
||||
/**
|
||||
* Abort a transaction
|
||||
*/
|
||||
async abortTransaction(txnId: string): Promise<void> {
|
||||
const txn = this.transactions.get(txnId);
|
||||
if (!txn) {
|
||||
throw new TsmdbTransactionError('Transaction not found');
|
||||
}
|
||||
if (txn.status !== 'active') {
|
||||
// Already committed or aborted, just return
|
||||
return;
|
||||
}
|
||||
|
||||
// Simply discard all buffered writes
|
||||
txn.writeSet.clear();
|
||||
txn.readSet.clear();
|
||||
txn.snapshots.clear();
|
||||
txn.status = 'aborted';
|
||||
}
|
||||
|
||||
/**
|
||||
* End a transaction (cleanup)
|
||||
*/
|
||||
endTransaction(txnId: string): void {
|
||||
this.transactions.delete(txnId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all pending writes for a namespace
|
||||
*/
|
||||
getPendingWrites(txnId: string, dbName: string, collName: string): Map<string, { op: 'insert' | 'update' | 'delete'; doc?: IStoredDocument }> | undefined {
|
||||
const txn = this.transactions.get(txnId);
|
||||
if (!txn) return undefined;
|
||||
|
||||
const ns = `${dbName}.${collName}`;
|
||||
return txn.writeSet.get(ns);
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a callback within a transaction, with automatic retry on conflict
|
||||
*/
|
||||
async withTransaction<T>(
|
||||
sessionId: string,
|
||||
callback: (txnId: string) => Promise<T>,
|
||||
options?: ITransactionOptions & { maxRetries?: number }
|
||||
): Promise<T> {
|
||||
const maxRetries = options?.maxRetries ?? 3;
|
||||
let lastError: Error | undefined;
|
||||
|
||||
for (let attempt = 0; attempt < maxRetries; attempt++) {
|
||||
const txnId = this.startTransaction(sessionId, options);
|
||||
|
||||
try {
|
||||
const result = await callback(txnId);
|
||||
await this.commitTransaction(txnId);
|
||||
this.endTransaction(txnId);
|
||||
return result;
|
||||
} catch (error: any) {
|
||||
await this.abortTransaction(txnId);
|
||||
this.endTransaction(txnId);
|
||||
|
||||
if (error instanceof TsmdbWriteConflictError && attempt < maxRetries - 1) {
|
||||
// Retry on write conflict
|
||||
lastError = error;
|
||||
continue;
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
throw lastError || new TsmdbTransactionError('Transaction failed after max retries');
|
||||
}
|
||||
}
|
||||
506
ts/ts_tsmdb/engine/UpdateEngine.ts
Normal file
506
ts/ts_tsmdb/engine/UpdateEngine.ts
Normal file
@@ -0,0 +1,506 @@
|
||||
import * as plugins from '../plugins.js';
|
||||
import type { Document, IStoredDocument } from '../types/interfaces.js';
|
||||
import { QueryEngine } from './QueryEngine.js';
|
||||
|
||||
/**
|
||||
* Update engine for MongoDB-compatible update operations
|
||||
*/
|
||||
export class UpdateEngine {
|
||||
/**
|
||||
* Apply an update specification to a document
|
||||
* Returns the updated document or null if no update was applied
|
||||
*/
|
||||
static applyUpdate(document: IStoredDocument, update: Document, arrayFilters?: Document[]): IStoredDocument {
|
||||
// Check if this is an aggregation pipeline update
|
||||
if (Array.isArray(update)) {
|
||||
// Aggregation pipeline updates are not yet supported
|
||||
throw new Error('Aggregation pipeline updates are not yet supported');
|
||||
}
|
||||
|
||||
// Check if this is a replacement (no $ operators at top level)
|
||||
const hasOperators = Object.keys(update).some(k => k.startsWith('$'));
|
||||
|
||||
if (!hasOperators) {
|
||||
// This is a replacement - preserve _id
|
||||
return {
|
||||
_id: document._id,
|
||||
...update,
|
||||
};
|
||||
}
|
||||
|
||||
// Apply update operators
|
||||
const result = this.deepClone(document);
|
||||
|
||||
for (const [operator, operand] of Object.entries(update)) {
|
||||
switch (operator) {
|
||||
case '$set':
|
||||
this.applySet(result, operand);
|
||||
break;
|
||||
case '$unset':
|
||||
this.applyUnset(result, operand);
|
||||
break;
|
||||
case '$inc':
|
||||
this.applyInc(result, operand);
|
||||
break;
|
||||
case '$mul':
|
||||
this.applyMul(result, operand);
|
||||
break;
|
||||
case '$min':
|
||||
this.applyMin(result, operand);
|
||||
break;
|
||||
case '$max':
|
||||
this.applyMax(result, operand);
|
||||
break;
|
||||
case '$rename':
|
||||
this.applyRename(result, operand);
|
||||
break;
|
||||
case '$currentDate':
|
||||
this.applyCurrentDate(result, operand);
|
||||
break;
|
||||
case '$setOnInsert':
|
||||
// Only applied during upsert insert, handled elsewhere
|
||||
break;
|
||||
case '$push':
|
||||
this.applyPush(result, operand, arrayFilters);
|
||||
break;
|
||||
case '$pop':
|
||||
this.applyPop(result, operand);
|
||||
break;
|
||||
case '$pull':
|
||||
this.applyPull(result, operand, arrayFilters);
|
||||
break;
|
||||
case '$pullAll':
|
||||
this.applyPullAll(result, operand);
|
||||
break;
|
||||
case '$addToSet':
|
||||
this.applyAddToSet(result, operand);
|
||||
break;
|
||||
case '$bit':
|
||||
this.applyBit(result, operand);
|
||||
break;
|
||||
default:
|
||||
throw new Error(`Unknown update operator: ${operator}`);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply $setOnInsert for upsert operations
|
||||
*/
|
||||
static applySetOnInsert(document: IStoredDocument, setOnInsert: Document): IStoredDocument {
|
||||
const result = this.deepClone(document);
|
||||
this.applySet(result, setOnInsert);
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Deep clone a document
|
||||
*/
|
||||
private static deepClone(obj: any): any {
|
||||
if (obj === null || typeof obj !== 'object') {
|
||||
return obj;
|
||||
}
|
||||
|
||||
if (obj instanceof plugins.bson.ObjectId) {
|
||||
return new plugins.bson.ObjectId(obj.toHexString());
|
||||
}
|
||||
|
||||
if (obj instanceof Date) {
|
||||
return new Date(obj.getTime());
|
||||
}
|
||||
|
||||
if (obj instanceof plugins.bson.Timestamp) {
|
||||
return new plugins.bson.Timestamp({ t: obj.high, i: obj.low });
|
||||
}
|
||||
|
||||
if (Array.isArray(obj)) {
|
||||
return obj.map(item => this.deepClone(item));
|
||||
}
|
||||
|
||||
const cloned: any = {};
|
||||
for (const key of Object.keys(obj)) {
|
||||
cloned[key] = this.deepClone(obj[key]);
|
||||
}
|
||||
return cloned;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set a nested value
|
||||
*/
|
||||
private static setNestedValue(obj: any, path: string, value: any): void {
|
||||
const parts = path.split('.');
|
||||
let current = obj;
|
||||
|
||||
for (let i = 0; i < parts.length - 1; i++) {
|
||||
const part = parts[i];
|
||||
|
||||
// Handle array index notation
|
||||
const arrayMatch = part.match(/^(\w+)\[(\d+)\]$/);
|
||||
if (arrayMatch) {
|
||||
const [, fieldName, indexStr] = arrayMatch;
|
||||
const index = parseInt(indexStr, 10);
|
||||
if (!(fieldName in current)) {
|
||||
current[fieldName] = [];
|
||||
}
|
||||
if (!current[fieldName][index]) {
|
||||
current[fieldName][index] = {};
|
||||
}
|
||||
current = current[fieldName][index];
|
||||
continue;
|
||||
}
|
||||
|
||||
// Handle numeric index (array positional)
|
||||
const numIndex = parseInt(part, 10);
|
||||
if (!isNaN(numIndex) && Array.isArray(current)) {
|
||||
if (!current[numIndex]) {
|
||||
current[numIndex] = {};
|
||||
}
|
||||
current = current[numIndex];
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!(part in current) || current[part] === null) {
|
||||
current[part] = {};
|
||||
}
|
||||
current = current[part];
|
||||
}
|
||||
|
||||
const lastPart = parts[parts.length - 1];
|
||||
const numIndex = parseInt(lastPart, 10);
|
||||
if (!isNaN(numIndex) && Array.isArray(current)) {
|
||||
current[numIndex] = value;
|
||||
} else {
|
||||
current[lastPart] = value;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a nested value
|
||||
*/
|
||||
private static getNestedValue(obj: any, path: string): any {
|
||||
return QueryEngine.getNestedValue(obj, path);
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a nested value
|
||||
*/
|
||||
private static deleteNestedValue(obj: any, path: string): void {
|
||||
const parts = path.split('.');
|
||||
let current = obj;
|
||||
|
||||
for (let i = 0; i < parts.length - 1; i++) {
|
||||
const part = parts[i];
|
||||
if (!(part in current)) {
|
||||
return;
|
||||
}
|
||||
current = current[part];
|
||||
}
|
||||
|
||||
delete current[parts[parts.length - 1]];
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Field Update Operators
|
||||
// ============================================================================
|
||||
|
||||
private static applySet(doc: any, fields: Document): void {
|
||||
for (const [path, value] of Object.entries(fields)) {
|
||||
this.setNestedValue(doc, path, this.deepClone(value));
|
||||
}
|
||||
}
|
||||
|
||||
private static applyUnset(doc: any, fields: Document): void {
|
||||
for (const path of Object.keys(fields)) {
|
||||
this.deleteNestedValue(doc, path);
|
||||
}
|
||||
}
|
||||
|
||||
private static applyInc(doc: any, fields: Document): void {
|
||||
for (const [path, value] of Object.entries(fields)) {
|
||||
const current = this.getNestedValue(doc, path) || 0;
|
||||
if (typeof current !== 'number') {
|
||||
throw new Error(`Cannot apply $inc to non-numeric field: ${path}`);
|
||||
}
|
||||
this.setNestedValue(doc, path, current + (value as number));
|
||||
}
|
||||
}
|
||||
|
||||
private static applyMul(doc: any, fields: Document): void {
|
||||
for (const [path, value] of Object.entries(fields)) {
|
||||
const current = this.getNestedValue(doc, path) || 0;
|
||||
if (typeof current !== 'number') {
|
||||
throw new Error(`Cannot apply $mul to non-numeric field: ${path}`);
|
||||
}
|
||||
this.setNestedValue(doc, path, current * (value as number));
|
||||
}
|
||||
}
|
||||
|
||||
private static applyMin(doc: any, fields: Document): void {
|
||||
for (const [path, value] of Object.entries(fields)) {
|
||||
const current = this.getNestedValue(doc, path);
|
||||
if (current === undefined || this.compareValues(value, current) < 0) {
|
||||
this.setNestedValue(doc, path, this.deepClone(value));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static applyMax(doc: any, fields: Document): void {
|
||||
for (const [path, value] of Object.entries(fields)) {
|
||||
const current = this.getNestedValue(doc, path);
|
||||
if (current === undefined || this.compareValues(value, current) > 0) {
|
||||
this.setNestedValue(doc, path, this.deepClone(value));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static applyRename(doc: any, fields: Document): void {
|
||||
for (const [oldPath, newPath] of Object.entries(fields)) {
|
||||
const value = this.getNestedValue(doc, oldPath);
|
||||
if (value !== undefined) {
|
||||
this.deleteNestedValue(doc, oldPath);
|
||||
this.setNestedValue(doc, newPath as string, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static applyCurrentDate(doc: any, fields: Document): void {
|
||||
for (const [path, spec] of Object.entries(fields)) {
|
||||
if (spec === true) {
|
||||
this.setNestedValue(doc, path, new Date());
|
||||
} else if (typeof spec === 'object' && spec.$type === 'date') {
|
||||
this.setNestedValue(doc, path, new Date());
|
||||
} else if (typeof spec === 'object' && spec.$type === 'timestamp') {
|
||||
this.setNestedValue(doc, path, new plugins.bson.Timestamp({ t: Math.floor(Date.now() / 1000), i: 0 }));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Array Update Operators
|
||||
// ============================================================================
|
||||
|
||||
private static applyPush(doc: any, fields: Document, arrayFilters?: Document[]): void {
|
||||
for (const [path, spec] of Object.entries(fields)) {
|
||||
let arr = this.getNestedValue(doc, path);
|
||||
if (arr === undefined) {
|
||||
arr = [];
|
||||
this.setNestedValue(doc, path, arr);
|
||||
}
|
||||
if (!Array.isArray(arr)) {
|
||||
throw new Error(`Cannot apply $push to non-array field: ${path}`);
|
||||
}
|
||||
|
||||
if (spec && typeof spec === 'object' && '$each' in spec) {
|
||||
// $push with modifiers
|
||||
let elements = (spec.$each as any[]).map(e => this.deepClone(e));
|
||||
const position = spec.$position as number | undefined;
|
||||
const slice = spec.$slice as number | undefined;
|
||||
const sortSpec = spec.$sort;
|
||||
|
||||
if (position !== undefined) {
|
||||
arr.splice(position, 0, ...elements);
|
||||
} else {
|
||||
arr.push(...elements);
|
||||
}
|
||||
|
||||
if (sortSpec !== undefined) {
|
||||
if (typeof sortSpec === 'number') {
|
||||
arr.sort((a, b) => (a - b) * sortSpec);
|
||||
} else {
|
||||
// Sort by field(s)
|
||||
const entries = Object.entries(sortSpec as Document);
|
||||
arr.sort((a, b) => {
|
||||
for (const [field, dir] of entries) {
|
||||
const av = this.getNestedValue(a, field);
|
||||
const bv = this.getNestedValue(b, field);
|
||||
const cmp = this.compareValues(av, bv) * (dir as number);
|
||||
if (cmp !== 0) return cmp;
|
||||
}
|
||||
return 0;
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (slice !== undefined) {
|
||||
if (slice >= 0) {
|
||||
arr.splice(slice);
|
||||
} else {
|
||||
arr.splice(0, arr.length + slice);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Simple push
|
||||
arr.push(this.deepClone(spec));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static applyPop(doc: any, fields: Document): void {
|
||||
for (const [path, direction] of Object.entries(fields)) {
|
||||
const arr = this.getNestedValue(doc, path);
|
||||
if (!Array.isArray(arr)) {
|
||||
throw new Error(`Cannot apply $pop to non-array field: ${path}`);
|
||||
}
|
||||
|
||||
if ((direction as number) === 1) {
|
||||
arr.pop();
|
||||
} else {
|
||||
arr.shift();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static applyPull(doc: any, fields: Document, arrayFilters?: Document[]): void {
|
||||
for (const [path, condition] of Object.entries(fields)) {
|
||||
const arr = this.getNestedValue(doc, path);
|
||||
if (!Array.isArray(arr)) {
|
||||
continue; // Skip if not an array
|
||||
}
|
||||
|
||||
if (typeof condition === 'object' && condition !== null && !Array.isArray(condition)) {
|
||||
// Condition is a query filter
|
||||
const hasOperators = Object.keys(condition).some(k => k.startsWith('$'));
|
||||
if (hasOperators) {
|
||||
// Filter using query operators
|
||||
const remaining = arr.filter(item => !QueryEngine.matches(item, condition));
|
||||
arr.length = 0;
|
||||
arr.push(...remaining);
|
||||
} else {
|
||||
// Match documents with all specified fields
|
||||
const remaining = arr.filter(item => {
|
||||
if (typeof item !== 'object' || item === null) {
|
||||
return true;
|
||||
}
|
||||
return !Object.entries(condition).every(([k, v]) => {
|
||||
const itemVal = this.getNestedValue(item, k);
|
||||
return this.valuesEqual(itemVal, v);
|
||||
});
|
||||
});
|
||||
arr.length = 0;
|
||||
arr.push(...remaining);
|
||||
}
|
||||
} else {
|
||||
// Direct value match
|
||||
const remaining = arr.filter(item => !this.valuesEqual(item, condition));
|
||||
arr.length = 0;
|
||||
arr.push(...remaining);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static applyPullAll(doc: any, fields: Document): void {
|
||||
for (const [path, values] of Object.entries(fields)) {
|
||||
const arr = this.getNestedValue(doc, path);
|
||||
if (!Array.isArray(arr)) {
|
||||
continue;
|
||||
}
|
||||
if (!Array.isArray(values)) {
|
||||
throw new Error(`$pullAll requires an array argument`);
|
||||
}
|
||||
|
||||
const valueSet = new Set(values.map(v => JSON.stringify(v)));
|
||||
const remaining = arr.filter(item => !valueSet.has(JSON.stringify(item)));
|
||||
arr.length = 0;
|
||||
arr.push(...remaining);
|
||||
}
|
||||
}
|
||||
|
||||
private static applyAddToSet(doc: any, fields: Document): void {
|
||||
for (const [path, spec] of Object.entries(fields)) {
|
||||
let arr = this.getNestedValue(doc, path);
|
||||
if (arr === undefined) {
|
||||
arr = [];
|
||||
this.setNestedValue(doc, path, arr);
|
||||
}
|
||||
if (!Array.isArray(arr)) {
|
||||
throw new Error(`Cannot apply $addToSet to non-array field: ${path}`);
|
||||
}
|
||||
|
||||
const existingSet = new Set(arr.map(v => JSON.stringify(v)));
|
||||
|
||||
if (spec && typeof spec === 'object' && '$each' in spec) {
|
||||
for (const item of spec.$each as any[]) {
|
||||
const key = JSON.stringify(item);
|
||||
if (!existingSet.has(key)) {
|
||||
arr.push(this.deepClone(item));
|
||||
existingSet.add(key);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
const key = JSON.stringify(spec);
|
||||
if (!existingSet.has(key)) {
|
||||
arr.push(this.deepClone(spec));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static applyBit(doc: any, fields: Document): void {
|
||||
for (const [path, operations] of Object.entries(fields)) {
|
||||
let current = this.getNestedValue(doc, path) || 0;
|
||||
if (typeof current !== 'number') {
|
||||
throw new Error(`Cannot apply $bit to non-numeric field: ${path}`);
|
||||
}
|
||||
|
||||
for (const [op, value] of Object.entries(operations as Document)) {
|
||||
switch (op) {
|
||||
case 'and':
|
||||
current = current & (value as number);
|
||||
break;
|
||||
case 'or':
|
||||
current = current | (value as number);
|
||||
break;
|
||||
case 'xor':
|
||||
current = current ^ (value as number);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
this.setNestedValue(doc, path, current);
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Helper Methods
|
||||
// ============================================================================
|
||||
|
||||
private static compareValues(a: any, b: any): number {
|
||||
if (a === b) return 0;
|
||||
if (a === null || a === undefined) return -1;
|
||||
if (b === null || b === undefined) return 1;
|
||||
|
||||
if (typeof a === 'number' && typeof b === 'number') {
|
||||
return a - b;
|
||||
}
|
||||
|
||||
if (a instanceof Date && b instanceof Date) {
|
||||
return a.getTime() - b.getTime();
|
||||
}
|
||||
|
||||
if (typeof a === 'string' && typeof b === 'string') {
|
||||
return a.localeCompare(b);
|
||||
}
|
||||
|
||||
return String(a).localeCompare(String(b));
|
||||
}
|
||||
|
||||
private static valuesEqual(a: any, b: any): boolean {
|
||||
if (a === b) return true;
|
||||
|
||||
if (a instanceof plugins.bson.ObjectId && b instanceof plugins.bson.ObjectId) {
|
||||
return a.equals(b);
|
||||
}
|
||||
|
||||
if (a instanceof Date && b instanceof Date) {
|
||||
return a.getTime() === b.getTime();
|
||||
}
|
||||
|
||||
if (typeof a === 'object' && typeof b === 'object' && a !== null && b !== null) {
|
||||
return JSON.stringify(a) === JSON.stringify(b);
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user