BREAKING CHANGE(tsmdb): rename CongoDB to TsmDB and relocate/rename wire-protocol server implementation and public exports
This commit is contained in:
283
ts/tsmdb/engine/AggregationEngine.ts
Normal file
283
ts/tsmdb/engine/AggregationEngine.ts
Normal file
@@ -0,0 +1,283 @@
|
||||
import * as plugins from '../tsmdb.plugins.js';
|
||||
import type { Document, IStoredDocument, IAggregateOptions } from '../types/interfaces.js';
|
||||
|
||||
// Import mingo Aggregator
|
||||
import { Aggregator } from 'mingo';
|
||||
|
||||
/**
 * Aggregation engine using mingo for MongoDB-compatible aggregation pipeline execution.
 *
 * Plain pipeline stages are delegated to mingo's `Aggregator`; stages that need
 * access to *other* collections ($lookup, $graphLookup, $unionWith, $merge) have
 * manual implementations here because mingo only sees one document array at a time.
 */
export class AggregationEngine {
  /**
   * Execute an aggregation pipeline on a collection of documents.
   *
   * An empty/missing pipeline returns the input array unchanged (same reference).
   * Only the `collation` option is forwarded to mingo.
   */
  static aggregate(
    documents: IStoredDocument[],
    pipeline: Document[],
    options?: IAggregateOptions
  ): Document[] {
    if (!pipeline || pipeline.length === 0) {
      return documents;
    }

    // Create mingo aggregator with the pipeline
    const aggregator = new Aggregator(pipeline, {
      collation: options?.collation as any,
    });

    // Run the aggregation
    const result = aggregator.run(documents);

    // mingo should return an array; guard defensively and normalize to [].
    return Array.isArray(result) ? result : [];
  }

  /**
   * Execute aggregation and return an iterator for lazy evaluation.
   * Uses mingo's streaming cursor so results are produced on demand.
   */
  static *aggregateIterator(
    documents: IStoredDocument[],
    pipeline: Document[],
    options?: IAggregateOptions
  ): Generator<Document> {
    const aggregator = new Aggregator(pipeline, {
      collation: options?.collation as any,
    });

    // Get the cursor from mingo
    const cursor = aggregator.stream(documents);

    for (const doc of cursor) {
      yield doc;
    }
  }

  /**
   * Execute a $lookup stage manually (for cross-collection lookups).
   * This is used when the lookup references another collection in the same database.
   *
   * Each output document is a shallow copy of the input with `as` set to ALL
   * matching foreign documents (equality join on localField/foreignField,
   * with array-contains semantics via valuesMatch).
   * Note: `lookupSpec.from` is unused here — the caller resolves it to
   * `foreignCollection` before calling.
   */
  static executeLookup(
    documents: IStoredDocument[],
    lookupSpec: {
      from: string;
      localField: string;
      foreignField: string;
      as: string;
    },
    foreignCollection: IStoredDocument[]
  ): Document[] {
    const { localField, foreignField, as } = lookupSpec;

    return documents.map(doc => {
      const localValue = this.getNestedValue(doc, localField);
      const matches = foreignCollection.filter(foreignDoc => {
        const foreignValue = this.getNestedValue(foreignDoc, foreignField);
        return this.valuesMatch(localValue, foreignValue);
      });

      return {
        ...doc,
        [as]: matches,
      };
    });
  }

  /**
   * Execute a $graphLookup stage manually.
   *
   * Breadth-first traversal over `foreignCollection`: starting from
   * `startWith` (a "$field" path into the input doc, or a literal value),
   * repeatedly matches connectToField and follows connectFromField.
   * - `maxDepth` defaults to 10 (depth 0 = direct matches of startWith).
   * - `depthField`, when set, annotates each result with its discovery depth.
   * - `restrictSearchWithMatch` filters candidate foreign docs with a mingo Query.
   * Cycle protection: values already expanded are tracked via JSON.stringify
   * in `visited`; result docs are deduped by `_id` hex string.
   */
  static executeGraphLookup(
    documents: IStoredDocument[],
    graphLookupSpec: {
      from: string;
      startWith: string | Document;
      connectFromField: string;
      connectToField: string;
      as: string;
      maxDepth?: number;
      depthField?: string;
      restrictSearchWithMatch?: Document;
    },
    foreignCollection: IStoredDocument[]
  ): Document[] {
    const {
      startWith,
      connectFromField,
      connectToField,
      as,
      maxDepth = 10,
      depthField,
      restrictSearchWithMatch,
    } = graphLookupSpec;

    return documents.map(doc => {
      // "$path" strings are resolved against the current input document;
      // anything else is treated as a literal start value.
      const startValue = typeof startWith === 'string' && startWith.startsWith('$')
        ? this.getNestedValue(doc, startWith.slice(1))
        : startWith;

      const results: Document[] = [];
      const visited = new Set<string>();
      const queue: Array<{ value: any; depth: number }> = [];

      // Initialize with start value(s)
      const startValues = Array.isArray(startValue) ? startValue : [startValue];
      for (const val of startValues) {
        queue.push({ value: val, depth: 0 });
      }

      while (queue.length > 0) {
        const { value, depth } = queue.shift()!;
        if (depth > maxDepth) continue;

        // Skip values we have already expanded (cycle/diamond protection).
        const valueKey = JSON.stringify(value);
        if (visited.has(valueKey)) continue;
        visited.add(valueKey);

        // Find matching documents
        for (const foreignDoc of foreignCollection) {
          const foreignValue = this.getNestedValue(foreignDoc, connectToField);

          if (this.valuesMatch(value, foreignValue)) {
            // Check restrictSearchWithMatch
            if (restrictSearchWithMatch) {
              const matchQuery = new plugins.mingo.Query(restrictSearchWithMatch);
              if (!matchQuery.test(foreignDoc)) continue;
            }

            const resultDoc = depthField
              ? { ...foreignDoc, [depthField]: depth }
              : { ...foreignDoc };

            // Avoid duplicates in results
            // NOTE(review): assumes foreign docs have an ObjectId-like _id
            // with toHexString() — confirm against IStoredDocument.
            const docKey = foreignDoc._id.toHexString();
            if (!results.some(r => r._id?.toHexString?.() === docKey)) {
              results.push(resultDoc);

              // Add connected values to queue
              const nextValue = this.getNestedValue(foreignDoc, connectFromField);
              if (nextValue !== undefined) {
                const nextValues = Array.isArray(nextValue) ? nextValue : [nextValue];
                for (const nv of nextValues) {
                  queue.push({ value: nv, depth: depth + 1 });
                }
              }
            }
          }
        }
      }

      return {
        ...doc,
        [as]: results,
      };
    });
  }

  /**
   * Execute a $facet stage manually.
   * Each named sub-pipeline runs independently over the SAME input documents;
   * the result is one document mapping facet name -> sub-pipeline output.
   */
  static executeFacet(
    documents: IStoredDocument[],
    facetSpec: Record<string, Document[]>
  ): Document {
    const result: Document = {};

    for (const [facetName, pipeline] of Object.entries(facetSpec)) {
      result[facetName] = this.aggregate(documents, pipeline);
    }

    return result;
  }

  /**
   * Execute a $unionWith stage.
   * Optionally runs `pipeline` over the other collection first, then
   * concatenates (no deduplication, matching MongoDB semantics).
   */
  static executeUnionWith(
    documents: IStoredDocument[],
    otherDocuments: IStoredDocument[],
    pipeline?: Document[]
  ): Document[] {
    let unionDocs: Document[] = otherDocuments;
    if (pipeline && pipeline.length > 0) {
      unionDocs = this.aggregate(otherDocuments, pipeline);
    }
    return [...documents, ...unionDocs];
  }

  /**
   * Execute a $merge stage (output to another collection).
   * Returns the documents that would be inserted/updated.
   *
   * NOTE(review): this currently only normalizes the merge options and
   * returns empty toInsert/toUpdate lists — the caller is expected to
   * perform the actual matching/writes. Confirm with call sites.
   */
  static prepareMerge(
    documents: Document[],
    mergeSpec: {
      into: string;
      on?: string | string[];
      whenMatched?: 'replace' | 'keepExisting' | 'merge' | 'fail' | Document[];
      whenNotMatched?: 'insert' | 'discard' | 'fail';
    }
  ): {
    toInsert: Document[];
    toUpdate: Array<{ filter: Document; update: Document }>;
    onField: string | string[];
    whenMatched: string | Document[];
    whenNotMatched: string;
  } {
    // Defaults mirror MongoDB's $merge defaults.
    const onField = mergeSpec.on || '_id';
    const whenMatched = mergeSpec.whenMatched || 'merge';
    const whenNotMatched = mergeSpec.whenNotMatched || 'insert';

    return {
      toInsert: [],
      toUpdate: [],
      onField,
      whenMatched,
      whenNotMatched,
    };
  }

  // ============================================================================
  // Helper Methods
  // ============================================================================

  // Resolve a dot-notation path against an object; undefined on any missing hop.
  private static getNestedValue(obj: any, path: string): any {
    const parts = path.split('.');
    let current = obj;

    for (const part of parts) {
      if (current === null || current === undefined) {
        return undefined;
      }
      current = current[part];
    }

    return current;
  }

  // Join-equality used by $lookup/$graphLookup: strict equality, ObjectId.equals,
  // array-contains on either side, Date by timestamp, plain objects by JSON
  // (key-order sensitive — acceptable for documents built consistently here).
  private static valuesMatch(a: any, b: any): boolean {
    if (a === b) return true;

    // Handle ObjectId comparison
    if (a instanceof plugins.bson.ObjectId && b instanceof plugins.bson.ObjectId) {
      return a.equals(b);
    }

    // Handle array contains check
    if (Array.isArray(a)) {
      return a.some(item => this.valuesMatch(item, b));
    }
    if (Array.isArray(b)) {
      return b.some(item => this.valuesMatch(a, item));
    }

    // Handle Date comparison
    if (a instanceof Date && b instanceof Date) {
      return a.getTime() === b.getTime();
    }

    // Handle object comparison
    if (typeof a === 'object' && typeof b === 'object' && a !== null && b !== null) {
      return JSON.stringify(a) === JSON.stringify(b);
    }

    return false;
  }
}
|
||||
479
ts/tsmdb/engine/IndexEngine.ts
Normal file
479
ts/tsmdb/engine/IndexEngine.ts
Normal file
@@ -0,0 +1,479 @@
|
||||
import * as plugins from '../tsmdb.plugins.js';
|
||||
import type { IStorageAdapter } from '../storage/IStorageAdapter.js';
|
||||
import type {
|
||||
Document,
|
||||
IStoredDocument,
|
||||
IIndexSpecification,
|
||||
IIndexInfo,
|
||||
ICreateIndexOptions,
|
||||
} from '../types/interfaces.js';
|
||||
import { TsmdbDuplicateKeyError, TsmdbIndexError } from '../errors/TsmdbErrors.js';
|
||||
import { QueryEngine } from './QueryEngine.js';
|
||||
|
||||
/**
 * In-memory index data structure for fast lookups.
 * One instance per created index; rebuilt from storage on initialization.
 */
interface IIndexData {
  // Index name, e.g. "email_1" or the user-supplied options.name.
  name: string;
  // Field -> direction (1/-1) or special type string ('text', '2dsphere').
  key: Record<string, 1 | -1 | string>;
  // When true, inserting a second document with the same key value throws.
  unique: boolean;
  // When true, documents with a null/missing key value are not indexed.
  sparse: boolean;
  // TTL in seconds, if this is a TTL index.
  expireAfterSeconds?: number;
  // Map from index key value to document _id(s).
  // Keys are JSON.stringify of the extracted key value (single value, or an
  // array of values for compound indexes); values are sets of _id hex strings.
  entries: Map<string, Set<string>>;
}
|
||||
|
||||
/**
|
||||
* Index engine for managing indexes and query optimization
|
||||
*/
|
||||
export class IndexEngine {
|
||||
private dbName: string;
|
||||
private collName: string;
|
||||
private storage: IStorageAdapter;
|
||||
private indexes: Map<string, IIndexData> = new Map();
|
||||
private initialized = false;
|
||||
|
||||
constructor(dbName: string, collName: string, storage: IStorageAdapter) {
|
||||
this.dbName = dbName;
|
||||
this.collName = collName;
|
||||
this.storage = storage;
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize indexes from storage
|
||||
*/
|
||||
async initialize(): Promise<void> {
|
||||
if (this.initialized) return;
|
||||
|
||||
const storedIndexes = await this.storage.getIndexes(this.dbName, this.collName);
|
||||
const documents = await this.storage.findAll(this.dbName, this.collName);
|
||||
|
||||
for (const indexSpec of storedIndexes) {
|
||||
const indexData: IIndexData = {
|
||||
name: indexSpec.name,
|
||||
key: indexSpec.key,
|
||||
unique: indexSpec.unique || false,
|
||||
sparse: indexSpec.sparse || false,
|
||||
expireAfterSeconds: indexSpec.expireAfterSeconds,
|
||||
entries: new Map(),
|
||||
};
|
||||
|
||||
// Build index entries
|
||||
for (const doc of documents) {
|
||||
const keyValue = this.extractKeyValue(doc, indexSpec.key);
|
||||
if (keyValue !== null || !indexData.sparse) {
|
||||
const keyStr = JSON.stringify(keyValue);
|
||||
if (!indexData.entries.has(keyStr)) {
|
||||
indexData.entries.set(keyStr, new Set());
|
||||
}
|
||||
indexData.entries.get(keyStr)!.add(doc._id.toHexString());
|
||||
}
|
||||
}
|
||||
|
||||
this.indexes.set(indexSpec.name, indexData);
|
||||
}
|
||||
|
||||
this.initialized = true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new index
|
||||
*/
|
||||
async createIndex(
|
||||
key: Record<string, 1 | -1 | 'text' | '2dsphere'>,
|
||||
options?: ICreateIndexOptions
|
||||
): Promise<string> {
|
||||
await this.initialize();
|
||||
|
||||
// Generate index name if not provided
|
||||
const name = options?.name || this.generateIndexName(key);
|
||||
|
||||
// Check if index already exists
|
||||
if (this.indexes.has(name)) {
|
||||
return name;
|
||||
}
|
||||
|
||||
// Create index data structure
|
||||
const indexData: IIndexData = {
|
||||
name,
|
||||
key: key as Record<string, 1 | -1 | string>,
|
||||
unique: options?.unique || false,
|
||||
sparse: options?.sparse || false,
|
||||
expireAfterSeconds: options?.expireAfterSeconds,
|
||||
entries: new Map(),
|
||||
};
|
||||
|
||||
// Build index from existing documents
|
||||
const documents = await this.storage.findAll(this.dbName, this.collName);
|
||||
|
||||
for (const doc of documents) {
|
||||
const keyValue = this.extractKeyValue(doc, key);
|
||||
|
||||
if (keyValue === null && indexData.sparse) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const keyStr = JSON.stringify(keyValue);
|
||||
|
||||
if (indexData.unique && indexData.entries.has(keyStr)) {
|
||||
throw new TsmdbDuplicateKeyError(
|
||||
`E11000 duplicate key error index: ${this.dbName}.${this.collName}.$${name}`,
|
||||
key as Record<string, 1>,
|
||||
keyValue
|
||||
);
|
||||
}
|
||||
|
||||
if (!indexData.entries.has(keyStr)) {
|
||||
indexData.entries.set(keyStr, new Set());
|
||||
}
|
||||
indexData.entries.get(keyStr)!.add(doc._id.toHexString());
|
||||
}
|
||||
|
||||
// Store index
|
||||
this.indexes.set(name, indexData);
|
||||
await this.storage.saveIndex(this.dbName, this.collName, name, {
|
||||
key,
|
||||
unique: options?.unique,
|
||||
sparse: options?.sparse,
|
||||
expireAfterSeconds: options?.expireAfterSeconds,
|
||||
});
|
||||
|
||||
return name;
|
||||
}
|
||||
|
||||
/**
|
||||
* Drop an index
|
||||
*/
|
||||
async dropIndex(name: string): Promise<void> {
|
||||
await this.initialize();
|
||||
|
||||
if (name === '_id_') {
|
||||
throw new TsmdbIndexError('cannot drop _id index');
|
||||
}
|
||||
|
||||
if (!this.indexes.has(name)) {
|
||||
throw new TsmdbIndexError(`index not found: ${name}`);
|
||||
}
|
||||
|
||||
this.indexes.delete(name);
|
||||
await this.storage.dropIndex(this.dbName, this.collName, name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Drop all indexes except _id
|
||||
*/
|
||||
async dropAllIndexes(): Promise<void> {
|
||||
await this.initialize();
|
||||
|
||||
const names = Array.from(this.indexes.keys()).filter(n => n !== '_id_');
|
||||
for (const name of names) {
|
||||
this.indexes.delete(name);
|
||||
await this.storage.dropIndex(this.dbName, this.collName, name);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* List all indexes
|
||||
*/
|
||||
async listIndexes(): Promise<IIndexInfo[]> {
|
||||
await this.initialize();
|
||||
|
||||
return Array.from(this.indexes.values()).map(idx => ({
|
||||
v: 2,
|
||||
key: idx.key,
|
||||
name: idx.name,
|
||||
unique: idx.unique || undefined,
|
||||
sparse: idx.sparse || undefined,
|
||||
expireAfterSeconds: idx.expireAfterSeconds,
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if an index exists
|
||||
*/
|
||||
async indexExists(name: string): Promise<boolean> {
|
||||
await this.initialize();
|
||||
return this.indexes.has(name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Update index entries after document insert
|
||||
*/
|
||||
async onInsert(doc: IStoredDocument): Promise<void> {
|
||||
await this.initialize();
|
||||
|
||||
for (const [name, indexData] of this.indexes) {
|
||||
const keyValue = this.extractKeyValue(doc, indexData.key);
|
||||
|
||||
if (keyValue === null && indexData.sparse) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const keyStr = JSON.stringify(keyValue);
|
||||
|
||||
// Check unique constraint
|
||||
if (indexData.unique) {
|
||||
const existing = indexData.entries.get(keyStr);
|
||||
if (existing && existing.size > 0) {
|
||||
throw new TsmdbDuplicateKeyError(
|
||||
`E11000 duplicate key error collection: ${this.dbName}.${this.collName} index: ${name}`,
|
||||
indexData.key as Record<string, 1>,
|
||||
keyValue
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (!indexData.entries.has(keyStr)) {
|
||||
indexData.entries.set(keyStr, new Set());
|
||||
}
|
||||
indexData.entries.get(keyStr)!.add(doc._id.toHexString());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update index entries after document update
|
||||
*/
|
||||
async onUpdate(oldDoc: IStoredDocument, newDoc: IStoredDocument): Promise<void> {
|
||||
await this.initialize();
|
||||
|
||||
for (const [name, indexData] of this.indexes) {
|
||||
const oldKeyValue = this.extractKeyValue(oldDoc, indexData.key);
|
||||
const newKeyValue = this.extractKeyValue(newDoc, indexData.key);
|
||||
const oldKeyStr = JSON.stringify(oldKeyValue);
|
||||
const newKeyStr = JSON.stringify(newKeyValue);
|
||||
|
||||
// Remove old entry if key changed
|
||||
if (oldKeyStr !== newKeyStr) {
|
||||
if (oldKeyValue !== null || !indexData.sparse) {
|
||||
const oldSet = indexData.entries.get(oldKeyStr);
|
||||
if (oldSet) {
|
||||
oldSet.delete(oldDoc._id.toHexString());
|
||||
if (oldSet.size === 0) {
|
||||
indexData.entries.delete(oldKeyStr);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add new entry
|
||||
if (newKeyValue !== null || !indexData.sparse) {
|
||||
// Check unique constraint
|
||||
if (indexData.unique) {
|
||||
const existing = indexData.entries.get(newKeyStr);
|
||||
if (existing && existing.size > 0) {
|
||||
throw new TsmdbDuplicateKeyError(
|
||||
`E11000 duplicate key error collection: ${this.dbName}.${this.collName} index: ${name}`,
|
||||
indexData.key as Record<string, 1>,
|
||||
newKeyValue
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (!indexData.entries.has(newKeyStr)) {
|
||||
indexData.entries.set(newKeyStr, new Set());
|
||||
}
|
||||
indexData.entries.get(newKeyStr)!.add(newDoc._id.toHexString());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update index entries after document delete
|
||||
*/
|
||||
async onDelete(doc: IStoredDocument): Promise<void> {
|
||||
await this.initialize();
|
||||
|
||||
for (const indexData of this.indexes.values()) {
|
||||
const keyValue = this.extractKeyValue(doc, indexData.key);
|
||||
|
||||
if (keyValue === null && indexData.sparse) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const keyStr = JSON.stringify(keyValue);
|
||||
const set = indexData.entries.get(keyStr);
|
||||
if (set) {
|
||||
set.delete(doc._id.toHexString());
|
||||
if (set.size === 0) {
|
||||
indexData.entries.delete(keyStr);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Find the best index for a query
|
||||
*/
|
||||
selectIndex(filter: Document): { name: string; data: IIndexData } | null {
|
||||
if (!filter || Object.keys(filter).length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Get filter fields
|
||||
const filterFields = new Set(this.getFilterFields(filter));
|
||||
|
||||
// Score each index
|
||||
let bestIndex: { name: string; data: IIndexData } | null = null;
|
||||
let bestScore = 0;
|
||||
|
||||
for (const [name, indexData] of this.indexes) {
|
||||
const indexFields = Object.keys(indexData.key);
|
||||
let score = 0;
|
||||
|
||||
// Count how many index fields are in the filter
|
||||
for (const field of indexFields) {
|
||||
if (filterFields.has(field)) {
|
||||
score++;
|
||||
} else {
|
||||
break; // Index fields must be contiguous
|
||||
}
|
||||
}
|
||||
|
||||
// Prefer unique indexes
|
||||
if (indexData.unique && score > 0) {
|
||||
score += 0.5;
|
||||
}
|
||||
|
||||
if (score > bestScore) {
|
||||
bestScore = score;
|
||||
bestIndex = { name, data: indexData };
|
||||
}
|
||||
}
|
||||
|
||||
return bestIndex;
|
||||
}
|
||||
|
||||
/**
|
||||
* Use index to find candidate document IDs
|
||||
*/
|
||||
async findCandidateIds(filter: Document): Promise<Set<string> | null> {
|
||||
await this.initialize();
|
||||
|
||||
const index = this.selectIndex(filter);
|
||||
if (!index) return null;
|
||||
|
||||
// Try to use the index for equality matches
|
||||
const indexFields = Object.keys(index.data.key);
|
||||
const equalityValues: Record<string, any> = {};
|
||||
|
||||
for (const field of indexFields) {
|
||||
const filterValue = this.getFilterValue(filter, field);
|
||||
if (filterValue === undefined) break;
|
||||
|
||||
// Only use equality matches for index lookup
|
||||
if (typeof filterValue === 'object' && filterValue !== null) {
|
||||
if (filterValue.$eq !== undefined) {
|
||||
equalityValues[field] = filterValue.$eq;
|
||||
} else if (filterValue.$in !== undefined) {
|
||||
// Handle $in with multiple lookups
|
||||
const results = new Set<string>();
|
||||
for (const val of filterValue.$in) {
|
||||
equalityValues[field] = val;
|
||||
const keyStr = JSON.stringify(this.buildKeyValue(equalityValues, index.data.key));
|
||||
const ids = index.data.entries.get(keyStr);
|
||||
if (ids) {
|
||||
for (const id of ids) {
|
||||
results.add(id);
|
||||
}
|
||||
}
|
||||
}
|
||||
return results;
|
||||
} else {
|
||||
break; // Non-equality operator, stop here
|
||||
}
|
||||
} else {
|
||||
equalityValues[field] = filterValue;
|
||||
}
|
||||
}
|
||||
|
||||
if (Object.keys(equalityValues).length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const keyStr = JSON.stringify(this.buildKeyValue(equalityValues, index.data.key));
|
||||
return index.data.entries.get(keyStr) || new Set();
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Helper Methods
|
||||
// ============================================================================
|
||||
|
||||
private generateIndexName(key: Record<string, any>): string {
|
||||
return Object.entries(key)
|
||||
.map(([field, dir]) => `${field}_${dir}`)
|
||||
.join('_');
|
||||
}
|
||||
|
||||
private extractKeyValue(doc: Document, key: Record<string, any>): any {
|
||||
const values: any[] = [];
|
||||
|
||||
for (const field of Object.keys(key)) {
|
||||
const value = QueryEngine.getNestedValue(doc, field);
|
||||
values.push(value === undefined ? null : value);
|
||||
}
|
||||
|
||||
// For single-field index, return the value directly
|
||||
if (values.length === 1) {
|
||||
return values[0];
|
||||
}
|
||||
|
||||
return values;
|
||||
}
|
||||
|
||||
private buildKeyValue(values: Record<string, any>, key: Record<string, any>): any {
|
||||
const result: any[] = [];
|
||||
|
||||
for (const field of Object.keys(key)) {
|
||||
result.push(values[field] !== undefined ? values[field] : null);
|
||||
}
|
||||
|
||||
if (result.length === 1) {
|
||||
return result[0];
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private getFilterFields(filter: Document, prefix = ''): string[] {
|
||||
const fields: string[] = [];
|
||||
|
||||
for (const [key, value] of Object.entries(filter)) {
|
||||
if (key.startsWith('$')) {
|
||||
// Logical operator
|
||||
if (key === '$and' || key === '$or' || key === '$nor') {
|
||||
for (const subFilter of value as Document[]) {
|
||||
fields.push(...this.getFilterFields(subFilter, prefix));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
const fullKey = prefix ? `${prefix}.${key}` : key;
|
||||
fields.push(fullKey);
|
||||
|
||||
// Check for nested filters
|
||||
if (typeof value === 'object' && value !== null && !Array.isArray(value)) {
|
||||
const subKeys = Object.keys(value);
|
||||
if (subKeys.length > 0 && !subKeys[0].startsWith('$')) {
|
||||
fields.push(...this.getFilterFields(value, fullKey));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return fields;
|
||||
}
|
||||
|
||||
private getFilterValue(filter: Document, field: string): any {
|
||||
// Handle dot notation
|
||||
const parts = field.split('.');
|
||||
let current: any = filter;
|
||||
|
||||
for (const part of parts) {
|
||||
if (current === null || current === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
current = current[part];
|
||||
}
|
||||
|
||||
return current;
|
||||
}
|
||||
}
|
||||
301
ts/tsmdb/engine/QueryEngine.ts
Normal file
301
ts/tsmdb/engine/QueryEngine.ts
Normal file
@@ -0,0 +1,301 @@
|
||||
import * as plugins from '../tsmdb.plugins.js';
|
||||
import type { Document, IStoredDocument, ISortSpecification, ISortDirection } from '../types/interfaces.js';
|
||||
|
||||
// Import mingo Query class
|
||||
import { Query } from 'mingo';
|
||||
|
||||
/**
|
||||
* Query engine using mingo for MongoDB-compatible query matching
|
||||
*/
|
||||
export class QueryEngine {
|
||||
/**
|
||||
* Filter documents by a MongoDB query filter
|
||||
*/
|
||||
static filter(documents: IStoredDocument[], filter: Document): IStoredDocument[] {
|
||||
if (!filter || Object.keys(filter).length === 0) {
|
||||
return documents;
|
||||
}
|
||||
|
||||
const query = new Query(filter);
|
||||
return documents.filter(doc => query.test(doc));
|
||||
}
|
||||
|
||||
/**
|
||||
* Test if a single document matches a filter
|
||||
*/
|
||||
static matches(document: Document, filter: Document): boolean {
|
||||
if (!filter || Object.keys(filter).length === 0) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const query = new Query(filter);
|
||||
return query.test(document);
|
||||
}
|
||||
|
||||
/**
|
||||
* Find a single document matching the filter
|
||||
*/
|
||||
static findOne(documents: IStoredDocument[], filter: Document): IStoredDocument | null {
|
||||
if (!filter || Object.keys(filter).length === 0) {
|
||||
return documents[0] || null;
|
||||
}
|
||||
|
||||
const query = new Query(filter);
|
||||
for (const doc of documents) {
|
||||
if (query.test(doc)) {
|
||||
return doc;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sort documents by a sort specification
|
||||
*/
|
||||
static sort(documents: IStoredDocument[], sort: ISortSpecification): IStoredDocument[] {
|
||||
if (!sort) {
|
||||
return documents;
|
||||
}
|
||||
|
||||
// Normalize sort specification to array of [field, direction] pairs
|
||||
const sortFields: Array<[string, number]> = [];
|
||||
|
||||
if (Array.isArray(sort)) {
|
||||
for (const [field, direction] of sort) {
|
||||
sortFields.push([field, this.normalizeDirection(direction)]);
|
||||
}
|
||||
} else {
|
||||
for (const [field, direction] of Object.entries(sort)) {
|
||||
sortFields.push([field, this.normalizeDirection(direction)]);
|
||||
}
|
||||
}
|
||||
|
||||
return [...documents].sort((a, b) => {
|
||||
for (const [field, direction] of sortFields) {
|
||||
const aVal = this.getNestedValue(a, field);
|
||||
const bVal = this.getNestedValue(b, field);
|
||||
|
||||
const comparison = this.compareValues(aVal, bVal);
|
||||
if (comparison !== 0) {
|
||||
return comparison * direction;
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply projection to documents
|
||||
*/
|
||||
static project(documents: IStoredDocument[], projection: Document): Document[] {
|
||||
if (!projection || Object.keys(projection).length === 0) {
|
||||
return documents;
|
||||
}
|
||||
|
||||
// Determine if this is inclusion or exclusion projection
|
||||
const keys = Object.keys(projection);
|
||||
const hasInclusion = keys.some(k => k !== '_id' && projection[k] === 1);
|
||||
const hasExclusion = keys.some(k => k !== '_id' && projection[k] === 0);
|
||||
|
||||
// Can't mix inclusion and exclusion (except for _id)
|
||||
if (hasInclusion && hasExclusion) {
|
||||
throw new Error('Cannot mix inclusion and exclusion in projection');
|
||||
}
|
||||
|
||||
return documents.map(doc => {
|
||||
if (hasInclusion) {
|
||||
// Inclusion projection
|
||||
const result: Document = {};
|
||||
|
||||
// Handle _id
|
||||
if (projection._id !== 0 && projection._id !== false) {
|
||||
result._id = doc._id;
|
||||
}
|
||||
|
||||
for (const key of keys) {
|
||||
if (key === '_id') continue;
|
||||
if (projection[key] === 1 || projection[key] === true) {
|
||||
const value = this.getNestedValue(doc, key);
|
||||
if (value !== undefined) {
|
||||
this.setNestedValue(result, key, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
} else {
|
||||
// Exclusion projection - start with copy and remove fields
|
||||
const result = { ...doc };
|
||||
|
||||
for (const key of keys) {
|
||||
if (projection[key] === 0 || projection[key] === false) {
|
||||
this.deleteNestedValue(result, key);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get distinct values for a field
|
||||
*/
|
||||
static distinct(documents: IStoredDocument[], field: string, filter?: Document): any[] {
|
||||
let docs = documents;
|
||||
if (filter && Object.keys(filter).length > 0) {
|
||||
docs = this.filter(documents, filter);
|
||||
}
|
||||
|
||||
const values = new Set<any>();
|
||||
for (const doc of docs) {
|
||||
const value = this.getNestedValue(doc, field);
|
||||
if (value !== undefined) {
|
||||
if (Array.isArray(value)) {
|
||||
// For arrays, add each element
|
||||
for (const v of value) {
|
||||
values.add(this.toComparable(v));
|
||||
}
|
||||
} else {
|
||||
values.add(this.toComparable(value));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return Array.from(values);
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalize sort direction to 1 or -1
|
||||
*/
|
||||
private static normalizeDirection(direction: ISortDirection): number {
|
||||
if (typeof direction === 'number') {
|
||||
return direction > 0 ? 1 : -1;
|
||||
}
|
||||
if (direction === 'asc' || direction === 'ascending') {
|
||||
return 1;
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a nested value from an object using dot notation
|
||||
*/
|
||||
static getNestedValue(obj: any, path: string): any {
|
||||
const parts = path.split('.');
|
||||
let current = obj;
|
||||
|
||||
for (const part of parts) {
|
||||
if (current === null || current === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
if (Array.isArray(current)) {
|
||||
// Handle array access
|
||||
const index = parseInt(part, 10);
|
||||
if (!isNaN(index)) {
|
||||
current = current[index];
|
||||
} else {
|
||||
// Get the field from all array elements
|
||||
return current.map(item => this.getNestedValue(item, part)).flat();
|
||||
}
|
||||
} else {
|
||||
current = current[part];
|
||||
}
|
||||
}
|
||||
|
||||
return current;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set a nested value in an object using dot notation
|
||||
*/
|
||||
private static setNestedValue(obj: any, path: string, value: any): void {
|
||||
const parts = path.split('.');
|
||||
let current = obj;
|
||||
|
||||
for (let i = 0; i < parts.length - 1; i++) {
|
||||
const part = parts[i];
|
||||
if (!(part in current)) {
|
||||
current[part] = {};
|
||||
}
|
||||
current = current[part];
|
||||
}
|
||||
|
||||
current[parts[parts.length - 1]] = value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a nested value from an object using dot notation
|
||||
*/
|
||||
private static deleteNestedValue(obj: any, path: string): void {
|
||||
const parts = path.split('.');
|
||||
let current = obj;
|
||||
|
||||
for (let i = 0; i < parts.length - 1; i++) {
|
||||
const part = parts[i];
|
||||
if (!(part in current)) {
|
||||
return;
|
||||
}
|
||||
current = current[part];
|
||||
}
|
||||
|
||||
delete current[parts[parts.length - 1]];
|
||||
}
|
||||
|
||||
/**
|
||||
* Compare two values for sorting
|
||||
*/
|
||||
private static compareValues(a: any, b: any): number {
|
||||
// Handle undefined/null
|
||||
if (a === undefined && b === undefined) return 0;
|
||||
if (a === undefined) return -1;
|
||||
if (b === undefined) return 1;
|
||||
if (a === null && b === null) return 0;
|
||||
if (a === null) return -1;
|
||||
if (b === null) return 1;
|
||||
|
||||
// Handle ObjectId
|
||||
if (a instanceof plugins.bson.ObjectId && b instanceof plugins.bson.ObjectId) {
|
||||
return a.toHexString().localeCompare(b.toHexString());
|
||||
}
|
||||
|
||||
// Handle dates
|
||||
if (a instanceof Date && b instanceof Date) {
|
||||
return a.getTime() - b.getTime();
|
||||
}
|
||||
|
||||
// Handle numbers
|
||||
if (typeof a === 'number' && typeof b === 'number') {
|
||||
return a - b;
|
||||
}
|
||||
|
||||
// Handle strings
|
||||
if (typeof a === 'string' && typeof b === 'string') {
|
||||
return a.localeCompare(b);
|
||||
}
|
||||
|
||||
// Handle booleans
|
||||
if (typeof a === 'boolean' && typeof b === 'boolean') {
|
||||
return (a ? 1 : 0) - (b ? 1 : 0);
|
||||
}
|
||||
|
||||
// Fall back to string comparison
|
||||
return String(a).localeCompare(String(b));
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert a value to a comparable form (for distinct)
|
||||
*/
|
||||
private static toComparable(value: any): any {
|
||||
if (value instanceof plugins.bson.ObjectId) {
|
||||
return value.toHexString();
|
||||
}
|
||||
if (value instanceof Date) {
|
||||
return value.toISOString();
|
||||
}
|
||||
if (typeof value === 'object' && value !== null) {
|
||||
return JSON.stringify(value);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
}
|
||||
351
ts/tsmdb/engine/TransactionEngine.ts
Normal file
351
ts/tsmdb/engine/TransactionEngine.ts
Normal file
@@ -0,0 +1,351 @@
|
||||
import * as plugins from '../tsmdb.plugins.js';
|
||||
import type { IStorageAdapter } from '../storage/IStorageAdapter.js';
|
||||
import type { Document, IStoredDocument, ITransactionOptions } from '../types/interfaces.js';
|
||||
import { TsmdbTransactionError, TsmdbWriteConflictError } from '../errors/TsmdbErrors.js';
|
||||
|
||||
/**
 * In-memory state tracked for a single transaction.
 */
export interface ITransactionState {
  // Unique transaction id (format: txn_<sessionId>_<counter>)
  id: string;
  // Session that owns this transaction
  sessionId: string;
  // Logical start time; used for write-conflict detection at commit
  startTime: plugins.bson.Timestamp;
  status: 'active' | 'committed' | 'aborted';
  readSet: Map<string, Set<string>>; // ns -> document _ids read
  writeSet: Map<string, Map<string, { op: 'insert' | 'update' | 'delete'; doc?: IStoredDocument; originalDoc?: IStoredDocument }>>; // ns -> _id -> operation
  snapshots: Map<string, IStoredDocument[]>; // ns -> snapshot of documents
}
|
||||
|
||||
/**
|
||||
* Transaction engine for ACID transaction support
|
||||
*/
|
||||
export class TransactionEngine {
|
||||
private storage: IStorageAdapter;
|
||||
private transactions: Map<string, ITransactionState> = new Map();
|
||||
private txnCounter = 0;
|
||||
|
||||
constructor(storage: IStorageAdapter) {
|
||||
this.storage = storage;
|
||||
}
|
||||
|
||||
/**
|
||||
* Start a new transaction
|
||||
*/
|
||||
startTransaction(sessionId: string, options?: ITransactionOptions): string {
|
||||
this.txnCounter++;
|
||||
const txnId = `txn_${sessionId}_${this.txnCounter}`;
|
||||
|
||||
const transaction: ITransactionState = {
|
||||
id: txnId,
|
||||
sessionId,
|
||||
startTime: new plugins.bson.Timestamp({ t: Math.floor(Date.now() / 1000), i: this.txnCounter }),
|
||||
status: 'active',
|
||||
readSet: new Map(),
|
||||
writeSet: new Map(),
|
||||
snapshots: new Map(),
|
||||
};
|
||||
|
||||
this.transactions.set(txnId, transaction);
|
||||
return txnId;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a transaction by ID
|
||||
*/
|
||||
getTransaction(txnId: string): ITransactionState | undefined {
|
||||
return this.transactions.get(txnId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a transaction is active
|
||||
*/
|
||||
isActive(txnId: string): boolean {
|
||||
const txn = this.transactions.get(txnId);
|
||||
return txn?.status === 'active';
|
||||
}
|
||||
|
||||
/**
|
||||
* Get or create a snapshot for a namespace
|
||||
*/
|
||||
async getSnapshot(txnId: string, dbName: string, collName: string): Promise<IStoredDocument[]> {
|
||||
const txn = this.transactions.get(txnId);
|
||||
if (!txn || txn.status !== 'active') {
|
||||
throw new TsmdbTransactionError('Transaction is not active');
|
||||
}
|
||||
|
||||
const ns = `${dbName}.${collName}`;
|
||||
if (!txn.snapshots.has(ns)) {
|
||||
const snapshot = await this.storage.createSnapshot(dbName, collName);
|
||||
txn.snapshots.set(ns, snapshot);
|
||||
}
|
||||
|
||||
// Apply transaction writes to snapshot
|
||||
const snapshot = txn.snapshots.get(ns)!;
|
||||
const writes = txn.writeSet.get(ns);
|
||||
|
||||
if (!writes) {
|
||||
return snapshot;
|
||||
}
|
||||
|
||||
// Create a modified view of the snapshot
|
||||
const result: IStoredDocument[] = [];
|
||||
const deletedIds = new Set<string>();
|
||||
const modifiedDocs = new Map<string, IStoredDocument>();
|
||||
|
||||
for (const [idStr, write] of writes) {
|
||||
if (write.op === 'delete') {
|
||||
deletedIds.add(idStr);
|
||||
} else if (write.op === 'update' || write.op === 'insert') {
|
||||
if (write.doc) {
|
||||
modifiedDocs.set(idStr, write.doc);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add existing documents (not deleted, possibly modified)
|
||||
for (const doc of snapshot) {
|
||||
const idStr = doc._id.toHexString();
|
||||
if (deletedIds.has(idStr)) {
|
||||
continue;
|
||||
}
|
||||
if (modifiedDocs.has(idStr)) {
|
||||
result.push(modifiedDocs.get(idStr)!);
|
||||
modifiedDocs.delete(idStr);
|
||||
} else {
|
||||
result.push(doc);
|
||||
}
|
||||
}
|
||||
|
||||
// Add new documents (inserts)
|
||||
for (const doc of modifiedDocs.values()) {
|
||||
result.push(doc);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Record a read operation
|
||||
*/
|
||||
recordRead(txnId: string, dbName: string, collName: string, docIds: string[]): void {
|
||||
const txn = this.transactions.get(txnId);
|
||||
if (!txn || txn.status !== 'active') return;
|
||||
|
||||
const ns = `${dbName}.${collName}`;
|
||||
if (!txn.readSet.has(ns)) {
|
||||
txn.readSet.set(ns, new Set());
|
||||
}
|
||||
|
||||
const readSet = txn.readSet.get(ns)!;
|
||||
for (const id of docIds) {
|
||||
readSet.add(id);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Record a write operation (insert)
|
||||
*/
|
||||
recordInsert(txnId: string, dbName: string, collName: string, doc: IStoredDocument): void {
|
||||
const txn = this.transactions.get(txnId);
|
||||
if (!txn || txn.status !== 'active') {
|
||||
throw new TsmdbTransactionError('Transaction is not active');
|
||||
}
|
||||
|
||||
const ns = `${dbName}.${collName}`;
|
||||
if (!txn.writeSet.has(ns)) {
|
||||
txn.writeSet.set(ns, new Map());
|
||||
}
|
||||
|
||||
txn.writeSet.get(ns)!.set(doc._id.toHexString(), {
|
||||
op: 'insert',
|
||||
doc,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Record a write operation (update)
|
||||
*/
|
||||
recordUpdate(
|
||||
txnId: string,
|
||||
dbName: string,
|
||||
collName: string,
|
||||
originalDoc: IStoredDocument,
|
||||
updatedDoc: IStoredDocument
|
||||
): void {
|
||||
const txn = this.transactions.get(txnId);
|
||||
if (!txn || txn.status !== 'active') {
|
||||
throw new TsmdbTransactionError('Transaction is not active');
|
||||
}
|
||||
|
||||
const ns = `${dbName}.${collName}`;
|
||||
if (!txn.writeSet.has(ns)) {
|
||||
txn.writeSet.set(ns, new Map());
|
||||
}
|
||||
|
||||
const idStr = originalDoc._id.toHexString();
|
||||
const existing = txn.writeSet.get(ns)!.get(idStr);
|
||||
|
||||
// If we already have a write for this document, update it
|
||||
if (existing) {
|
||||
existing.doc = updatedDoc;
|
||||
} else {
|
||||
txn.writeSet.get(ns)!.set(idStr, {
|
||||
op: 'update',
|
||||
doc: updatedDoc,
|
||||
originalDoc,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Record a write operation (delete)
|
||||
*/
|
||||
recordDelete(txnId: string, dbName: string, collName: string, doc: IStoredDocument): void {
|
||||
const txn = this.transactions.get(txnId);
|
||||
if (!txn || txn.status !== 'active') {
|
||||
throw new TsmdbTransactionError('Transaction is not active');
|
||||
}
|
||||
|
||||
const ns = `${dbName}.${collName}`;
|
||||
if (!txn.writeSet.has(ns)) {
|
||||
txn.writeSet.set(ns, new Map());
|
||||
}
|
||||
|
||||
const idStr = doc._id.toHexString();
|
||||
const existing = txn.writeSet.get(ns)!.get(idStr);
|
||||
|
||||
if (existing && existing.op === 'insert') {
|
||||
// If we inserted and then deleted, just remove the write
|
||||
txn.writeSet.get(ns)!.delete(idStr);
|
||||
} else {
|
||||
txn.writeSet.get(ns)!.set(idStr, {
|
||||
op: 'delete',
|
||||
originalDoc: doc,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Commit a transaction
|
||||
*/
|
||||
async commitTransaction(txnId: string): Promise<void> {
|
||||
const txn = this.transactions.get(txnId);
|
||||
if (!txn) {
|
||||
throw new TsmdbTransactionError('Transaction not found');
|
||||
}
|
||||
if (txn.status !== 'active') {
|
||||
throw new TsmdbTransactionError(`Cannot commit transaction in state: ${txn.status}`);
|
||||
}
|
||||
|
||||
// Check for write conflicts
|
||||
for (const [ns, writes] of txn.writeSet) {
|
||||
const [dbName, collName] = ns.split('.');
|
||||
const ids = Array.from(writes.keys()).map(id => new plugins.bson.ObjectId(id));
|
||||
|
||||
const hasConflicts = await this.storage.hasConflicts(dbName, collName, ids, txn.startTime);
|
||||
if (hasConflicts) {
|
||||
txn.status = 'aborted';
|
||||
throw new TsmdbWriteConflictError();
|
||||
}
|
||||
}
|
||||
|
||||
// Apply all writes
|
||||
for (const [ns, writes] of txn.writeSet) {
|
||||
const [dbName, collName] = ns.split('.');
|
||||
|
||||
for (const [idStr, write] of writes) {
|
||||
switch (write.op) {
|
||||
case 'insert':
|
||||
if (write.doc) {
|
||||
await this.storage.insertOne(dbName, collName, write.doc);
|
||||
}
|
||||
break;
|
||||
case 'update':
|
||||
if (write.doc) {
|
||||
await this.storage.updateById(dbName, collName, new plugins.bson.ObjectId(idStr), write.doc);
|
||||
}
|
||||
break;
|
||||
case 'delete':
|
||||
await this.storage.deleteById(dbName, collName, new plugins.bson.ObjectId(idStr));
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
txn.status = 'committed';
|
||||
}
|
||||
|
||||
/**
|
||||
* Abort a transaction
|
||||
*/
|
||||
async abortTransaction(txnId: string): Promise<void> {
|
||||
const txn = this.transactions.get(txnId);
|
||||
if (!txn) {
|
||||
throw new TsmdbTransactionError('Transaction not found');
|
||||
}
|
||||
if (txn.status !== 'active') {
|
||||
// Already committed or aborted, just return
|
||||
return;
|
||||
}
|
||||
|
||||
// Simply discard all buffered writes
|
||||
txn.writeSet.clear();
|
||||
txn.readSet.clear();
|
||||
txn.snapshots.clear();
|
||||
txn.status = 'aborted';
|
||||
}
|
||||
|
||||
/**
|
||||
* End a transaction (cleanup)
|
||||
*/
|
||||
endTransaction(txnId: string): void {
|
||||
this.transactions.delete(txnId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all pending writes for a namespace
|
||||
*/
|
||||
getPendingWrites(txnId: string, dbName: string, collName: string): Map<string, { op: 'insert' | 'update' | 'delete'; doc?: IStoredDocument }> | undefined {
|
||||
const txn = this.transactions.get(txnId);
|
||||
if (!txn) return undefined;
|
||||
|
||||
const ns = `${dbName}.${collName}`;
|
||||
return txn.writeSet.get(ns);
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a callback within a transaction, with automatic retry on conflict
|
||||
*/
|
||||
async withTransaction<T>(
|
||||
sessionId: string,
|
||||
callback: (txnId: string) => Promise<T>,
|
||||
options?: ITransactionOptions & { maxRetries?: number }
|
||||
): Promise<T> {
|
||||
const maxRetries = options?.maxRetries ?? 3;
|
||||
let lastError: Error | undefined;
|
||||
|
||||
for (let attempt = 0; attempt < maxRetries; attempt++) {
|
||||
const txnId = this.startTransaction(sessionId, options);
|
||||
|
||||
try {
|
||||
const result = await callback(txnId);
|
||||
await this.commitTransaction(txnId);
|
||||
this.endTransaction(txnId);
|
||||
return result;
|
||||
} catch (error: any) {
|
||||
await this.abortTransaction(txnId);
|
||||
this.endTransaction(txnId);
|
||||
|
||||
if (error instanceof TsmdbWriteConflictError && attempt < maxRetries - 1) {
|
||||
// Retry on write conflict
|
||||
lastError = error;
|
||||
continue;
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
throw lastError || new TsmdbTransactionError('Transaction failed after max retries');
|
||||
}
|
||||
}
|
||||
506
ts/tsmdb/engine/UpdateEngine.ts
Normal file
506
ts/tsmdb/engine/UpdateEngine.ts
Normal file
@@ -0,0 +1,506 @@
|
||||
import * as plugins from '../tsmdb.plugins.js';
|
||||
import type { Document, IStoredDocument } from '../types/interfaces.js';
|
||||
import { QueryEngine } from './QueryEngine.js';
|
||||
|
||||
/**
|
||||
* Update engine for MongoDB-compatible update operations
|
||||
*/
|
||||
export class UpdateEngine {
|
||||
/**
|
||||
* Apply an update specification to a document
|
||||
* Returns the updated document or null if no update was applied
|
||||
*/
|
||||
static applyUpdate(document: IStoredDocument, update: Document, arrayFilters?: Document[]): IStoredDocument {
|
||||
// Check if this is an aggregation pipeline update
|
||||
if (Array.isArray(update)) {
|
||||
// Aggregation pipeline updates are not yet supported
|
||||
throw new Error('Aggregation pipeline updates are not yet supported');
|
||||
}
|
||||
|
||||
// Check if this is a replacement (no $ operators at top level)
|
||||
const hasOperators = Object.keys(update).some(k => k.startsWith('$'));
|
||||
|
||||
if (!hasOperators) {
|
||||
// This is a replacement - preserve _id
|
||||
return {
|
||||
_id: document._id,
|
||||
...update,
|
||||
};
|
||||
}
|
||||
|
||||
// Apply update operators
|
||||
const result = this.deepClone(document);
|
||||
|
||||
for (const [operator, operand] of Object.entries(update)) {
|
||||
switch (operator) {
|
||||
case '$set':
|
||||
this.applySet(result, operand);
|
||||
break;
|
||||
case '$unset':
|
||||
this.applyUnset(result, operand);
|
||||
break;
|
||||
case '$inc':
|
||||
this.applyInc(result, operand);
|
||||
break;
|
||||
case '$mul':
|
||||
this.applyMul(result, operand);
|
||||
break;
|
||||
case '$min':
|
||||
this.applyMin(result, operand);
|
||||
break;
|
||||
case '$max':
|
||||
this.applyMax(result, operand);
|
||||
break;
|
||||
case '$rename':
|
||||
this.applyRename(result, operand);
|
||||
break;
|
||||
case '$currentDate':
|
||||
this.applyCurrentDate(result, operand);
|
||||
break;
|
||||
case '$setOnInsert':
|
||||
// Only applied during upsert insert, handled elsewhere
|
||||
break;
|
||||
case '$push':
|
||||
this.applyPush(result, operand, arrayFilters);
|
||||
break;
|
||||
case '$pop':
|
||||
this.applyPop(result, operand);
|
||||
break;
|
||||
case '$pull':
|
||||
this.applyPull(result, operand, arrayFilters);
|
||||
break;
|
||||
case '$pullAll':
|
||||
this.applyPullAll(result, operand);
|
||||
break;
|
||||
case '$addToSet':
|
||||
this.applyAddToSet(result, operand);
|
||||
break;
|
||||
case '$bit':
|
||||
this.applyBit(result, operand);
|
||||
break;
|
||||
default:
|
||||
throw new Error(`Unknown update operator: ${operator}`);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply $setOnInsert for upsert operations
|
||||
*/
|
||||
static applySetOnInsert(document: IStoredDocument, setOnInsert: Document): IStoredDocument {
|
||||
const result = this.deepClone(document);
|
||||
this.applySet(result, setOnInsert);
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Deep clone a document
|
||||
*/
|
||||
private static deepClone(obj: any): any {
|
||||
if (obj === null || typeof obj !== 'object') {
|
||||
return obj;
|
||||
}
|
||||
|
||||
if (obj instanceof plugins.bson.ObjectId) {
|
||||
return new plugins.bson.ObjectId(obj.toHexString());
|
||||
}
|
||||
|
||||
if (obj instanceof Date) {
|
||||
return new Date(obj.getTime());
|
||||
}
|
||||
|
||||
if (obj instanceof plugins.bson.Timestamp) {
|
||||
return new plugins.bson.Timestamp({ t: obj.high, i: obj.low });
|
||||
}
|
||||
|
||||
if (Array.isArray(obj)) {
|
||||
return obj.map(item => this.deepClone(item));
|
||||
}
|
||||
|
||||
const cloned: any = {};
|
||||
for (const key of Object.keys(obj)) {
|
||||
cloned[key] = this.deepClone(obj[key]);
|
||||
}
|
||||
return cloned;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set a nested value
|
||||
*/
|
||||
private static setNestedValue(obj: any, path: string, value: any): void {
|
||||
const parts = path.split('.');
|
||||
let current = obj;
|
||||
|
||||
for (let i = 0; i < parts.length - 1; i++) {
|
||||
const part = parts[i];
|
||||
|
||||
// Handle array index notation
|
||||
const arrayMatch = part.match(/^(\w+)\[(\d+)\]$/);
|
||||
if (arrayMatch) {
|
||||
const [, fieldName, indexStr] = arrayMatch;
|
||||
const index = parseInt(indexStr, 10);
|
||||
if (!(fieldName in current)) {
|
||||
current[fieldName] = [];
|
||||
}
|
||||
if (!current[fieldName][index]) {
|
||||
current[fieldName][index] = {};
|
||||
}
|
||||
current = current[fieldName][index];
|
||||
continue;
|
||||
}
|
||||
|
||||
// Handle numeric index (array positional)
|
||||
const numIndex = parseInt(part, 10);
|
||||
if (!isNaN(numIndex) && Array.isArray(current)) {
|
||||
if (!current[numIndex]) {
|
||||
current[numIndex] = {};
|
||||
}
|
||||
current = current[numIndex];
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!(part in current) || current[part] === null) {
|
||||
current[part] = {};
|
||||
}
|
||||
current = current[part];
|
||||
}
|
||||
|
||||
const lastPart = parts[parts.length - 1];
|
||||
const numIndex = parseInt(lastPart, 10);
|
||||
if (!isNaN(numIndex) && Array.isArray(current)) {
|
||||
current[numIndex] = value;
|
||||
} else {
|
||||
current[lastPart] = value;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a nested value
|
||||
*/
|
||||
private static getNestedValue(obj: any, path: string): any {
|
||||
return QueryEngine.getNestedValue(obj, path);
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a nested value
|
||||
*/
|
||||
private static deleteNestedValue(obj: any, path: string): void {
|
||||
const parts = path.split('.');
|
||||
let current = obj;
|
||||
|
||||
for (let i = 0; i < parts.length - 1; i++) {
|
||||
const part = parts[i];
|
||||
if (!(part in current)) {
|
||||
return;
|
||||
}
|
||||
current = current[part];
|
||||
}
|
||||
|
||||
delete current[parts[parts.length - 1]];
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Field Update Operators
|
||||
// ============================================================================
|
||||
|
||||
private static applySet(doc: any, fields: Document): void {
|
||||
for (const [path, value] of Object.entries(fields)) {
|
||||
this.setNestedValue(doc, path, this.deepClone(value));
|
||||
}
|
||||
}
|
||||
|
||||
private static applyUnset(doc: any, fields: Document): void {
|
||||
for (const path of Object.keys(fields)) {
|
||||
this.deleteNestedValue(doc, path);
|
||||
}
|
||||
}
|
||||
|
||||
private static applyInc(doc: any, fields: Document): void {
|
||||
for (const [path, value] of Object.entries(fields)) {
|
||||
const current = this.getNestedValue(doc, path) || 0;
|
||||
if (typeof current !== 'number') {
|
||||
throw new Error(`Cannot apply $inc to non-numeric field: ${path}`);
|
||||
}
|
||||
this.setNestedValue(doc, path, current + (value as number));
|
||||
}
|
||||
}
|
||||
|
||||
private static applyMul(doc: any, fields: Document): void {
|
||||
for (const [path, value] of Object.entries(fields)) {
|
||||
const current = this.getNestedValue(doc, path) || 0;
|
||||
if (typeof current !== 'number') {
|
||||
throw new Error(`Cannot apply $mul to non-numeric field: ${path}`);
|
||||
}
|
||||
this.setNestedValue(doc, path, current * (value as number));
|
||||
}
|
||||
}
|
||||
|
||||
private static applyMin(doc: any, fields: Document): void {
|
||||
for (const [path, value] of Object.entries(fields)) {
|
||||
const current = this.getNestedValue(doc, path);
|
||||
if (current === undefined || this.compareValues(value, current) < 0) {
|
||||
this.setNestedValue(doc, path, this.deepClone(value));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static applyMax(doc: any, fields: Document): void {
|
||||
for (const [path, value] of Object.entries(fields)) {
|
||||
const current = this.getNestedValue(doc, path);
|
||||
if (current === undefined || this.compareValues(value, current) > 0) {
|
||||
this.setNestedValue(doc, path, this.deepClone(value));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static applyRename(doc: any, fields: Document): void {
|
||||
for (const [oldPath, newPath] of Object.entries(fields)) {
|
||||
const value = this.getNestedValue(doc, oldPath);
|
||||
if (value !== undefined) {
|
||||
this.deleteNestedValue(doc, oldPath);
|
||||
this.setNestedValue(doc, newPath as string, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static applyCurrentDate(doc: any, fields: Document): void {
|
||||
for (const [path, spec] of Object.entries(fields)) {
|
||||
if (spec === true) {
|
||||
this.setNestedValue(doc, path, new Date());
|
||||
} else if (typeof spec === 'object' && spec.$type === 'date') {
|
||||
this.setNestedValue(doc, path, new Date());
|
||||
} else if (typeof spec === 'object' && spec.$type === 'timestamp') {
|
||||
this.setNestedValue(doc, path, new plugins.bson.Timestamp({ t: Math.floor(Date.now() / 1000), i: 0 }));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Array Update Operators
|
||||
// ============================================================================
|
||||
|
||||
private static applyPush(doc: any, fields: Document, arrayFilters?: Document[]): void {
|
||||
for (const [path, spec] of Object.entries(fields)) {
|
||||
let arr = this.getNestedValue(doc, path);
|
||||
if (arr === undefined) {
|
||||
arr = [];
|
||||
this.setNestedValue(doc, path, arr);
|
||||
}
|
||||
if (!Array.isArray(arr)) {
|
||||
throw new Error(`Cannot apply $push to non-array field: ${path}`);
|
||||
}
|
||||
|
||||
if (spec && typeof spec === 'object' && '$each' in spec) {
|
||||
// $push with modifiers
|
||||
let elements = (spec.$each as any[]).map(e => this.deepClone(e));
|
||||
const position = spec.$position as number | undefined;
|
||||
const slice = spec.$slice as number | undefined;
|
||||
const sortSpec = spec.$sort;
|
||||
|
||||
if (position !== undefined) {
|
||||
arr.splice(position, 0, ...elements);
|
||||
} else {
|
||||
arr.push(...elements);
|
||||
}
|
||||
|
||||
if (sortSpec !== undefined) {
|
||||
if (typeof sortSpec === 'number') {
|
||||
arr.sort((a, b) => (a - b) * sortSpec);
|
||||
} else {
|
||||
// Sort by field(s)
|
||||
const entries = Object.entries(sortSpec as Document);
|
||||
arr.sort((a, b) => {
|
||||
for (const [field, dir] of entries) {
|
||||
const av = this.getNestedValue(a, field);
|
||||
const bv = this.getNestedValue(b, field);
|
||||
const cmp = this.compareValues(av, bv) * (dir as number);
|
||||
if (cmp !== 0) return cmp;
|
||||
}
|
||||
return 0;
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (slice !== undefined) {
|
||||
if (slice >= 0) {
|
||||
arr.splice(slice);
|
||||
} else {
|
||||
arr.splice(0, arr.length + slice);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Simple push
|
||||
arr.push(this.deepClone(spec));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static applyPop(doc: any, fields: Document): void {
|
||||
for (const [path, direction] of Object.entries(fields)) {
|
||||
const arr = this.getNestedValue(doc, path);
|
||||
if (!Array.isArray(arr)) {
|
||||
throw new Error(`Cannot apply $pop to non-array field: ${path}`);
|
||||
}
|
||||
|
||||
if ((direction as number) === 1) {
|
||||
arr.pop();
|
||||
} else {
|
||||
arr.shift();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static applyPull(doc: any, fields: Document, arrayFilters?: Document[]): void {
|
||||
for (const [path, condition] of Object.entries(fields)) {
|
||||
const arr = this.getNestedValue(doc, path);
|
||||
if (!Array.isArray(arr)) {
|
||||
continue; // Skip if not an array
|
||||
}
|
||||
|
||||
if (typeof condition === 'object' && condition !== null && !Array.isArray(condition)) {
|
||||
// Condition is a query filter
|
||||
const hasOperators = Object.keys(condition).some(k => k.startsWith('$'));
|
||||
if (hasOperators) {
|
||||
// Filter using query operators
|
||||
const remaining = arr.filter(item => !QueryEngine.matches(item, condition));
|
||||
arr.length = 0;
|
||||
arr.push(...remaining);
|
||||
} else {
|
||||
// Match documents with all specified fields
|
||||
const remaining = arr.filter(item => {
|
||||
if (typeof item !== 'object' || item === null) {
|
||||
return true;
|
||||
}
|
||||
return !Object.entries(condition).every(([k, v]) => {
|
||||
const itemVal = this.getNestedValue(item, k);
|
||||
return this.valuesEqual(itemVal, v);
|
||||
});
|
||||
});
|
||||
arr.length = 0;
|
||||
arr.push(...remaining);
|
||||
}
|
||||
} else {
|
||||
// Direct value match
|
||||
const remaining = arr.filter(item => !this.valuesEqual(item, condition));
|
||||
arr.length = 0;
|
||||
arr.push(...remaining);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static applyPullAll(doc: any, fields: Document): void {
|
||||
for (const [path, values] of Object.entries(fields)) {
|
||||
const arr = this.getNestedValue(doc, path);
|
||||
if (!Array.isArray(arr)) {
|
||||
continue;
|
||||
}
|
||||
if (!Array.isArray(values)) {
|
||||
throw new Error(`$pullAll requires an array argument`);
|
||||
}
|
||||
|
||||
const valueSet = new Set(values.map(v => JSON.stringify(v)));
|
||||
const remaining = arr.filter(item => !valueSet.has(JSON.stringify(item)));
|
||||
arr.length = 0;
|
||||
arr.push(...remaining);
|
||||
}
|
||||
}
|
||||
|
||||
private static applyAddToSet(doc: any, fields: Document): void {
|
||||
for (const [path, spec] of Object.entries(fields)) {
|
||||
let arr = this.getNestedValue(doc, path);
|
||||
if (arr === undefined) {
|
||||
arr = [];
|
||||
this.setNestedValue(doc, path, arr);
|
||||
}
|
||||
if (!Array.isArray(arr)) {
|
||||
throw new Error(`Cannot apply $addToSet to non-array field: ${path}`);
|
||||
}
|
||||
|
||||
const existingSet = new Set(arr.map(v => JSON.stringify(v)));
|
||||
|
||||
if (spec && typeof spec === 'object' && '$each' in spec) {
|
||||
for (const item of spec.$each as any[]) {
|
||||
const key = JSON.stringify(item);
|
||||
if (!existingSet.has(key)) {
|
||||
arr.push(this.deepClone(item));
|
||||
existingSet.add(key);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
const key = JSON.stringify(spec);
|
||||
if (!existingSet.has(key)) {
|
||||
arr.push(this.deepClone(spec));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static applyBit(doc: any, fields: Document): void {
|
||||
for (const [path, operations] of Object.entries(fields)) {
|
||||
let current = this.getNestedValue(doc, path) || 0;
|
||||
if (typeof current !== 'number') {
|
||||
throw new Error(`Cannot apply $bit to non-numeric field: ${path}`);
|
||||
}
|
||||
|
||||
for (const [op, value] of Object.entries(operations as Document)) {
|
||||
switch (op) {
|
||||
case 'and':
|
||||
current = current & (value as number);
|
||||
break;
|
||||
case 'or':
|
||||
current = current | (value as number);
|
||||
break;
|
||||
case 'xor':
|
||||
current = current ^ (value as number);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
this.setNestedValue(doc, path, current);
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Helper Methods
|
||||
// ============================================================================
|
||||
|
||||
private static compareValues(a: any, b: any): number {
|
||||
if (a === b) return 0;
|
||||
if (a === null || a === undefined) return -1;
|
||||
if (b === null || b === undefined) return 1;
|
||||
|
||||
if (typeof a === 'number' && typeof b === 'number') {
|
||||
return a - b;
|
||||
}
|
||||
|
||||
if (a instanceof Date && b instanceof Date) {
|
||||
return a.getTime() - b.getTime();
|
||||
}
|
||||
|
||||
if (typeof a === 'string' && typeof b === 'string') {
|
||||
return a.localeCompare(b);
|
||||
}
|
||||
|
||||
return String(a).localeCompare(String(b));
|
||||
}
|
||||
|
||||
private static valuesEqual(a: any, b: any): boolean {
|
||||
if (a === b) return true;
|
||||
|
||||
if (a instanceof plugins.bson.ObjectId && b instanceof plugins.bson.ObjectId) {
|
||||
return a.equals(b);
|
||||
}
|
||||
|
||||
if (a instanceof Date && b instanceof Date) {
|
||||
return a.getTime() === b.getTime();
|
||||
}
|
||||
|
||||
if (typeof a === 'object' && typeof b === 'object' && a !== null && b !== null) {
|
||||
return JSON.stringify(a) === JSON.stringify(b);
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user