Compare commits
12 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| e6a36ecb5f | |||
| 6a37a773ea | |||
| 1fff277698 | |||
| 0ad7f316c4 | |||
| 0d450e7d4e | |||
| fff77fbd8e | |||
| 678bf15eb4 | |||
| aa45e9579b | |||
| e3dc19aa7c | |||
| 316af45b5e | |||
| 6932059965 | |||
| bd1764159e |
58
changelog.md
58
changelog.md
@@ -1,5 +1,63 @@
|
|||||||
# Changelog
|
# Changelog
|
||||||
|
|
||||||
|
## 2026-02-03 - 4.3.0 - feat(docs)
|
||||||
|
add LocalTsmDb documentation and examples; update README code samples and imports; correct examples and variable names; update package author
|
||||||
|
|
||||||
|
- Introduce LocalTsmDb: zero-config local database with automatic persistence, auto port discovery, and pre-connected client (added Quick Start, API, Features, and testing examples).
|
||||||
|
- Expand comparison table to include LocalTsmDb alongside SmartMongo and TsmDB.
|
||||||
|
- Update README examples: new LocalTsmDb usage, reorder options (LocalTsmDb, TsmDB, SmartMongo), rename test DB variable (db -> testDb), and adjust test snippets for Jest/Mocha and tap.
|
||||||
|
- Adjust code snippets and API notes: switch some example imports to use tsmdb, replace FileStorageAdapter references, change planner.createPlan to await planner.plan, and use wal.getEntriesAfter(...) without awaiting.
|
||||||
|
- Update package.json author from 'Lossless GmbH' to 'Task Venture Capital GmbH'.
|
||||||
|
|
||||||
|
## 2026-02-03 - 4.2.1 - fix(package.json)
|
||||||
|
replace main and typings with exports field pointing to ./dist_ts/index.js
|
||||||
|
|
||||||
|
- Added package.json exports field mapping "." to ./dist_ts/index.js to declare the package entrypoint.
|
||||||
|
- Removed main (dist_ts/index.js) and typings (dist_ts/index.d.ts) entries.
|
||||||
|
- Note: switching to exports improves Node resolution but removing the typings entry may affect TypeScript consumers expecting index.d.ts.
|
||||||
|
|
||||||
|
## 2026-02-01 - 4.2.0 - feat(tsmdb)
|
||||||
|
implement TsmDB Mongo-wire-compatible server, add storage/engine modules and reorganize exports
|
||||||
|
|
||||||
|
- Add full TsmDB implementation under ts/ts_tsmdb: wire protocol, server, command router, handlers, engines (Query, Update, Aggregation, Index, Transaction, Session), storage adapters (Memory, File), OpLog, WAL, utils and types.
|
||||||
|
- Remove legacy ts/tsmdb implementation and replace with new ts_tsmdb module exports.
|
||||||
|
- Introduce ts/ts_mongotools module and move SmartMongo class there; update top-level exports in ts/index.ts to export SmartMongo, tsmdb (from ts_tsmdb) and LocalTsmDb.
|
||||||
|
- Add LocalTsmDb convenience class (ts/ts_local) to start a file-backed TsmDB and return a connected MongoClient.
|
||||||
|
- Refactor plugin imports into per-module plugins files and add utilities (checksum, persistence, query planner, index engine).
|
||||||
|
|
||||||
|
## 2026-02-01 - 4.1.1 - fix(tsmdb)
|
||||||
|
add comprehensive unit tests for tsmdb components: checksum, query planner, index engine, session, and WAL
|
||||||
|
|
||||||
|
- Add new tests: test.tsmdb.checksum.ts — CRC32 and document checksum utilities (add/verify/remove)
|
||||||
|
- Add new tests: test.tsmdb.queryplanner.ts — QueryPlanner plans, index usage, selectivity, explain output, and edge cases
|
||||||
|
- Add new tests: test.tsmdb.indexengine.ts — Index creation, unique/sparse options, candidate selection, and constraints
|
||||||
|
- Add new tests: test.tsmdb.session.ts — Session lifecycle, touch/refresh/close, extractSessionId handling
|
||||||
|
- Add new tests: test.tsmdb.wal.ts — WAL initialization, LSN increments, logging/recovery for inserts/updates/deletes, binary and nested data handling
|
||||||
|
- Tests only — no production API changes; increases test coverage
|
||||||
|
- Recommend patch bump from 4.1.0 to 4.1.1
|
||||||
|
|
||||||
|
## 2026-02-01 - 4.1.0 - feat(readme)
|
||||||
|
expand README with storage integrity, WAL, query planner, session & transaction docs; update test script to enable verbose logging and increase timeout
|
||||||
|
|
||||||
|
- Updated npm test script to run tstest with --verbose, --logfile and --timeout 60 to improve test output and avoid timeouts.
|
||||||
|
- Extensive README additions: file storage adapter examples with checksum options, write-ahead logging (WAL) usage and recovery, query planner examples, index and query execution details, session and transaction examples and features.
|
||||||
|
- Wire protocol / features table updated to include Transactions and Sessions and added admin commands (dbStats, collStats).
|
||||||
|
- Architecture diagram and component list updated to include QueryPlanner, SessionEngine, TransactionEngine and WAL; storage layer annotated with checksums and WAL.
|
||||||
|
- Minor example import tweak: MongoClient import now includes Db type in test examples.
|
||||||
|
|
||||||
|
## 2026-02-01 - 4.0.0 - BREAKING CHANGE(storage,engine,server)
|
||||||
|
add session & transaction management, index/query planner, WAL and checksum support; integrate index-accelerated queries and update storage API (findByIds) to enable index optimizations
|
||||||
|
|
||||||
|
- Add SessionEngine with session lifecycle, auto-abort of transactions on expiry and session tracking in CommandRouter and AdminHandler.
|
||||||
|
- Introduce TransactionEngine integrations in CommandRouter and AdminHandler; handlers now support start/commit/abort transaction workflows.
|
||||||
|
- Add IndexEngine enhancements including a simple B-tree and hash map optimizations; integrate index usage into Find/Count/Insert/Update/Delete handlers for index-accelerated queries and index maintenance on mutations.
|
||||||
|
- Add QueryPlanner to choose IXSCAN vs COLLSCAN and provide explain plans.
|
||||||
|
- Add WAL (write-ahead log) for durability, with LSNs, checkpoints and recovery APIs.
|
||||||
|
- Add checksum utilities and FileStorageAdapter support for checksums (enableChecksums/strictChecksums), with verification on read and optional strict failure behavior.
|
||||||
|
- IStorageAdapter interface changed to include findByIds; MemoryStorageAdapter and FileStorageAdapter implement findByIds to support index lookups.
|
||||||
|
- Exported API additions: WAL, QueryPlanner, SessionEngine, checksum utilities; CommandRouter now caches IndexEngines and exposes transaction/session engines.
|
||||||
|
- Breaking change: the IStorageAdapter interface change requires third-party storage adapters to implement the new findByIds method.
|
||||||
|
|
||||||
## 2026-02-01 - 3.0.0 - BREAKING CHANGE(tsmdb)
|
## 2026-02-01 - 3.0.0 - BREAKING CHANGE(tsmdb)
|
||||||
rename CongoDB to TsmDB and relocate/rename wire-protocol server implementation and public exports
|
rename CongoDB to TsmDB and relocate/rename wire-protocol server implementation and public exports
|
||||||
|
|
||||||
|
|||||||
11
package.json
11
package.json
@@ -1,15 +1,16 @@
|
|||||||
{
|
{
|
||||||
"name": "@push.rocks/smartmongo",
|
"name": "@push.rocks/smartmongo",
|
||||||
"version": "3.0.0",
|
"version": "4.3.0",
|
||||||
"private": false,
|
"private": false,
|
||||||
"description": "A module for creating and managing a local MongoDB instance for testing purposes.",
|
"description": "A module for creating and managing a local MongoDB instance for testing purposes.",
|
||||||
"main": "dist_ts/index.js",
|
"exports": {
|
||||||
"typings": "dist_ts/index.d.ts",
|
".": "./dist_ts/index.js"
|
||||||
|
},
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"author": "Lossless GmbH",
|
"author": "Task Venture Capital GmbH",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"test": "(tstest test/)",
|
"test": "(tstest test/. --verbose --logfile --timeout 60)",
|
||||||
"build": "(tsbuild --web)",
|
"build": "(tsbuild --web)",
|
||||||
"buildDocs": "tsdoc"
|
"buildDocs": "tsdoc"
|
||||||
},
|
},
|
||||||
|
|||||||
341
readme.md
341
readme.md
@@ -1,6 +1,6 @@
|
|||||||
# @push.rocks/smartmongo
|
# @push.rocks/smartmongo
|
||||||
|
|
||||||
A powerful MongoDB toolkit for testing and development — featuring both a real MongoDB memory server (**SmartMongo**) and an ultra-fast, lightweight wire-protocol-compatible in-memory database server (**TsmDB**). 🚀
|
A powerful MongoDB toolkit for testing and development — featuring a real MongoDB memory server (**SmartMongo**), an ultra-fast wire-protocol-compatible in-memory database server (**TsmDB**), and a zero-config local database (**LocalTsmDb**). 🚀
|
||||||
|
|
||||||
## Install
|
## Install
|
||||||
|
|
||||||
@@ -16,21 +16,79 @@ For reporting bugs, issues, or security vulnerabilities, please visit [community
|
|||||||
|
|
||||||
## Overview
|
## Overview
|
||||||
|
|
||||||
`@push.rocks/smartmongo` provides two powerful approaches for MongoDB in testing and development:
|
`@push.rocks/smartmongo` provides three powerful approaches for MongoDB in testing and development:
|
||||||
|
|
||||||
| Feature | SmartMongo | TsmDB |
|
| Feature | SmartMongo | TsmDB | LocalTsmDb |
|
||||||
|---------|------------|---------|
|
|---------|------------|-------|------------|
|
||||||
| **Type** | Real MongoDB (memory server) | Pure TypeScript wire protocol server |
|
| **Type** | Real MongoDB (memory server) | Wire protocol server | Zero-config local DB |
|
||||||
| **Speed** | ~2-5s startup | ⚡ Instant startup (~5ms) |
|
| **Speed** | ~2-5s startup | ⚡ Instant (~5ms) | ⚡ Instant + auto-connect |
|
||||||
| **Compatibility** | 100% MongoDB | MongoDB driver compatible |
|
| **Compatibility** | 100% MongoDB | MongoDB driver compatible | MongoDB driver compatible |
|
||||||
| **Dependencies** | Downloads MongoDB binary | Zero external dependencies |
|
| **Dependencies** | Downloads MongoDB binary | Zero external deps | Zero external deps |
|
||||||
| **Replication** | ✅ Full replica set support | Single node emulation |
|
| **Replication** | ✅ Full replica set | Single node | Single node |
|
||||||
| **Use Case** | Integration testing | Unit testing, CI/CD |
|
| **Persistence** | Dump to directory | Memory or file | File-based (automatic) |
|
||||||
| **Persistence** | Dump to directory | Optional file/memory persistence |
|
| **Use Case** | Integration testing | Unit testing, CI/CD | Quick prototyping, local dev |
|
||||||
|
|
||||||
## 🚀 Quick Start
|
## 🚀 Quick Start
|
||||||
|
|
||||||
### Option 1: SmartMongo (Real MongoDB)
|
### Option 1: LocalTsmDb (Zero-Config Local Database) ⭐ NEW
|
||||||
|
|
||||||
|
The easiest way to get started — just point it at a folder and you have a persistent MongoDB-compatible database with automatic port discovery!
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { LocalTsmDb } from '@push.rocks/smartmongo';
|
||||||
|
|
||||||
|
// Create a local database backed by files
|
||||||
|
const db = new LocalTsmDb({ folderPath: './my-data' });
|
||||||
|
|
||||||
|
// Start and get a connected MongoDB client
|
||||||
|
const client = await db.start();
|
||||||
|
|
||||||
|
// Use exactly like MongoDB
|
||||||
|
const users = client.db('myapp').collection('users');
|
||||||
|
await users.insertOne({ name: 'Alice', email: 'alice@example.com' });
|
||||||
|
|
||||||
|
const user = await users.findOne({ name: 'Alice' });
|
||||||
|
console.log(user); // { _id: ObjectId(...), name: 'Alice', email: 'alice@example.com' }
|
||||||
|
|
||||||
|
// Data persists to disk automatically!
|
||||||
|
await db.stop();
|
||||||
|
|
||||||
|
// Later... data is still there
|
||||||
|
const db2 = new LocalTsmDb({ folderPath: './my-data' });
|
||||||
|
const client2 = await db2.start();
|
||||||
|
const savedUser = await client2.db('myapp').collection('users').findOne({ name: 'Alice' });
|
||||||
|
// savedUser exists!
|
||||||
|
```
|
||||||
|
|
||||||
|
### Option 2: TsmDB (Wire Protocol Server)
|
||||||
|
|
||||||
|
A lightweight, pure TypeScript MongoDB-compatible server — use the official `mongodb` driver directly!
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { tsmdb } from '@push.rocks/smartmongo';
|
||||||
|
import { MongoClient } from 'mongodb';
|
||||||
|
|
||||||
|
// Start TsmDB server
|
||||||
|
const server = new tsmdb.TsmdbServer({ port: 27017 });
|
||||||
|
await server.start();
|
||||||
|
|
||||||
|
// Connect with the official MongoDB driver
|
||||||
|
const client = new MongoClient('mongodb://127.0.0.1:27017');
|
||||||
|
await client.connect();
|
||||||
|
|
||||||
|
// Use exactly like real MongoDB
|
||||||
|
const db = client.db('myapp');
|
||||||
|
await db.collection('users').insertOne({ name: 'Alice', age: 30 });
|
||||||
|
|
||||||
|
const user = await db.collection('users').findOne({ name: 'Alice' });
|
||||||
|
console.log(user); // { _id: ObjectId(...), name: 'Alice', age: 30 }
|
||||||
|
|
||||||
|
// Clean up
|
||||||
|
await client.close();
|
||||||
|
await server.stop();
|
||||||
|
```
|
||||||
|
|
||||||
|
### Option 3: SmartMongo (Real MongoDB)
|
||||||
|
|
||||||
Spin up a real MongoDB replica set in memory — perfect for integration tests that need full MongoDB compatibility.
|
Spin up a real MongoDB replica set in memory — perfect for integration tests that need full MongoDB compatibility.
|
||||||
|
|
||||||
@@ -51,34 +109,42 @@ console.log(descriptor.mongoDbUrl); // mongodb://127.0.0.1:xxxxx/...
|
|||||||
await mongo.stop();
|
await mongo.stop();
|
||||||
```
|
```
|
||||||
|
|
||||||
### Option 2: TsmDB (Wire Protocol Server)
|
## 📖 LocalTsmDb API
|
||||||
|
|
||||||
A lightweight, pure TypeScript MongoDB-compatible server that speaks the wire protocol — use the official `mongodb` driver directly!
|
The simplest option for local development and prototyping — zero config, auto port discovery, and automatic persistence.
|
||||||
|
|
||||||
|
### Basic Usage
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
import { tsmdb } from '@push.rocks/smartmongo';
|
import { LocalTsmDb } from '@push.rocks/smartmongo';
|
||||||
import { MongoClient } from 'mongodb';
|
|
||||||
|
|
||||||
// Start TsmDB server
|
const db = new LocalTsmDb({
|
||||||
const server = new tsmdb.TsmdbServer({ port: 27017 });
|
folderPath: './data', // Required: where to store data
|
||||||
await server.start();
|
port: 27017, // Optional: defaults to auto-discovery
|
||||||
|
host: '127.0.0.1', // Optional: bind address
|
||||||
|
});
|
||||||
|
|
||||||
// Connect with the official MongoDB driver!
|
// Start and get connected client
|
||||||
const client = new MongoClient('mongodb://127.0.0.1:27017');
|
const client = await db.start();
|
||||||
await client.connect();
|
|
||||||
|
|
||||||
// Use exactly like real MongoDB
|
// Access the underlying server if needed
|
||||||
const db = client.db('myapp');
|
const server = db.getServer();
|
||||||
await db.collection('users').insertOne({ name: 'Alice', age: 30 });
|
const uri = db.getConnectionUri();
|
||||||
|
|
||||||
const user = await db.collection('users').findOne({ name: 'Alice' });
|
// Check status
|
||||||
console.log(user); // { _id: ObjectId(...), name: 'Alice', age: 30 }
|
console.log(db.running); // true
|
||||||
|
|
||||||
// Clean up
|
// Stop when done
|
||||||
await client.close();
|
await db.stop();
|
||||||
await server.stop();
|
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### Features
|
||||||
|
|
||||||
|
- 🔍 **Auto Port Discovery** — Automatically finds an available port if 27017 is in use
|
||||||
|
- 💾 **Automatic Persistence** — Data saved to files, survives restarts
|
||||||
|
- 🔌 **Pre-connected Client** — `start()` returns a ready-to-use MongoDB client
|
||||||
|
- 🎯 **Zero Config** — Just specify a folder path and you're good to go
|
||||||
|
|
||||||
## 📖 SmartMongo API
|
## 📖 SmartMongo API
|
||||||
|
|
||||||
### Creating an Instance
|
### Creating an Instance
|
||||||
@@ -134,8 +200,8 @@ await server.start();
|
|||||||
console.log(server.getConnectionUri()); // mongodb://127.0.0.1:27017
|
console.log(server.getConnectionUri()); // mongodb://127.0.0.1:27017
|
||||||
|
|
||||||
// Server properties
|
// Server properties
|
||||||
console.log(server.running); // true
|
console.log(server.running); // true
|
||||||
console.log(server.getUptime()); // seconds
|
console.log(server.getUptime()); // seconds
|
||||||
console.log(server.getConnectionCount()); // active connections
|
console.log(server.getConnectionCount()); // active connections
|
||||||
|
|
||||||
await server.stop();
|
await server.stop();
|
||||||
@@ -279,7 +345,7 @@ console.log(result.deletedCount); // 1
|
|||||||
|
|
||||||
### Storage Adapters
|
### Storage Adapters
|
||||||
|
|
||||||
TsmDB supports pluggable storage:
|
TsmDB supports pluggable storage with data integrity features:
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
// In-memory (default) - fast, data lost on stop
|
// In-memory (default) - fast, data lost on stop
|
||||||
@@ -292,13 +358,113 @@ const server = new tsmdb.TsmdbServer({
|
|||||||
persistIntervalMs: 30000 // Save every 30 seconds
|
persistIntervalMs: 30000 // Save every 30 seconds
|
||||||
});
|
});
|
||||||
|
|
||||||
// File-based - persistent storage
|
// File-based - persistent storage with optional checksums
|
||||||
|
import { tsmdb } from '@push.rocks/smartmongo';
|
||||||
|
|
||||||
const server = new tsmdb.TsmdbServer({
|
const server = new tsmdb.TsmdbServer({
|
||||||
storage: 'file',
|
storage: 'file',
|
||||||
storagePath: './data/tsmdb'
|
storagePath: './data/tsmdb'
|
||||||
});
|
});
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## ⚡ Performance & Reliability Features
|
||||||
|
|
||||||
|
TsmDB includes enterprise-grade features for robustness:
|
||||||
|
|
||||||
|
### 🔍 Index-Accelerated Queries
|
||||||
|
|
||||||
|
Indexes are automatically used to accelerate queries. Instead of scanning all documents, TsmDB uses:
|
||||||
|
|
||||||
|
- **Hash indexes** for equality queries (`$eq`, `$in`)
|
||||||
|
- **B-tree indexes** for range queries (`$gt`, `$gte`, `$lt`, `$lte`)
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Create an index
|
||||||
|
await collection.createIndex({ email: 1 });
|
||||||
|
await collection.createIndex({ age: 1 });
|
||||||
|
|
||||||
|
// These queries will use the index (fast!)
|
||||||
|
await collection.findOne({ email: 'alice@example.com' }); // Uses hash lookup
|
||||||
|
await collection.find({ age: { $gte: 18, $lt: 65 } }); // Uses B-tree range scan
|
||||||
|
```
|
||||||
|
|
||||||
|
### 📊 Query Planner
|
||||||
|
|
||||||
|
TsmDB includes a query planner that analyzes queries and selects optimal execution strategies:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { tsmdb } from '@push.rocks/smartmongo';
|
||||||
|
|
||||||
|
// For debugging, you can access the query planner
|
||||||
|
const planner = new tsmdb.QueryPlanner(indexEngine);
|
||||||
|
const plan = await planner.plan(filter);
|
||||||
|
|
||||||
|
console.log(plan);
|
||||||
|
// {
|
||||||
|
// type: 'IXSCAN', // or 'IXSCAN_RANGE', 'COLLSCAN'
|
||||||
|
// indexName: 'email_1',
|
||||||
|
// selectivity: 0.01,
|
||||||
|
// indexCovering: true
|
||||||
|
// }
|
||||||
|
```
|
||||||
|
|
||||||
|
### 📝 Write-Ahead Logging (WAL)
|
||||||
|
|
||||||
|
For durability, TsmDB supports write-ahead logging:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { tsmdb } from '@push.rocks/smartmongo';
|
||||||
|
|
||||||
|
const wal = new tsmdb.WAL('./data/wal.log');
|
||||||
|
await wal.initialize();
|
||||||
|
|
||||||
|
// WAL entries include:
|
||||||
|
// - LSN (Log Sequence Number)
|
||||||
|
// - Timestamp
|
||||||
|
// - Operation type (insert, update, delete, checkpoint)
|
||||||
|
// - Document data (BSON serialized)
|
||||||
|
// - CRC32 checksum for integrity
|
||||||
|
|
||||||
|
// Recovery support
|
||||||
|
const entries = wal.getEntriesAfter(lastCheckpointLsn);
|
||||||
|
```
|
||||||
|
|
||||||
|
### 🔐 Session Management
|
||||||
|
|
||||||
|
TsmDB tracks client sessions with automatic timeout and transaction linking:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Sessions are automatically managed when using the MongoDB driver
|
||||||
|
const session = client.startSession();
|
||||||
|
|
||||||
|
try {
|
||||||
|
session.startTransaction();
|
||||||
|
await collection.insertOne({ name: 'Alice' }, { session });
|
||||||
|
await collection.updateOne({ name: 'Bob' }, { $inc: { balance: 100 } }, { session });
|
||||||
|
await session.commitTransaction();
|
||||||
|
} catch (error) {
|
||||||
|
await session.abortTransaction();
|
||||||
|
} finally {
|
||||||
|
session.endSession();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Session features:
|
||||||
|
// - Automatic session timeout (30 minutes default)
|
||||||
|
// - Transaction auto-abort on session expiry
|
||||||
|
// - Session activity tracking
|
||||||
|
```
|
||||||
|
|
||||||
|
### ✅ Data Integrity Checksums
|
||||||
|
|
||||||
|
File-based storage supports CRC32 checksums to detect corruption:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { tsmdb } from '@push.rocks/smartmongo';
|
||||||
|
|
||||||
|
// Checksums are used internally for WAL and data integrity
|
||||||
|
// Documents are checksummed on write, verified on read
|
||||||
|
```
|
||||||
|
|
||||||
### 📋 Supported Wire Protocol Commands
|
### 📋 Supported Wire Protocol Commands
|
||||||
|
|
||||||
| Category | Commands |
|
| Category | Commands |
|
||||||
@@ -307,21 +473,55 @@ const server = new tsmdb.TsmdbServer({
|
|||||||
| **CRUD** | `find`, `insert`, `update`, `delete`, `findAndModify`, `getMore`, `killCursors` |
|
| **CRUD** | `find`, `insert`, `update`, `delete`, `findAndModify`, `getMore`, `killCursors` |
|
||||||
| **Aggregation** | `aggregate`, `count`, `distinct` |
|
| **Aggregation** | `aggregate`, `count`, `distinct` |
|
||||||
| **Indexes** | `createIndexes`, `dropIndexes`, `listIndexes` |
|
| **Indexes** | `createIndexes`, `dropIndexes`, `listIndexes` |
|
||||||
| **Admin** | `ping`, `listDatabases`, `listCollections`, `drop`, `dropDatabase`, `create`, `serverStatus`, `buildInfo` |
|
| **Transactions** | `startTransaction`, `commitTransaction`, `abortTransaction` |
|
||||||
|
| **Sessions** | `startSession`, `endSessions` |
|
||||||
|
| **Admin** | `ping`, `listDatabases`, `listCollections`, `drop`, `dropDatabase`, `create`, `serverStatus`, `buildInfo`, `dbStats`, `collStats` |
|
||||||
|
|
||||||
TsmDB supports MongoDB wire protocol versions 0-21, compatible with MongoDB 3.6 through 7.0 drivers.
|
TsmDB supports MongoDB wire protocol versions 0-21, compatible with MongoDB 3.6 through 7.0 drivers.
|
||||||
|
|
||||||
## 🧪 Testing Examples
|
## 🧪 Testing Examples
|
||||||
|
|
||||||
|
### Jest/Mocha with LocalTsmDb
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { LocalTsmDb } from '@push.rocks/smartmongo';
|
||||||
|
import { MongoClient, Db } from 'mongodb';
|
||||||
|
|
||||||
|
let db: LocalTsmDb;
|
||||||
|
let client: MongoClient;
|
||||||
|
|
||||||
|
beforeAll(async () => {
|
||||||
|
db = new LocalTsmDb({ folderPath: './test-data' });
|
||||||
|
client = await db.start();
|
||||||
|
});
|
||||||
|
|
||||||
|
afterAll(async () => {
|
||||||
|
await db.stop();
|
||||||
|
});
|
||||||
|
|
||||||
|
beforeEach(async () => {
|
||||||
|
// Clean slate for each test
|
||||||
|
await client.db('test').dropDatabase();
|
||||||
|
});
|
||||||
|
|
||||||
|
test('should insert and find user', async () => {
|
||||||
|
const users = client.db('test').collection('users');
|
||||||
|
await users.insertOne({ name: 'Alice', email: 'alice@example.com' });
|
||||||
|
|
||||||
|
const user = await users.findOne({ name: 'Alice' });
|
||||||
|
expect(user?.email).toBe('alice@example.com');
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
### Jest/Mocha with TsmDB
|
### Jest/Mocha with TsmDB
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
import { tsmdb } from '@push.rocks/smartmongo';
|
import { tsmdb } from '@push.rocks/smartmongo';
|
||||||
import { MongoClient } from 'mongodb';
|
import { MongoClient, Db } from 'mongodb';
|
||||||
|
|
||||||
let server: tsmdb.TsmdbServer;
|
let server: tsmdb.TsmdbServer;
|
||||||
let client: MongoClient;
|
let client: MongoClient;
|
||||||
let db: Db;
|
let testDb: Db;
|
||||||
|
|
||||||
beforeAll(async () => {
|
beforeAll(async () => {
|
||||||
server = new tsmdb.TsmdbServer({ port: 27117 });
|
server = new tsmdb.TsmdbServer({ port: 27117 });
|
||||||
@@ -329,7 +529,7 @@ beforeAll(async () => {
|
|||||||
|
|
||||||
client = new MongoClient('mongodb://127.0.0.1:27117');
|
client = new MongoClient('mongodb://127.0.0.1:27117');
|
||||||
await client.connect();
|
await client.connect();
|
||||||
db = client.db('test');
|
testDb = client.db('test');
|
||||||
});
|
});
|
||||||
|
|
||||||
afterAll(async () => {
|
afterAll(async () => {
|
||||||
@@ -338,12 +538,11 @@ afterAll(async () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
beforeEach(async () => {
|
beforeEach(async () => {
|
||||||
// Clean slate for each test
|
await testDb.dropDatabase();
|
||||||
await db.dropDatabase();
|
|
||||||
});
|
});
|
||||||
|
|
||||||
test('should insert and find user', async () => {
|
test('should insert and find user', async () => {
|
||||||
const users = db.collection('users');
|
const users = testDb.collection('users');
|
||||||
await users.insertOne({ name: 'Alice', email: 'alice@example.com' });
|
await users.insertOne({ name: 'Alice', email: 'alice@example.com' });
|
||||||
|
|
||||||
const user = await users.findOne({ name: 'Alice' });
|
const user = await users.findOne({ name: 'Alice' });
|
||||||
@@ -355,22 +554,18 @@ test('should insert and find user', async () => {
|
|||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
import { expect, tap } from '@git.zone/tstest/tapbundle';
|
import { expect, tap } from '@git.zone/tstest/tapbundle';
|
||||||
import { tsmdb } from '@push.rocks/smartmongo';
|
import { LocalTsmDb } from '@push.rocks/smartmongo';
|
||||||
import { MongoClient } from 'mongodb';
|
|
||||||
|
|
||||||
let server: tsmdb.TsmdbServer;
|
let db: LocalTsmDb;
|
||||||
let client: MongoClient;
|
|
||||||
|
|
||||||
tap.test('setup', async () => {
|
tap.test('setup', async () => {
|
||||||
server = new tsmdb.TsmdbServer({ port: 27117 });
|
db = new LocalTsmDb({ folderPath: './test-data' });
|
||||||
await server.start();
|
await db.start();
|
||||||
client = new MongoClient('mongodb://127.0.0.1:27117');
|
|
||||||
await client.connect();
|
|
||||||
});
|
});
|
||||||
|
|
||||||
tap.test('should perform CRUD operations', async () => {
|
tap.test('should perform CRUD operations', async () => {
|
||||||
const db = client.db('test');
|
const client = db.getClient();
|
||||||
const col = db.collection('items');
|
const col = client.db('test').collection('items');
|
||||||
|
|
||||||
// Create
|
// Create
|
||||||
const result = await col.insertOne({ name: 'Widget', price: 9.99 });
|
const result = await col.insertOne({ name: 'Widget', price: 9.99 });
|
||||||
@@ -392,8 +587,7 @@ tap.test('should perform CRUD operations', async () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
tap.test('teardown', async () => {
|
tap.test('teardown', async () => {
|
||||||
await client.close();
|
await db.stop();
|
||||||
await server.stop();
|
|
||||||
});
|
});
|
||||||
|
|
||||||
export default tap.start();
|
export default tap.start();
|
||||||
@@ -401,6 +595,15 @@ export default tap.start();
|
|||||||
|
|
||||||
## 🏗️ Architecture
|
## 🏗️ Architecture
|
||||||
|
|
||||||
|
### Module Structure
|
||||||
|
|
||||||
|
```
|
||||||
|
@push.rocks/smartmongo
|
||||||
|
├── SmartMongo → Real MongoDB memory server (mongodb-memory-server wrapper)
|
||||||
|
├── tsmdb → Wire protocol server with full engine stack
|
||||||
|
└── LocalTsmDb → Zero-config local database (convenience wrapper)
|
||||||
|
```
|
||||||
|
|
||||||
### TsmDB Wire Protocol Stack
|
### TsmDB Wire Protocol Stack
|
||||||
|
|
||||||
```
|
```
|
||||||
@@ -421,21 +624,37 @@ export default tap.start();
|
|||||||
▼
|
▼
|
||||||
┌─────────────────────────────────────────────────────────────┐
|
┌─────────────────────────────────────────────────────────────┐
|
||||||
│ Engines │
|
│ Engines │
|
||||||
│ ┌───────────┐ ┌────────────┐ ┌────────────┐ ┌───────────┐ │
|
│ ┌─────────┐ ┌────────┐ ┌───────────┐ ┌───────┐ ┌───────┐ │
|
||||||
│ │ Query │ │ Update │ │Aggregation │ │ Index │ │
|
│ │ Query │ │ Update │ │Aggregation│ │ Index │ │Session│ │
|
||||||
│ │ Engine │ │ Engine │ │ Engine │ │ Engine │ │
|
│ │ Planner │ │ Engine │ │ Engine │ │Engine │ │Engine │ │
|
||||||
│ └───────────┘ └────────────┘ └────────────┘ └───────────┘ │
|
│ └─────────┘ └────────┘ └───────────┘ └───────┘ └───────┘ │
|
||||||
|
│ ┌──────────────────────┐ │
|
||||||
|
│ │ Transaction Engine │ │
|
||||||
|
│ └──────────────────────┘ │
|
||||||
└─────────────────────────┬───────────────────────────────────┘
|
└─────────────────────────┬───────────────────────────────────┘
|
||||||
│
|
│
|
||||||
▼
|
▼
|
||||||
┌─────────────────────────────────────────────────────────────┐
|
┌─────────────────────────────────────────────────────────────┐
|
||||||
│ Storage Adapters │
|
│ Storage Layer │
|
||||||
│ ┌──────────────────┐ ┌──────────────────┐ │
|
│ ┌──────────────────┐ ┌──────────────────┐ ┌──────────┐ │
|
||||||
│ │ MemoryStorage │ │ FileStorage │ │
|
│ │ MemoryStorage │ │ FileStorage │ │ WAL │ │
|
||||||
│ └──────────────────┘ └──────────────────┘ │
|
│ │ │ │ (+ Checksums) │ │ │ │
|
||||||
|
│ └──────────────────┘ └──────────────────┘ └──────────┘ │
|
||||||
└─────────────────────────────────────────────────────────────┘
|
└─────────────────────────────────────────────────────────────┘
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### Key Components
|
||||||
|
|
||||||
|
| Component | Description |
|
||||||
|
|-----------|-------------|
|
||||||
|
| **WireProtocol** | Parses MongoDB OP_MSG binary protocol |
|
||||||
|
| **CommandRouter** | Routes commands to appropriate handlers |
|
||||||
|
| **QueryPlanner** | Analyzes queries and selects execution strategy |
|
||||||
|
| **IndexEngine** | Manages B-tree and hash indexes |
|
||||||
|
| **SessionEngine** | Tracks client sessions and timeouts |
|
||||||
|
| **TransactionEngine** | Handles ACID transaction semantics |
|
||||||
|
| **WAL** | Write-ahead logging for durability |
|
||||||
|
|
||||||
## License and Legal Information
|
## License and Legal Information
|
||||||
|
|
||||||
This repository contains open-source code licensed under the MIT License. A copy of the license can be found in the [LICENSE](./LICENSE) file.
|
This repository contains open-source code licensed under the MIT License. A copy of the license can be found in the [LICENSE](./LICENSE) file.
|
||||||
|
|||||||
232
test/test.tsmdb.checksum.ts
Normal file
232
test/test.tsmdb.checksum.ts
Normal file
@@ -0,0 +1,232 @@
|
|||||||
|
import { expect, tap } from '@git.zone/tstest/tapbundle';
|
||||||
|
import * as smartmongo from '../ts/index.js';
|
||||||
|
|
||||||
|
const {
|
||||||
|
calculateCRC32,
|
||||||
|
calculateCRC32Buffer,
|
||||||
|
calculateDocumentChecksum,
|
||||||
|
addChecksum,
|
||||||
|
verifyChecksum,
|
||||||
|
removeChecksum,
|
||||||
|
} = smartmongo.tsmdb;
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// CRC32 String Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('checksum: calculateCRC32 should return consistent value for same input', async () => {
|
||||||
|
const result1 = calculateCRC32('hello world');
|
||||||
|
const result2 = calculateCRC32('hello world');
|
||||||
|
expect(result1).toEqual(result2);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: calculateCRC32 should return different values for different inputs', async () => {
|
||||||
|
const result1 = calculateCRC32('hello');
|
||||||
|
const result2 = calculateCRC32('world');
|
||||||
|
expect(result1).not.toEqual(result2);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: calculateCRC32 should return a 32-bit unsigned integer', async () => {
|
||||||
|
const result = calculateCRC32('test string');
|
||||||
|
expect(result).toBeGreaterThanOrEqual(0);
|
||||||
|
expect(result).toBeLessThanOrEqual(0xFFFFFFFF);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: calculateCRC32 should handle empty string', async () => {
|
||||||
|
const result = calculateCRC32('');
|
||||||
|
expect(typeof result).toEqual('number');
|
||||||
|
expect(result).toBeGreaterThanOrEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: calculateCRC32 should handle special characters', async () => {
|
||||||
|
const result = calculateCRC32('hello\nworld\t!"#$%&\'()');
|
||||||
|
expect(typeof result).toEqual('number');
|
||||||
|
expect(result).toBeGreaterThanOrEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: calculateCRC32 should handle unicode characters', async () => {
|
||||||
|
const result = calculateCRC32('hello 世界 🌍');
|
||||||
|
expect(typeof result).toEqual('number');
|
||||||
|
expect(result).toBeGreaterThanOrEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// CRC32 Buffer Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('checksum: calculateCRC32Buffer should return consistent value for same input', async () => {
|
||||||
|
const buffer = Buffer.from('hello world');
|
||||||
|
const result1 = calculateCRC32Buffer(buffer);
|
||||||
|
const result2 = calculateCRC32Buffer(buffer);
|
||||||
|
expect(result1).toEqual(result2);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: calculateCRC32Buffer should return different values for different inputs', async () => {
|
||||||
|
const buffer1 = Buffer.from('hello');
|
||||||
|
const buffer2 = Buffer.from('world');
|
||||||
|
const result1 = calculateCRC32Buffer(buffer1);
|
||||||
|
const result2 = calculateCRC32Buffer(buffer2);
|
||||||
|
expect(result1).not.toEqual(result2);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: calculateCRC32Buffer should handle empty buffer', async () => {
|
||||||
|
const result = calculateCRC32Buffer(Buffer.from(''));
|
||||||
|
expect(typeof result).toEqual('number');
|
||||||
|
expect(result).toBeGreaterThanOrEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: calculateCRC32Buffer should handle binary data', async () => {
|
||||||
|
const buffer = Buffer.from([0x00, 0xFF, 0x7F, 0x80, 0x01]);
|
||||||
|
const result = calculateCRC32Buffer(buffer);
|
||||||
|
expect(typeof result).toEqual('number');
|
||||||
|
expect(result).toBeGreaterThanOrEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Document Checksum Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('checksum: calculateDocumentChecksum should return consistent value', async () => {
|
||||||
|
const doc = { name: 'John', age: 30 };
|
||||||
|
const result1 = calculateDocumentChecksum(doc);
|
||||||
|
const result2 = calculateDocumentChecksum(doc);
|
||||||
|
expect(result1).toEqual(result2);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: calculateDocumentChecksum should exclude _checksum field', async () => {
|
||||||
|
const doc1 = { name: 'John', age: 30 };
|
||||||
|
const doc2 = { name: 'John', age: 30, _checksum: 12345 };
|
||||||
|
const result1 = calculateDocumentChecksum(doc1);
|
||||||
|
const result2 = calculateDocumentChecksum(doc2);
|
||||||
|
expect(result1).toEqual(result2);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: calculateDocumentChecksum should handle empty document', async () => {
|
||||||
|
const result = calculateDocumentChecksum({});
|
||||||
|
expect(typeof result).toEqual('number');
|
||||||
|
expect(result).toBeGreaterThanOrEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: calculateDocumentChecksum should handle nested objects', async () => {
|
||||||
|
const doc = {
|
||||||
|
name: 'John',
|
||||||
|
address: {
|
||||||
|
street: '123 Main St',
|
||||||
|
city: 'Springfield',
|
||||||
|
zip: {
|
||||||
|
code: '12345',
|
||||||
|
plus4: '6789',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
const result = calculateDocumentChecksum(doc);
|
||||||
|
expect(typeof result).toEqual('number');
|
||||||
|
expect(result).toBeGreaterThanOrEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: calculateDocumentChecksum should handle arrays', async () => {
|
||||||
|
const doc = {
|
||||||
|
name: 'John',
|
||||||
|
tags: ['developer', 'tester', 'admin'],
|
||||||
|
scores: [95, 87, 92],
|
||||||
|
};
|
||||||
|
const result = calculateDocumentChecksum(doc);
|
||||||
|
expect(typeof result).toEqual('number');
|
||||||
|
expect(result).toBeGreaterThanOrEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Add/Verify/Remove Checksum Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('checksum: addChecksum should add _checksum field to document', async () => {
|
||||||
|
const doc = { name: 'John', age: 30 };
|
||||||
|
const docWithChecksum = addChecksum(doc);
|
||||||
|
|
||||||
|
expect('_checksum' in docWithChecksum).toBeTrue();
|
||||||
|
expect(typeof docWithChecksum._checksum).toEqual('number');
|
||||||
|
expect(docWithChecksum.name).toEqual('John');
|
||||||
|
expect(docWithChecksum.age).toEqual(30);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: addChecksum should not modify the original document', async () => {
|
||||||
|
const doc = { name: 'John', age: 30 };
|
||||||
|
addChecksum(doc);
|
||||||
|
expect('_checksum' in doc).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: verifyChecksum should return true for valid checksum', async () => {
|
||||||
|
const doc = { name: 'John', age: 30 };
|
||||||
|
const docWithChecksum = addChecksum(doc);
|
||||||
|
const isValid = verifyChecksum(docWithChecksum);
|
||||||
|
expect(isValid).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: verifyChecksum should return false for tampered document', async () => {
|
||||||
|
const doc = { name: 'John', age: 30 };
|
||||||
|
const docWithChecksum = addChecksum(doc);
|
||||||
|
|
||||||
|
// Tamper with the document
|
||||||
|
docWithChecksum.age = 31;
|
||||||
|
|
||||||
|
const isValid = verifyChecksum(docWithChecksum);
|
||||||
|
expect(isValid).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: verifyChecksum should return false for wrong checksum', async () => {
|
||||||
|
const doc = { name: 'John', age: 30, _checksum: 12345 };
|
||||||
|
const isValid = verifyChecksum(doc);
|
||||||
|
expect(isValid).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: verifyChecksum should return true for document without checksum', async () => {
|
||||||
|
const doc = { name: 'John', age: 30 };
|
||||||
|
const isValid = verifyChecksum(doc);
|
||||||
|
expect(isValid).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: removeChecksum should remove _checksum field', async () => {
|
||||||
|
const doc = { name: 'John', age: 30 };
|
||||||
|
const docWithChecksum = addChecksum(doc);
|
||||||
|
const docWithoutChecksum = removeChecksum(docWithChecksum);
|
||||||
|
|
||||||
|
expect('_checksum' in docWithoutChecksum).toBeFalse();
|
||||||
|
expect(docWithoutChecksum.name).toEqual('John');
|
||||||
|
expect(docWithoutChecksum.age).toEqual(30);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: removeChecksum should handle document without checksum', async () => {
|
||||||
|
const doc = { name: 'John', age: 30 };
|
||||||
|
const result = removeChecksum(doc);
|
||||||
|
|
||||||
|
expect('_checksum' in result).toBeFalse();
|
||||||
|
expect(result.name).toEqual('John');
|
||||||
|
expect(result.age).toEqual(30);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Round-trip Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('checksum: full round-trip - add, verify, remove', async () => {
|
||||||
|
const original = { name: 'Test', value: 42, nested: { a: 1, b: 2 } };
|
||||||
|
|
||||||
|
// Add checksum
|
||||||
|
const withChecksum = addChecksum(original);
|
||||||
|
expect('_checksum' in withChecksum).toBeTrue();
|
||||||
|
|
||||||
|
// Verify checksum
|
||||||
|
expect(verifyChecksum(withChecksum)).toBeTrue();
|
||||||
|
|
||||||
|
// Remove checksum
|
||||||
|
const restored = removeChecksum(withChecksum);
|
||||||
|
expect('_checksum' in restored).toBeFalse();
|
||||||
|
|
||||||
|
// Original data should be intact
|
||||||
|
expect(restored.name).toEqual('Test');
|
||||||
|
expect(restored.value).toEqual(42);
|
||||||
|
expect(restored.nested.a).toEqual(1);
|
||||||
|
expect(restored.nested.b).toEqual(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
export default tap.start();
|
||||||
417
test/test.tsmdb.indexengine.ts
Normal file
417
test/test.tsmdb.indexengine.ts
Normal file
@@ -0,0 +1,417 @@
|
|||||||
|
import { expect, tap } from '@git.zone/tstest/tapbundle';
|
||||||
|
import * as smartmongo from '../ts/index.js';
|
||||||
|
|
||||||
|
const { IndexEngine, MemoryStorageAdapter, ObjectId } = smartmongo.tsmdb;
|
||||||
|
|
||||||
|
let storage: InstanceType<typeof MemoryStorageAdapter>;
|
||||||
|
let indexEngine: InstanceType<typeof IndexEngine>;
|
||||||
|
|
||||||
|
const TEST_DB = 'testdb';
|
||||||
|
const TEST_COLL = 'indextest';
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Setup
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('indexengine: should create IndexEngine instance', async () => {
|
||||||
|
storage = new MemoryStorageAdapter();
|
||||||
|
await storage.initialize();
|
||||||
|
await storage.createCollection(TEST_DB, TEST_COLL);
|
||||||
|
|
||||||
|
indexEngine = new IndexEngine(TEST_DB, TEST_COLL, storage);
|
||||||
|
expect(indexEngine).toBeTruthy();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Index Creation Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('indexengine: createIndex should create single-field index', async () => {
|
||||||
|
const indexName = await indexEngine.createIndex({ name: 1 });
|
||||||
|
|
||||||
|
expect(indexName).toEqual('name_1');
|
||||||
|
|
||||||
|
const exists = await indexEngine.indexExists('name_1');
|
||||||
|
expect(exists).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: createIndex should create compound index', async () => {
|
||||||
|
const indexName = await indexEngine.createIndex({ city: 1, state: -1 });
|
||||||
|
|
||||||
|
expect(indexName).toEqual('city_1_state_-1');
|
||||||
|
|
||||||
|
const exists = await indexEngine.indexExists('city_1_state_-1');
|
||||||
|
expect(exists).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: createIndex should use custom name if provided', async () => {
|
||||||
|
const indexName = await indexEngine.createIndex({ email: 1 }, { name: 'custom_email_index' });
|
||||||
|
|
||||||
|
expect(indexName).toEqual('custom_email_index');
|
||||||
|
|
||||||
|
const exists = await indexEngine.indexExists('custom_email_index');
|
||||||
|
expect(exists).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: createIndex should handle unique option', async () => {
|
||||||
|
const indexName = await indexEngine.createIndex({ uniqueField: 1 }, { unique: true });
|
||||||
|
|
||||||
|
expect(indexName).toEqual('uniqueField_1');
|
||||||
|
|
||||||
|
const indexes = await indexEngine.listIndexes();
|
||||||
|
const uniqueIndex = indexes.find(i => i.name === 'uniqueField_1');
|
||||||
|
expect(uniqueIndex!.unique).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: createIndex should handle sparse option', async () => {
|
||||||
|
const indexName = await indexEngine.createIndex({ sparseField: 1 }, { sparse: true });
|
||||||
|
|
||||||
|
expect(indexName).toEqual('sparseField_1');
|
||||||
|
|
||||||
|
const indexes = await indexEngine.listIndexes();
|
||||||
|
const sparseIndex = indexes.find(i => i.name === 'sparseField_1');
|
||||||
|
expect(sparseIndex!.sparse).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: createIndex should return existing index name if already exists', async () => {
|
||||||
|
const indexName1 = await indexEngine.createIndex({ existingField: 1 }, { name: 'existing_idx' });
|
||||||
|
const indexName2 = await indexEngine.createIndex({ existingField: 1 }, { name: 'existing_idx' });
|
||||||
|
|
||||||
|
expect(indexName1).toEqual(indexName2);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Index Listing and Existence Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('indexengine: listIndexes should return all indexes', async () => {
|
||||||
|
const indexes = await indexEngine.listIndexes();
|
||||||
|
|
||||||
|
expect(indexes.length).toBeGreaterThanOrEqual(5); // _id_ + created indexes
|
||||||
|
expect(indexes.some(i => i.name === '_id_')).toBeTrue();
|
||||||
|
expect(indexes.some(i => i.name === 'name_1')).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: indexExists should return true for existing index', async () => {
|
||||||
|
const exists = await indexEngine.indexExists('name_1');
|
||||||
|
expect(exists).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: indexExists should return false for non-existent index', async () => {
|
||||||
|
const exists = await indexEngine.indexExists('nonexistent_index');
|
||||||
|
expect(exists).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Document Operations and Index Updates
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('indexengine: should insert documents for index testing', async () => {
|
||||||
|
// Create a fresh index engine for document operations
|
||||||
|
await storage.dropCollection(TEST_DB, TEST_COLL);
|
||||||
|
await storage.createCollection(TEST_DB, TEST_COLL);
|
||||||
|
|
||||||
|
indexEngine = new IndexEngine(TEST_DB, TEST_COLL, storage);
|
||||||
|
|
||||||
|
// Create indexes first
|
||||||
|
await indexEngine.createIndex({ age: 1 });
|
||||||
|
await indexEngine.createIndex({ category: 1 });
|
||||||
|
|
||||||
|
// Insert test documents
|
||||||
|
const docs = [
|
||||||
|
{ _id: new ObjectId(), name: 'Alice', age: 25, category: 'A' },
|
||||||
|
{ _id: new ObjectId(), name: 'Bob', age: 30, category: 'B' },
|
||||||
|
{ _id: new ObjectId(), name: 'Charlie', age: 35, category: 'A' },
|
||||||
|
{ _id: new ObjectId(), name: 'Diana', age: 28, category: 'C' },
|
||||||
|
{ _id: new ObjectId(), name: 'Eve', age: 30, category: 'B' },
|
||||||
|
];
|
||||||
|
|
||||||
|
for (const doc of docs) {
|
||||||
|
const stored = await storage.insertOne(TEST_DB, TEST_COLL, doc);
|
||||||
|
await indexEngine.onInsert(stored);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: onInsert should update indexes', async () => {
|
||||||
|
const newDoc = {
|
||||||
|
_id: new ObjectId(),
|
||||||
|
name: 'Frank',
|
||||||
|
age: 40,
|
||||||
|
category: 'D',
|
||||||
|
};
|
||||||
|
|
||||||
|
const stored = await storage.insertOne(TEST_DB, TEST_COLL, newDoc);
|
||||||
|
await indexEngine.onInsert(stored);
|
||||||
|
|
||||||
|
// Find by the indexed field
|
||||||
|
const candidates = await indexEngine.findCandidateIds({ age: 40 });
|
||||||
|
expect(candidates).toBeTruthy();
|
||||||
|
expect(candidates!.size).toEqual(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: onUpdate should update indexes correctly', async () => {
|
||||||
|
// Get an existing document
|
||||||
|
const docs = await storage.findAll(TEST_DB, TEST_COLL);
|
||||||
|
const oldDoc = docs.find(d => d.name === 'Alice')!;
|
||||||
|
|
||||||
|
// Update the document
|
||||||
|
const newDoc = { ...oldDoc, age: 26 };
|
||||||
|
await storage.updateById(TEST_DB, TEST_COLL, oldDoc._id, newDoc);
|
||||||
|
await indexEngine.onUpdate(oldDoc, newDoc);
|
||||||
|
|
||||||
|
// Old value should not be in index
|
||||||
|
const oldCandidates = await indexEngine.findCandidateIds({ age: 25 });
|
||||||
|
expect(oldCandidates).toBeTruthy();
|
||||||
|
expect(oldCandidates!.has(oldDoc._id.toHexString())).toBeFalse();
|
||||||
|
|
||||||
|
// New value should be in index
|
||||||
|
const newCandidates = await indexEngine.findCandidateIds({ age: 26 });
|
||||||
|
expect(newCandidates).toBeTruthy();
|
||||||
|
expect(newCandidates!.has(oldDoc._id.toHexString())).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: onDelete should remove from indexes', async () => {
|
||||||
|
const docs = await storage.findAll(TEST_DB, TEST_COLL);
|
||||||
|
const docToDelete = docs.find(d => d.name === 'Frank')!;
|
||||||
|
|
||||||
|
await storage.deleteById(TEST_DB, TEST_COLL, docToDelete._id);
|
||||||
|
await indexEngine.onDelete(docToDelete);
|
||||||
|
|
||||||
|
const candidates = await indexEngine.findCandidateIds({ age: 40 });
|
||||||
|
expect(candidates).toBeTruthy();
|
||||||
|
expect(candidates!.has(docToDelete._id.toHexString())).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// findCandidateIds Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('indexengine: findCandidateIds with equality filter', async () => {
|
||||||
|
const candidates = await indexEngine.findCandidateIds({ age: 30 });
|
||||||
|
|
||||||
|
expect(candidates).toBeTruthy();
|
||||||
|
expect(candidates!.size).toEqual(2); // Bob and Eve both have age 30
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: findCandidateIds with $in filter', async () => {
|
||||||
|
const candidates = await indexEngine.findCandidateIds({ age: { $in: [28, 30] } });
|
||||||
|
|
||||||
|
expect(candidates).toBeTruthy();
|
||||||
|
expect(candidates!.size).toEqual(3); // Diana (28), Bob (30), Eve (30)
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: findCandidateIds with no matching index', async () => {
|
||||||
|
const candidates = await indexEngine.findCandidateIds({ nonIndexedField: 'value' });
|
||||||
|
|
||||||
|
// Should return null when no index can be used
|
||||||
|
expect(candidates).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: findCandidateIds with empty filter', async () => {
|
||||||
|
const candidates = await indexEngine.findCandidateIds({});
|
||||||
|
|
||||||
|
// Empty filter = no index can be used
|
||||||
|
expect(candidates).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Range Query Tests (B-Tree)
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('indexengine: findCandidateIds with $gt', async () => {
|
||||||
|
const candidates = await indexEngine.findCandidateIds({ age: { $gt: 30 } });
|
||||||
|
|
||||||
|
expect(candidates).toBeTruthy();
|
||||||
|
// Charlie (35) is > 30
|
||||||
|
expect(candidates!.size).toBeGreaterThanOrEqual(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: findCandidateIds with $lt', async () => {
|
||||||
|
const candidates = await indexEngine.findCandidateIds({ age: { $lt: 28 } });
|
||||||
|
|
||||||
|
expect(candidates).toBeTruthy();
|
||||||
|
// Alice (26) is < 28
|
||||||
|
expect(candidates!.size).toBeGreaterThanOrEqual(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: findCandidateIds with $gte', async () => {
|
||||||
|
const candidates = await indexEngine.findCandidateIds({ age: { $gte: 30 } });
|
||||||
|
|
||||||
|
expect(candidates).toBeTruthy();
|
||||||
|
// Bob (30), Eve (30), Charlie (35)
|
||||||
|
expect(candidates!.size).toBeGreaterThanOrEqual(3);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: findCandidateIds with $lte', async () => {
|
||||||
|
const candidates = await indexEngine.findCandidateIds({ age: { $lte: 28 } });
|
||||||
|
|
||||||
|
expect(candidates).toBeTruthy();
|
||||||
|
// Alice (26), Diana (28)
|
||||||
|
expect(candidates!.size).toBeGreaterThanOrEqual(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: findCandidateIds with range $gt and $lt', async () => {
|
||||||
|
const candidates = await indexEngine.findCandidateIds({ age: { $gt: 26, $lt: 35 } });
|
||||||
|
|
||||||
|
expect(candidates).toBeTruthy();
|
||||||
|
// Diana (28), Bob (30), Eve (30) are between 26 and 35 exclusive
|
||||||
|
expect(candidates!.size).toBeGreaterThanOrEqual(3);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Index Selection Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('indexengine: selectIndex should return best index for equality', async () => {
|
||||||
|
const result = indexEngine.selectIndex({ age: 30 });
|
||||||
|
|
||||||
|
expect(result).toBeTruthy();
|
||||||
|
expect(result!.name).toEqual('age_1');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: selectIndex should return best index for range query', async () => {
|
||||||
|
const result = indexEngine.selectIndex({ age: { $gt: 25 } });
|
||||||
|
|
||||||
|
expect(result).toBeTruthy();
|
||||||
|
expect(result!.name).toEqual('age_1');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: selectIndex should return null for no matching filter', async () => {
|
||||||
|
const result = indexEngine.selectIndex({ nonIndexedField: 'value' });
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: selectIndex should return null for empty filter', async () => {
|
||||||
|
const result = indexEngine.selectIndex({});
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: selectIndex should prefer more specific indexes', async () => {
|
||||||
|
// Create a compound index
|
||||||
|
await indexEngine.createIndex({ age: 1, category: 1 }, { name: 'age_category_compound' });
|
||||||
|
|
||||||
|
// Query that matches compound index
|
||||||
|
const result = indexEngine.selectIndex({ age: 30, category: 'B' });
|
||||||
|
|
||||||
|
expect(result).toBeTruthy();
|
||||||
|
// Should prefer the compound index since it covers more fields
|
||||||
|
expect(result!.name).toEqual('age_category_compound');
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Drop Index Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('indexengine: dropIndex should remove the index', async () => {
|
||||||
|
await indexEngine.createIndex({ dropTest: 1 }, { name: 'drop_test_idx' });
|
||||||
|
expect(await indexEngine.indexExists('drop_test_idx')).toBeTrue();
|
||||||
|
|
||||||
|
await indexEngine.dropIndex('drop_test_idx');
|
||||||
|
expect(await indexEngine.indexExists('drop_test_idx')).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: dropIndex should throw for _id index', async () => {
|
||||||
|
let threw = false;
|
||||||
|
try {
|
||||||
|
await indexEngine.dropIndex('_id_');
|
||||||
|
} catch (e) {
|
||||||
|
threw = true;
|
||||||
|
}
|
||||||
|
expect(threw).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: dropIndex should throw for non-existent index', async () => {
|
||||||
|
let threw = false;
|
||||||
|
try {
|
||||||
|
await indexEngine.dropIndex('nonexistent_index');
|
||||||
|
} catch (e) {
|
||||||
|
threw = true;
|
||||||
|
}
|
||||||
|
expect(threw).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: dropAllIndexes should remove all indexes except _id', async () => {
|
||||||
|
// Create some indexes to drop
|
||||||
|
await indexEngine.createIndex({ toDrop1: 1 });
|
||||||
|
await indexEngine.createIndex({ toDrop2: 1 });
|
||||||
|
|
||||||
|
await indexEngine.dropAllIndexes();
|
||||||
|
|
||||||
|
const indexes = await indexEngine.listIndexes();
|
||||||
|
expect(indexes.length).toEqual(1);
|
||||||
|
expect(indexes[0].name).toEqual('_id_');
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Unique Index Constraint Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('indexengine: unique index should prevent duplicate inserts', async () => {
|
||||||
|
// Create fresh collection
|
||||||
|
await storage.dropCollection(TEST_DB, 'uniquetest');
|
||||||
|
await storage.createCollection(TEST_DB, 'uniquetest');
|
||||||
|
|
||||||
|
const uniqueIndexEngine = new IndexEngine(TEST_DB, 'uniquetest', storage);
|
||||||
|
await uniqueIndexEngine.createIndex({ email: 1 }, { unique: true });
|
||||||
|
|
||||||
|
// Insert first document
|
||||||
|
const doc1 = { _id: new ObjectId(), email: 'test@example.com', name: 'Test' };
|
||||||
|
const stored1 = await storage.insertOne(TEST_DB, 'uniquetest', doc1);
|
||||||
|
await uniqueIndexEngine.onInsert(stored1);
|
||||||
|
|
||||||
|
// Try to insert duplicate
|
||||||
|
const doc2 = { _id: new ObjectId(), email: 'test@example.com', name: 'Test2' };
|
||||||
|
const stored2 = await storage.insertOne(TEST_DB, 'uniquetest', doc2);
|
||||||
|
|
||||||
|
let threw = false;
|
||||||
|
try {
|
||||||
|
await uniqueIndexEngine.onInsert(stored2);
|
||||||
|
} catch (e: any) {
|
||||||
|
threw = true;
|
||||||
|
expect(e.message).toContain('duplicate key');
|
||||||
|
}
|
||||||
|
|
||||||
|
expect(threw).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Sparse Index Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('indexengine: sparse index should not include documents without the field', async () => {
|
||||||
|
// Create fresh collection
|
||||||
|
await storage.dropCollection(TEST_DB, 'sparsetest');
|
||||||
|
await storage.createCollection(TEST_DB, 'sparsetest');
|
||||||
|
|
||||||
|
const sparseIndexEngine = new IndexEngine(TEST_DB, 'sparsetest', storage);
|
||||||
|
await sparseIndexEngine.createIndex({ optionalField: 1 }, { sparse: true });
|
||||||
|
|
||||||
|
// Insert doc with the field
|
||||||
|
const doc1 = { _id: new ObjectId(), optionalField: 'hasValue', name: 'HasField' };
|
||||||
|
const stored1 = await storage.insertOne(TEST_DB, 'sparsetest', doc1);
|
||||||
|
await sparseIndexEngine.onInsert(stored1);
|
||||||
|
|
||||||
|
// Insert doc without the field
|
||||||
|
const doc2 = { _id: new ObjectId(), name: 'NoField' };
|
||||||
|
const stored2 = await storage.insertOne(TEST_DB, 'sparsetest', doc2);
|
||||||
|
await sparseIndexEngine.onInsert(stored2);
|
||||||
|
|
||||||
|
// Search for documents with the field
|
||||||
|
const candidates = await sparseIndexEngine.findCandidateIds({ optionalField: 'hasValue' });
|
||||||
|
expect(candidates).toBeTruthy();
|
||||||
|
expect(candidates!.size).toEqual(1);
|
||||||
|
expect(candidates!.has(stored1._id.toHexString())).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Cleanup
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('indexengine: cleanup', async () => {
|
||||||
|
await storage.close();
|
||||||
|
expect(true).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
export default tap.start();
|
||||||
273
test/test.tsmdb.queryplanner.ts
Normal file
273
test/test.tsmdb.queryplanner.ts
Normal file
@@ -0,0 +1,273 @@
|
|||||||
|
import { expect, tap } from '@git.zone/tstest/tapbundle';
|
||||||
|
import * as smartmongo from '../ts/index.js';
|
||||||
|
|
||||||
|
const { QueryPlanner, IndexEngine, MemoryStorageAdapter, ObjectId } = smartmongo.tsmdb;
|
||||||
|
|
||||||
|
let storage: InstanceType<typeof MemoryStorageAdapter>;
|
||||||
|
let indexEngine: InstanceType<typeof IndexEngine>;
|
||||||
|
let queryPlanner: InstanceType<typeof QueryPlanner>;
|
||||||
|
|
||||||
|
const TEST_DB = 'testdb';
|
||||||
|
const TEST_COLL = 'testcoll';
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Setup
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('queryplanner: should create QueryPlanner instance', async () => {
|
||||||
|
storage = new MemoryStorageAdapter();
|
||||||
|
await storage.initialize();
|
||||||
|
await storage.createCollection(TEST_DB, TEST_COLL);
|
||||||
|
|
||||||
|
indexEngine = new IndexEngine(TEST_DB, TEST_COLL, storage);
|
||||||
|
queryPlanner = new QueryPlanner(indexEngine);
|
||||||
|
|
||||||
|
expect(queryPlanner).toBeTruthy();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('queryplanner: should insert test documents', async () => {
|
||||||
|
// Insert test documents
|
||||||
|
const docs = [
|
||||||
|
{ _id: new ObjectId(), name: 'Alice', age: 25, city: 'NYC', category: 'A' },
|
||||||
|
{ _id: new ObjectId(), name: 'Bob', age: 30, city: 'LA', category: 'B' },
|
||||||
|
{ _id: new ObjectId(), name: 'Charlie', age: 35, city: 'NYC', category: 'A' },
|
||||||
|
{ _id: new ObjectId(), name: 'Diana', age: 28, city: 'Chicago', category: 'C' },
|
||||||
|
{ _id: new ObjectId(), name: 'Eve', age: 32, city: 'LA', category: 'B' },
|
||||||
|
];
|
||||||
|
|
||||||
|
for (const doc of docs) {
|
||||||
|
await storage.insertOne(TEST_DB, TEST_COLL, doc);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Basic Plan Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('queryplanner: empty filter should result in COLLSCAN', async () => {
|
||||||
|
const plan = await queryPlanner.plan({});
|
||||||
|
|
||||||
|
expect(plan.type).toEqual('COLLSCAN');
|
||||||
|
expect(plan.indexCovering).toBeFalse();
|
||||||
|
expect(plan.selectivity).toEqual(1.0);
|
||||||
|
expect(plan.explanation).toContain('No filter');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('queryplanner: null filter should result in COLLSCAN', async () => {
|
||||||
|
const plan = await queryPlanner.plan(null as any);
|
||||||
|
|
||||||
|
expect(plan.type).toEqual('COLLSCAN');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('queryplanner: filter with no matching index should result in COLLSCAN', async () => {
|
||||||
|
const plan = await queryPlanner.plan({ nonExistentField: 'value' });
|
||||||
|
|
||||||
|
expect(plan.type).toEqual('COLLSCAN');
|
||||||
|
expect(plan.explanation).toContain('No suitable index');
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Index Scan Tests (with indexes)
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('queryplanner: should create test indexes', async () => {
|
||||||
|
await indexEngine.createIndex({ age: 1 }, { name: 'age_1' });
|
||||||
|
await indexEngine.createIndex({ name: 1 }, { name: 'name_1' });
|
||||||
|
await indexEngine.createIndex({ city: 1, category: 1 }, { name: 'city_category_1' });
|
||||||
|
|
||||||
|
const indexes = await indexEngine.listIndexes();
|
||||||
|
expect(indexes.length).toBeGreaterThanOrEqual(4); // _id_ + 3 created
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('queryplanner: simple equality filter should use IXSCAN', async () => {
|
||||||
|
const plan = await queryPlanner.plan({ age: 30 });
|
||||||
|
|
||||||
|
expect(plan.type).toEqual('IXSCAN');
|
||||||
|
expect(plan.indexName).toEqual('age_1');
|
||||||
|
expect(plan.indexFieldsUsed).toContain('age');
|
||||||
|
expect(plan.usesRange).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('queryplanner: $eq operator should use IXSCAN', async () => {
|
||||||
|
const plan = await queryPlanner.plan({ name: { $eq: 'Alice' } });
|
||||||
|
|
||||||
|
expect(plan.type).toEqual('IXSCAN');
|
||||||
|
expect(plan.indexName).toEqual('name_1');
|
||||||
|
expect(plan.indexFieldsUsed).toContain('name');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('queryplanner: range filter ($gt) should use IXSCAN_RANGE', async () => {
|
||||||
|
const plan = await queryPlanner.plan({ age: { $gt: 25 } });
|
||||||
|
|
||||||
|
expect(plan.type).toEqual('IXSCAN_RANGE');
|
||||||
|
expect(plan.indexName).toEqual('age_1');
|
||||||
|
expect(plan.usesRange).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('queryplanner: range filter ($lt) should use IXSCAN_RANGE', async () => {
|
||||||
|
const plan = await queryPlanner.plan({ age: { $lt: 35 } });
|
||||||
|
|
||||||
|
expect(plan.type).toEqual('IXSCAN_RANGE');
|
||||||
|
expect(plan.usesRange).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('queryplanner: range filter ($gte, $lte) should use IXSCAN_RANGE', async () => {
|
||||||
|
const plan = await queryPlanner.plan({ age: { $gte: 25, $lte: 35 } });
|
||||||
|
|
||||||
|
expect(plan.type).toEqual('IXSCAN_RANGE');
|
||||||
|
expect(plan.usesRange).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('queryplanner: $in operator should use IXSCAN', async () => {
|
||||||
|
const plan = await queryPlanner.plan({ age: { $in: [25, 30, 35] } });
|
||||||
|
|
||||||
|
expect(plan.type).toEqual('IXSCAN');
|
||||||
|
expect(plan.indexName).toEqual('age_1');
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Compound Index Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('queryplanner: compound index - first field equality should use index', async () => {
|
||||||
|
const plan = await queryPlanner.plan({ city: 'NYC' });
|
||||||
|
|
||||||
|
expect(plan.type).toEqual('IXSCAN');
|
||||||
|
expect(plan.indexName).toEqual('city_category_1');
|
||||||
|
expect(plan.indexFieldsUsed).toContain('city');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('queryplanner: compound index - both fields should use full index', async () => {
|
||||||
|
const plan = await queryPlanner.plan({ city: 'NYC', category: 'A' });
|
||||||
|
|
||||||
|
expect(plan.type).toEqual('IXSCAN');
|
||||||
|
expect(plan.indexName).toEqual('city_category_1');
|
||||||
|
expect(plan.indexFieldsUsed).toContain('city');
|
||||||
|
expect(plan.indexFieldsUsed).toContain('category');
|
||||||
|
expect(plan.indexFieldsUsed.length).toEqual(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Selectivity Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('queryplanner: equality query should have low selectivity', async () => {
|
||||||
|
const plan = await queryPlanner.plan({ age: 30 });
|
||||||
|
|
||||||
|
expect(plan.selectivity).toBeLessThan(0.1);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('queryplanner: range query should have moderate selectivity', async () => {
|
||||||
|
const plan = await queryPlanner.plan({ age: { $gt: 25 } });
|
||||||
|
|
||||||
|
expect(plan.selectivity).toBeGreaterThan(0);
|
||||||
|
expect(plan.selectivity).toBeLessThan(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('queryplanner: $in query selectivity depends on array size', async () => {
|
||||||
|
const smallInPlan = await queryPlanner.plan({ age: { $in: [25] } });
|
||||||
|
const largeInPlan = await queryPlanner.plan({ age: { $in: [25, 26, 27, 28, 29, 30] } });
|
||||||
|
|
||||||
|
// Larger $in should have higher selectivity (less selective = more documents)
|
||||||
|
expect(largeInPlan.selectivity).toBeGreaterThanOrEqual(smallInPlan.selectivity);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Index Covering Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('queryplanner: query covering all filter fields should be index covering', async () => {
|
||||||
|
const plan = await queryPlanner.plan({ age: 30 });
|
||||||
|
|
||||||
|
// All filter fields are covered by the index
|
||||||
|
expect(plan.indexCovering).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('queryplanner: query with residual filter should not be index covering', async () => {
|
||||||
|
const plan = await queryPlanner.plan({ city: 'NYC', name: 'Alice' });
|
||||||
|
|
||||||
|
// 'name' is not in the compound index city_category, so it's residual
|
||||||
|
expect(plan.indexCovering).toBeFalse();
|
||||||
|
expect(plan.residualFilter).toBeTruthy();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Explain Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('queryplanner: explain should return detailed plan info', async () => {
|
||||||
|
const explanation = await queryPlanner.explain({ age: 30 });
|
||||||
|
|
||||||
|
expect(explanation.queryPlanner).toBeTruthy();
|
||||||
|
expect(explanation.queryPlanner.plannerVersion).toEqual(1);
|
||||||
|
expect(explanation.queryPlanner.winningPlan).toBeTruthy();
|
||||||
|
expect(explanation.queryPlanner.rejectedPlans).toBeArray();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('queryplanner: explain should include winning and rejected plans', async () => {
|
||||||
|
const explanation = await queryPlanner.explain({ age: 30 });
|
||||||
|
|
||||||
|
expect(explanation.queryPlanner.winningPlan.type).toBeTruthy();
|
||||||
|
expect(explanation.queryPlanner.rejectedPlans.length).toBeGreaterThan(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('queryplanner: explain winning plan should be the best plan', async () => {
|
||||||
|
const explanation = await queryPlanner.explain({ age: 30 });
|
||||||
|
|
||||||
|
// Winning plan should use an index, not collection scan (if index exists)
|
||||||
|
expect(explanation.queryPlanner.winningPlan.type).toEqual('IXSCAN');
|
||||||
|
|
||||||
|
// There should be a COLLSCAN in rejected plans
|
||||||
|
const hasCOLLSCAN = explanation.queryPlanner.rejectedPlans.some(p => p.type === 'COLLSCAN');
|
||||||
|
expect(hasCOLLSCAN).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// $and Operator Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('queryplanner: $and conditions should be analyzed', async () => {
|
||||||
|
const plan = await queryPlanner.plan({
|
||||||
|
$and: [
|
||||||
|
{ age: { $gte: 25 } },
|
||||||
|
{ age: { $lte: 35 } },
|
||||||
|
],
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(plan.type).toEqual('IXSCAN_RANGE');
|
||||||
|
expect(plan.indexName).toEqual('age_1');
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Edge Cases
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('queryplanner: should handle complex nested operators', async () => {
|
||||||
|
const plan = await queryPlanner.plan({
|
||||||
|
age: { $gte: 20, $lte: 40 },
|
||||||
|
city: 'NYC',
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(plan).toBeTruthy();
|
||||||
|
expect(plan.type).not.toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('queryplanner: should handle $exists operator', async () => {
|
||||||
|
await indexEngine.createIndex({ email: 1 }, { name: 'email_1', sparse: true });
|
||||||
|
|
||||||
|
const plan = await queryPlanner.plan({ email: { $exists: true } });
|
||||||
|
|
||||||
|
// $exists can use sparse indexes
|
||||||
|
expect(plan).toBeTruthy();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Cleanup
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('queryplanner: cleanup', async () => {
|
||||||
|
await storage.close();
|
||||||
|
expect(true).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
export default tap.start();
|
||||||
361
test/test.tsmdb.session.ts
Normal file
361
test/test.tsmdb.session.ts
Normal file
@@ -0,0 +1,361 @@
|
|||||||
|
import { expect, tap } from '@git.zone/tstest/tapbundle';
|
||||||
|
import * as smartmongo from '../ts/index.js';
|
||||||
|
|
||||||
|
const { SessionEngine } = smartmongo.tsmdb;
|
||||||
|
|
||||||
|
let sessionEngine: InstanceType<typeof SessionEngine>;
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Setup
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('session: should create SessionEngine instance', async () => {
|
||||||
|
sessionEngine = new SessionEngine({
|
||||||
|
sessionTimeoutMs: 1000, // 1 second for testing
|
||||||
|
cleanupIntervalMs: 10000, // 10 seconds to avoid cleanup during tests
|
||||||
|
});
|
||||||
|
expect(sessionEngine).toBeTruthy();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Session Lifecycle Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('session: startSession should create session with auto-generated ID', async () => {
|
||||||
|
const session = sessionEngine.startSession();
|
||||||
|
|
||||||
|
expect(session).toBeTruthy();
|
||||||
|
expect(session.id).toBeTruthy();
|
||||||
|
expect(session.id.length).toBeGreaterThanOrEqual(32); // UUID hex string (32 or 36 with hyphens)
|
||||||
|
expect(session.createdAt).toBeGreaterThan(0);
|
||||||
|
expect(session.lastActivityAt).toBeGreaterThan(0);
|
||||||
|
expect(session.inTransaction).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: startSession should create session with specified ID', async () => {
|
||||||
|
const customId = 'custom-session-id-12345';
|
||||||
|
const session = sessionEngine.startSession(customId);
|
||||||
|
|
||||||
|
expect(session.id).toEqual(customId);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: startSession should create session with metadata', async () => {
|
||||||
|
const metadata = { client: 'test-client', version: '1.0' };
|
||||||
|
const session = sessionEngine.startSession(undefined, metadata);
|
||||||
|
|
||||||
|
expect(session.metadata).toBeTruthy();
|
||||||
|
expect(session.metadata!.client).toEqual('test-client');
|
||||||
|
expect(session.metadata!.version).toEqual('1.0');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: getSession should return session by ID', async () => {
|
||||||
|
const created = sessionEngine.startSession('get-session-test');
|
||||||
|
const retrieved = sessionEngine.getSession('get-session-test');
|
||||||
|
|
||||||
|
expect(retrieved).toBeTruthy();
|
||||||
|
expect(retrieved!.id).toEqual('get-session-test');
|
||||||
|
expect(retrieved!.id).toEqual(created.id);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: getSession should return undefined for non-existent session', async () => {
|
||||||
|
const session = sessionEngine.getSession('non-existent-session-id');
|
||||||
|
expect(session).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: touchSession should update lastActivityAt', async () => {
|
||||||
|
const session = sessionEngine.startSession('touch-test-session');
|
||||||
|
const originalLastActivity = session.lastActivityAt;
|
||||||
|
|
||||||
|
// Wait a bit to ensure time difference
|
||||||
|
await new Promise(resolve => setTimeout(resolve, 10));
|
||||||
|
|
||||||
|
const touched = sessionEngine.touchSession('touch-test-session');
|
||||||
|
expect(touched).toBeTrue();
|
||||||
|
|
||||||
|
const updated = sessionEngine.getSession('touch-test-session');
|
||||||
|
expect(updated!.lastActivityAt).toBeGreaterThanOrEqual(originalLastActivity);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: touchSession should return false for non-existent session', async () => {
|
||||||
|
const touched = sessionEngine.touchSession('non-existent-touch-session');
|
||||||
|
expect(touched).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: endSession should remove the session', async () => {
|
||||||
|
sessionEngine.startSession('end-session-test');
|
||||||
|
expect(sessionEngine.getSession('end-session-test')).toBeTruthy();
|
||||||
|
|
||||||
|
const ended = await sessionEngine.endSession('end-session-test');
|
||||||
|
expect(ended).toBeTrue();
|
||||||
|
|
||||||
|
expect(sessionEngine.getSession('end-session-test')).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: endSession should return false for non-existent session', async () => {
|
||||||
|
const ended = await sessionEngine.endSession('non-existent-end-session');
|
||||||
|
expect(ended).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Session Expiry Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('session: isSessionExpired should return false for fresh session', async () => {
|
||||||
|
const session = sessionEngine.startSession('fresh-session');
|
||||||
|
const isExpired = sessionEngine.isSessionExpired(session);
|
||||||
|
expect(isExpired).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: isSessionExpired should return true for old session', async () => {
|
||||||
|
// Create a session with old lastActivityAt
|
||||||
|
const session = sessionEngine.startSession('old-session');
|
||||||
|
// Manually set lastActivityAt to old value (sessionTimeoutMs is 1000ms)
|
||||||
|
(session as any).lastActivityAt = Date.now() - 2000;
|
||||||
|
|
||||||
|
const isExpired = sessionEngine.isSessionExpired(session);
|
||||||
|
expect(isExpired).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: getSession should return undefined for expired session', async () => {
|
||||||
|
const session = sessionEngine.startSession('expiring-session');
|
||||||
|
// Manually expire the session
|
||||||
|
(session as any).lastActivityAt = Date.now() - 2000;
|
||||||
|
|
||||||
|
const retrieved = sessionEngine.getSession('expiring-session');
|
||||||
|
expect(retrieved).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Transaction Integration Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('session: startTransaction should mark session as in transaction', async () => {
|
||||||
|
sessionEngine.startSession('txn-session-1');
|
||||||
|
const started = sessionEngine.startTransaction('txn-session-1', 'txn-id-1', 1);
|
||||||
|
|
||||||
|
expect(started).toBeTrue();
|
||||||
|
|
||||||
|
const session = sessionEngine.getSession('txn-session-1');
|
||||||
|
expect(session!.inTransaction).toBeTrue();
|
||||||
|
expect(session!.txnId).toEqual('txn-id-1');
|
||||||
|
expect(session!.txnNumber).toEqual(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: startTransaction should return false for non-existent session', async () => {
|
||||||
|
const started = sessionEngine.startTransaction('non-existent-txn-session', 'txn-id');
|
||||||
|
expect(started).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: endTransaction should clear transaction state', async () => {
|
||||||
|
sessionEngine.startSession('txn-session-2');
|
||||||
|
sessionEngine.startTransaction('txn-session-2', 'txn-id-2');
|
||||||
|
|
||||||
|
const ended = sessionEngine.endTransaction('txn-session-2');
|
||||||
|
expect(ended).toBeTrue();
|
||||||
|
|
||||||
|
const session = sessionEngine.getSession('txn-session-2');
|
||||||
|
expect(session!.inTransaction).toBeFalse();
|
||||||
|
expect(session!.txnId).toBeUndefined();
|
||||||
|
expect(session!.txnNumber).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: endTransaction should return false for non-existent session', async () => {
|
||||||
|
const ended = sessionEngine.endTransaction('non-existent-end-txn-session');
|
||||||
|
expect(ended).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: getTransactionId should return transaction ID', async () => {
|
||||||
|
sessionEngine.startSession('txn-id-session');
|
||||||
|
sessionEngine.startTransaction('txn-id-session', 'my-txn-id');
|
||||||
|
|
||||||
|
const txnId = sessionEngine.getTransactionId('txn-id-session');
|
||||||
|
expect(txnId).toEqual('my-txn-id');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: getTransactionId should return undefined for session without transaction', async () => {
|
||||||
|
sessionEngine.startSession('no-txn-session');
|
||||||
|
const txnId = sessionEngine.getTransactionId('no-txn-session');
|
||||||
|
expect(txnId).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: getTransactionId should return undefined for non-existent session', async () => {
|
||||||
|
const txnId = sessionEngine.getTransactionId('non-existent-txn-id-session');
|
||||||
|
expect(txnId).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: isInTransaction should return correct state', async () => {
|
||||||
|
sessionEngine.startSession('in-txn-check-session');
|
||||||
|
|
||||||
|
expect(sessionEngine.isInTransaction('in-txn-check-session')).toBeFalse();
|
||||||
|
|
||||||
|
sessionEngine.startTransaction('in-txn-check-session', 'txn-check');
|
||||||
|
expect(sessionEngine.isInTransaction('in-txn-check-session')).toBeTrue();
|
||||||
|
|
||||||
|
sessionEngine.endTransaction('in-txn-check-session');
|
||||||
|
expect(sessionEngine.isInTransaction('in-txn-check-session')).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: isInTransaction should return false for non-existent session', async () => {
|
||||||
|
expect(sessionEngine.isInTransaction('non-existent-in-txn-session')).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Session Listing Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('session: listSessions should return all active sessions', async () => {
|
||||||
|
// Close and recreate to have a clean slate
|
||||||
|
sessionEngine.close();
|
||||||
|
sessionEngine = new SessionEngine({
|
||||||
|
sessionTimeoutMs: 10000,
|
||||||
|
cleanupIntervalMs: 60000,
|
||||||
|
});
|
||||||
|
|
||||||
|
sessionEngine.startSession('list-session-1');
|
||||||
|
sessionEngine.startSession('list-session-2');
|
||||||
|
sessionEngine.startSession('list-session-3');
|
||||||
|
|
||||||
|
const sessions = sessionEngine.listSessions();
|
||||||
|
expect(sessions.length).toEqual(3);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: listSessions should not include expired sessions', async () => {
|
||||||
|
const session = sessionEngine.startSession('expired-list-session');
|
||||||
|
// Expire the session
|
||||||
|
(session as any).lastActivityAt = Date.now() - 20000;
|
||||||
|
|
||||||
|
const sessions = sessionEngine.listSessions();
|
||||||
|
const found = sessions.find(s => s.id === 'expired-list-session');
|
||||||
|
expect(found).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: getSessionCount should return correct count', async () => {
|
||||||
|
const count = sessionEngine.getSessionCount();
|
||||||
|
expect(count).toBeGreaterThanOrEqual(3); // We created 3 sessions above
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: getSessionsWithTransactions should filter correctly', async () => {
|
||||||
|
// Clean slate
|
||||||
|
sessionEngine.close();
|
||||||
|
sessionEngine = new SessionEngine({
|
||||||
|
sessionTimeoutMs: 10000,
|
||||||
|
cleanupIntervalMs: 60000,
|
||||||
|
});
|
||||||
|
|
||||||
|
sessionEngine.startSession('no-txn-1');
|
||||||
|
sessionEngine.startSession('no-txn-2');
|
||||||
|
sessionEngine.startSession('with-txn-1');
|
||||||
|
sessionEngine.startSession('with-txn-2');
|
||||||
|
|
||||||
|
sessionEngine.startTransaction('with-txn-1', 'txn-a');
|
||||||
|
sessionEngine.startTransaction('with-txn-2', 'txn-b');
|
||||||
|
|
||||||
|
const txnSessions = sessionEngine.getSessionsWithTransactions();
|
||||||
|
expect(txnSessions.length).toEqual(2);
|
||||||
|
expect(txnSessions.every(s => s.inTransaction)).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// getOrCreateSession Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('session: getOrCreateSession should create if missing', async () => {
|
||||||
|
const session = sessionEngine.getOrCreateSession('get-or-create-new');
|
||||||
|
expect(session).toBeTruthy();
|
||||||
|
expect(session.id).toEqual('get-or-create-new');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: getOrCreateSession should return existing session', async () => {
|
||||||
|
const created = sessionEngine.startSession('get-or-create-existing');
|
||||||
|
const retrieved = sessionEngine.getOrCreateSession('get-or-create-existing');
|
||||||
|
|
||||||
|
expect(retrieved.id).toEqual(created.id);
|
||||||
|
expect(retrieved.createdAt).toEqual(created.createdAt);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: getOrCreateSession should touch existing session', async () => {
|
||||||
|
const session = sessionEngine.startSession('get-or-create-touch');
|
||||||
|
const originalLastActivity = session.lastActivityAt;
|
||||||
|
|
||||||
|
await new Promise(resolve => setTimeout(resolve, 10));
|
||||||
|
|
||||||
|
sessionEngine.getOrCreateSession('get-or-create-touch');
|
||||||
|
const updated = sessionEngine.getSession('get-or-create-touch');
|
||||||
|
|
||||||
|
expect(updated!.lastActivityAt).toBeGreaterThanOrEqual(originalLastActivity);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// extractSessionId Static Method Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('session: extractSessionId should handle UUID object', async () => {
|
||||||
|
const { ObjectId } = smartmongo.tsmdb;
|
||||||
|
const uuid = new smartmongo.tsmdb.plugins.bson.UUID();
|
||||||
|
const lsid = { id: uuid };
|
||||||
|
|
||||||
|
const extracted = SessionEngine.extractSessionId(lsid);
|
||||||
|
expect(extracted).toEqual(uuid.toHexString());
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: extractSessionId should handle string ID', async () => {
|
||||||
|
const lsid = { id: 'string-session-id' };
|
||||||
|
|
||||||
|
const extracted = SessionEngine.extractSessionId(lsid);
|
||||||
|
expect(extracted).toEqual('string-session-id');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: extractSessionId should handle binary format', async () => {
|
||||||
|
const binaryData = Buffer.from('test-binary-uuid', 'utf8').toString('base64');
|
||||||
|
const lsid = { id: { $binary: { base64: binaryData } } };
|
||||||
|
|
||||||
|
const extracted = SessionEngine.extractSessionId(lsid);
|
||||||
|
expect(extracted).toBeTruthy();
|
||||||
|
expect(typeof extracted).toEqual('string');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: extractSessionId should return undefined for null/undefined', async () => {
|
||||||
|
expect(SessionEngine.extractSessionId(null)).toBeUndefined();
|
||||||
|
expect(SessionEngine.extractSessionId(undefined)).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: extractSessionId should return undefined for empty object', async () => {
|
||||||
|
expect(SessionEngine.extractSessionId({})).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// refreshSession Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('session: refreshSession should update lastActivityAt', async () => {
|
||||||
|
const session = sessionEngine.startSession('refresh-session-test');
|
||||||
|
const originalLastActivity = session.lastActivityAt;
|
||||||
|
|
||||||
|
await new Promise(resolve => setTimeout(resolve, 10));
|
||||||
|
|
||||||
|
const refreshed = sessionEngine.refreshSession('refresh-session-test');
|
||||||
|
expect(refreshed).toBeTrue();
|
||||||
|
|
||||||
|
const updated = sessionEngine.getSession('refresh-session-test');
|
||||||
|
expect(updated!.lastActivityAt).toBeGreaterThanOrEqual(originalLastActivity);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: refreshSession should return false for non-existent session', async () => {
|
||||||
|
const refreshed = sessionEngine.refreshSession('non-existent-refresh-session');
|
||||||
|
expect(refreshed).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Cleanup
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('session: close should clear all sessions', async () => {
|
||||||
|
sessionEngine.startSession('close-test-session');
|
||||||
|
expect(sessionEngine.getSessionCount()).toBeGreaterThan(0);
|
||||||
|
|
||||||
|
sessionEngine.close();
|
||||||
|
|
||||||
|
expect(sessionEngine.getSessionCount()).toEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
export default tap.start();
|
||||||
411
test/test.tsmdb.wal.ts
Normal file
411
test/test.tsmdb.wal.ts
Normal file
@@ -0,0 +1,411 @@
|
|||||||
|
import { expect, tap } from '@git.zone/tstest/tapbundle';
|
||||||
|
import * as smartmongo from '../ts/index.js';
|
||||||
|
import * as path from 'path';
|
||||||
|
import * as fs from 'fs/promises';
|
||||||
|
|
||||||
|
const { WAL, ObjectId } = smartmongo.tsmdb;
|
||||||
|
|
||||||
|
let wal: InstanceType<typeof WAL>;
|
||||||
|
const TEST_WAL_PATH = '/tmp/tsmdb-test-wal/test.wal';
|
||||||
|
|
||||||
|
// Helper to clean up test files
|
||||||
|
async function cleanupTestFiles() {
|
||||||
|
try {
|
||||||
|
await fs.rm('/tmp/tsmdb-test-wal', { recursive: true, force: true });
|
||||||
|
} catch {
|
||||||
|
// Ignore if doesn't exist
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Setup
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('wal: cleanup before tests', async () => {
|
||||||
|
await cleanupTestFiles();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('wal: should create WAL instance', async () => {
|
||||||
|
wal = new WAL(TEST_WAL_PATH, { checkpointInterval: 100 });
|
||||||
|
expect(wal).toBeTruthy();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('wal: should initialize WAL', async () => {
|
||||||
|
const result = await wal.initialize();
|
||||||
|
expect(result).toBeTruthy();
|
||||||
|
expect(result.recoveredEntries).toBeArray();
|
||||||
|
expect(result.recoveredEntries.length).toEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// LSN Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('wal: getCurrentLsn should return 0 initially', async () => {
|
||||||
|
const lsn = wal.getCurrentLsn();
|
||||||
|
expect(lsn).toEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('wal: LSN should increment after logging', async () => {
|
||||||
|
const doc = { _id: new ObjectId(), name: 'Test' };
|
||||||
|
const lsn = await wal.logInsert('testdb', 'testcoll', doc as any);
|
||||||
|
|
||||||
|
expect(lsn).toEqual(1);
|
||||||
|
expect(wal.getCurrentLsn()).toEqual(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Insert Logging Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('wal: logInsert should create entry with correct structure', async () => {
|
||||||
|
const doc = { _id: new ObjectId(), name: 'InsertTest', value: 42 };
|
||||||
|
const lsn = await wal.logInsert('testdb', 'insertcoll', doc as any);
|
||||||
|
|
||||||
|
expect(lsn).toBeGreaterThan(0);
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(lsn - 1);
|
||||||
|
const entry = entries.find(e => e.lsn === lsn);
|
||||||
|
|
||||||
|
expect(entry).toBeTruthy();
|
||||||
|
expect(entry!.operation).toEqual('insert');
|
||||||
|
expect(entry!.dbName).toEqual('testdb');
|
||||||
|
expect(entry!.collName).toEqual('insertcoll');
|
||||||
|
expect(entry!.documentId).toEqual(doc._id.toHexString());
|
||||||
|
expect(entry!.data).toBeTruthy();
|
||||||
|
expect(entry!.timestamp).toBeGreaterThan(0);
|
||||||
|
expect(entry!.checksum).toBeGreaterThan(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('wal: logInsert with transaction ID', async () => {
|
||||||
|
const doc = { _id: new ObjectId(), name: 'TxnInsertTest' };
|
||||||
|
const lsn = await wal.logInsert('testdb', 'insertcoll', doc as any, 'txn-123');
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(lsn - 1);
|
||||||
|
const entry = entries.find(e => e.lsn === lsn);
|
||||||
|
|
||||||
|
expect(entry!.txnId).toEqual('txn-123');
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Update Logging Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('wal: logUpdate should store old and new document', async () => {
|
||||||
|
const oldDoc = { _id: new ObjectId(), name: 'OldName', value: 1 };
|
||||||
|
const newDoc = { ...oldDoc, name: 'NewName', value: 2 };
|
||||||
|
|
||||||
|
const lsn = await wal.logUpdate('testdb', 'updatecoll', oldDoc as any, newDoc as any);
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(lsn - 1);
|
||||||
|
const entry = entries.find(e => e.lsn === lsn);
|
||||||
|
|
||||||
|
expect(entry).toBeTruthy();
|
||||||
|
expect(entry!.operation).toEqual('update');
|
||||||
|
expect(entry!.data).toBeTruthy();
|
||||||
|
expect(entry!.previousData).toBeTruthy();
|
||||||
|
expect(entry!.documentId).toEqual(oldDoc._id.toHexString());
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Delete Logging Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('wal: logDelete should record deleted document', async () => {
|
||||||
|
const doc = { _id: new ObjectId(), name: 'ToDelete' };
|
||||||
|
|
||||||
|
const lsn = await wal.logDelete('testdb', 'deletecoll', doc as any);
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(lsn - 1);
|
||||||
|
const entry = entries.find(e => e.lsn === lsn);
|
||||||
|
|
||||||
|
expect(entry).toBeTruthy();
|
||||||
|
expect(entry!.operation).toEqual('delete');
|
||||||
|
expect(entry!.previousData).toBeTruthy();
|
||||||
|
expect(entry!.data).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Transaction Logging Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('wal: logBeginTransaction should create begin entry', async () => {
|
||||||
|
const lsn = await wal.logBeginTransaction('txn-begin-test');
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(lsn - 1);
|
||||||
|
const entry = entries.find(e => e.lsn === lsn);
|
||||||
|
|
||||||
|
expect(entry).toBeTruthy();
|
||||||
|
expect(entry!.operation).toEqual('begin');
|
||||||
|
expect(entry!.txnId).toEqual('txn-begin-test');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('wal: logCommitTransaction should create commit entry', async () => {
|
||||||
|
const lsn = await wal.logCommitTransaction('txn-commit-test');
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(lsn - 1);
|
||||||
|
const entry = entries.find(e => e.lsn === lsn);
|
||||||
|
|
||||||
|
expect(entry).toBeTruthy();
|
||||||
|
expect(entry!.operation).toEqual('commit');
|
||||||
|
expect(entry!.txnId).toEqual('txn-commit-test');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('wal: logAbortTransaction should create abort entry', async () => {
|
||||||
|
const lsn = await wal.logAbortTransaction('txn-abort-test');
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(lsn - 1);
|
||||||
|
const entry = entries.find(e => e.lsn === lsn);
|
||||||
|
|
||||||
|
expect(entry).toBeTruthy();
|
||||||
|
expect(entry!.operation).toEqual('abort');
|
||||||
|
expect(entry!.txnId).toEqual('txn-abort-test');
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// getTransactionEntries Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('wal: getTransactionEntries should return entries for transaction', async () => {
|
||||||
|
// Log a complete transaction
|
||||||
|
const txnId = 'txn-entries-test';
|
||||||
|
await wal.logBeginTransaction(txnId);
|
||||||
|
|
||||||
|
const doc1 = { _id: new ObjectId(), name: 'TxnDoc1' };
|
||||||
|
await wal.logInsert('testdb', 'txncoll', doc1 as any, txnId);
|
||||||
|
|
||||||
|
const doc2 = { _id: new ObjectId(), name: 'TxnDoc2' };
|
||||||
|
await wal.logInsert('testdb', 'txncoll', doc2 as any, txnId);
|
||||||
|
|
||||||
|
await wal.logCommitTransaction(txnId);
|
||||||
|
|
||||||
|
const entries = wal.getTransactionEntries(txnId);
|
||||||
|
|
||||||
|
expect(entries.length).toEqual(4); // begin + 2 inserts + commit
|
||||||
|
expect(entries.every(e => e.txnId === txnId)).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('wal: getTransactionEntries should return empty for unknown transaction', async () => {
|
||||||
|
const entries = wal.getTransactionEntries('unknown-txn-id');
|
||||||
|
expect(entries.length).toEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// getEntriesAfter Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('wal: getEntriesAfter should filter by LSN', async () => {
|
||||||
|
const currentLsn = wal.getCurrentLsn();
|
||||||
|
|
||||||
|
// Add more entries
|
||||||
|
const doc = { _id: new ObjectId(), name: 'AfterTest' };
|
||||||
|
await wal.logInsert('testdb', 'aftercoll', doc as any);
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(currentLsn);
|
||||||
|
expect(entries.length).toEqual(1);
|
||||||
|
expect(entries[0].lsn).toBeGreaterThan(currentLsn);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('wal: getEntriesAfter with LSN 0 should return all entries', async () => {
|
||||||
|
const entries = wal.getEntriesAfter(0);
|
||||||
|
expect(entries.length).toBeGreaterThan(0);
|
||||||
|
expect(entries.length).toEqual(wal.getCurrentLsn());
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Checkpoint Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('wal: checkpoint should create checkpoint entry', async () => {
|
||||||
|
const lsn = await wal.checkpoint();
|
||||||
|
|
||||||
|
expect(lsn).toBeGreaterThan(0);
|
||||||
|
|
||||||
|
// After checkpoint, getEntriesAfter(checkpoint) should be limited
|
||||||
|
const entries = wal.getEntriesAfter(0);
|
||||||
|
expect(entries.some(e => e.operation === 'checkpoint')).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Document Recovery Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('wal: recoverDocument should deserialize document from entry', async () => {
|
||||||
|
const originalDoc = { _id: new ObjectId(), name: 'RecoverTest', nested: { a: 1, b: 2 } };
|
||||||
|
const lsn = await wal.logInsert('testdb', 'recovercoll', originalDoc as any);
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(lsn - 1);
|
||||||
|
const entry = entries.find(e => e.lsn === lsn);
|
||||||
|
|
||||||
|
const recovered = wal.recoverDocument(entry!);
|
||||||
|
|
||||||
|
expect(recovered).toBeTruthy();
|
||||||
|
expect(recovered!.name).toEqual('RecoverTest');
|
||||||
|
expect(recovered!.nested.a).toEqual(1);
|
||||||
|
expect(recovered!.nested.b).toEqual(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('wal: recoverDocument should return null for entry without data', async () => {
|
||||||
|
const lsn = await wal.logBeginTransaction('recover-no-data');
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(lsn - 1);
|
||||||
|
const entry = entries.find(e => e.lsn === lsn);
|
||||||
|
|
||||||
|
const recovered = wal.recoverDocument(entry!);
|
||||||
|
expect(recovered).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('wal: recoverPreviousDocument should deserialize previous state', async () => {
|
||||||
|
const oldDoc = { _id: new ObjectId(), name: 'Old', value: 100 };
|
||||||
|
const newDoc = { ...oldDoc, name: 'New', value: 200 };
|
||||||
|
|
||||||
|
const lsn = await wal.logUpdate('testdb', 'recovercoll', oldDoc as any, newDoc as any);
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(lsn - 1);
|
||||||
|
const entry = entries.find(e => e.lsn === lsn);
|
||||||
|
|
||||||
|
const previous = wal.recoverPreviousDocument(entry!);
|
||||||
|
|
||||||
|
expect(previous).toBeTruthy();
|
||||||
|
expect(previous!.name).toEqual('Old');
|
||||||
|
expect(previous!.value).toEqual(100);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('wal: recoverPreviousDocument should return null for insert entry', async () => {
|
||||||
|
const doc = { _id: new ObjectId(), name: 'NoPrevious' };
|
||||||
|
const lsn = await wal.logInsert('testdb', 'recovercoll', doc as any);
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(lsn - 1);
|
||||||
|
const entry = entries.find(e => e.lsn === lsn);
|
||||||
|
|
||||||
|
const previous = wal.recoverPreviousDocument(entry!);
|
||||||
|
expect(previous).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// WAL Persistence and Recovery Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('wal: should persist and recover entries', async () => {
|
||||||
|
// Close current WAL
|
||||||
|
await wal.close();
|
||||||
|
|
||||||
|
// Create new WAL instance and initialize (should recover)
|
||||||
|
const wal2 = new WAL(TEST_WAL_PATH, { checkpointInterval: 100 });
|
||||||
|
const result = await wal2.initialize();
|
||||||
|
|
||||||
|
// Should have recovered entries
|
||||||
|
expect(result.recoveredEntries).toBeArray();
|
||||||
|
// After checkpoint, there might not be many recoverable entries
|
||||||
|
// but getCurrentLsn should be preserved or reset
|
||||||
|
|
||||||
|
await wal2.close();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Entry Checksum Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('wal: entries should have valid checksums', async () => {
|
||||||
|
wal = new WAL(TEST_WAL_PATH + '.checksum', { checkpointInterval: 100 });
|
||||||
|
await wal.initialize();
|
||||||
|
|
||||||
|
const doc = { _id: new ObjectId(), name: 'ChecksumTest' };
|
||||||
|
const lsn = await wal.logInsert('testdb', 'checksumcoll', doc as any);
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(lsn - 1);
|
||||||
|
const entry = entries.find(e => e.lsn === lsn);
|
||||||
|
|
||||||
|
expect(entry!.checksum).toBeGreaterThan(0);
|
||||||
|
expect(typeof entry!.checksum).toEqual('number');
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Edge Cases
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('wal: should handle special characters in document', async () => {
|
||||||
|
const doc = {
|
||||||
|
_id: new ObjectId(),
|
||||||
|
name: 'Test\nWith\tSpecial\r\nChars',
|
||||||
|
emoji: '🎉',
|
||||||
|
unicode: '日本語',
|
||||||
|
};
|
||||||
|
|
||||||
|
const lsn = await wal.logInsert('testdb', 'specialcoll', doc as any);
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(lsn - 1);
|
||||||
|
const entry = entries.find(e => e.lsn === lsn);
|
||||||
|
|
||||||
|
const recovered = wal.recoverDocument(entry!);
|
||||||
|
expect(recovered!.name).toEqual('Test\nWith\tSpecial\r\nChars');
|
||||||
|
expect(recovered!.emoji).toEqual('🎉');
|
||||||
|
expect(recovered!.unicode).toEqual('日本語');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('wal: should handle binary data in documents', async () => {
|
||||||
|
const doc = {
|
||||||
|
_id: new ObjectId(),
|
||||||
|
binaryField: Buffer.from([0x00, 0xFF, 0x7F, 0x80]),
|
||||||
|
};
|
||||||
|
|
||||||
|
const lsn = await wal.logInsert('testdb', 'binarycoll', doc as any);
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(lsn - 1);
|
||||||
|
const entry = entries.find(e => e.lsn === lsn);
|
||||||
|
|
||||||
|
const recovered = wal.recoverDocument(entry!);
|
||||||
|
expect(recovered).toBeTruthy();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('wal: should handle nested documents', async () => {
|
||||||
|
const doc = {
|
||||||
|
_id: new ObjectId(),
|
||||||
|
level1: {
|
||||||
|
level2: {
|
||||||
|
level3: {
|
||||||
|
value: 'deep',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const lsn = await wal.logInsert('testdb', 'nestedcoll', doc as any);
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(lsn - 1);
|
||||||
|
const entry = entries.find(e => e.lsn === lsn);
|
||||||
|
|
||||||
|
const recovered = wal.recoverDocument(entry!);
|
||||||
|
expect(recovered!.level1.level2.level3.value).toEqual('deep');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('wal: should handle arrays in documents', async () => {
|
||||||
|
const doc = {
|
||||||
|
_id: new ObjectId(),
|
||||||
|
tags: ['a', 'b', 'c'],
|
||||||
|
numbers: [1, 2, 3],
|
||||||
|
mixed: [1, 'two', { three: 3 }],
|
||||||
|
};
|
||||||
|
|
||||||
|
const lsn = await wal.logInsert('testdb', 'arraycoll', doc as any);
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(lsn - 1);
|
||||||
|
const entry = entries.find(e => e.lsn === lsn);
|
||||||
|
|
||||||
|
const recovered = wal.recoverDocument(entry!);
|
||||||
|
expect(recovered!.tags).toEqual(['a', 'b', 'c']);
|
||||||
|
expect(recovered!.numbers).toEqual([1, 2, 3]);
|
||||||
|
expect(recovered!.mixed[2].three).toEqual(3);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Cleanup
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('wal: cleanup', async () => {
|
||||||
|
await wal.close();
|
||||||
|
await cleanupTestFiles();
|
||||||
|
expect(true).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
export default tap.start();
|
||||||
@@ -3,6 +3,6 @@
|
|||||||
*/
|
*/
|
||||||
export const commitinfo = {
|
export const commitinfo = {
|
||||||
name: '@push.rocks/smartmongo',
|
name: '@push.rocks/smartmongo',
|
||||||
version: '3.0.0',
|
version: '4.3.0',
|
||||||
description: 'A module for creating and managing a local MongoDB instance for testing purposes.'
|
description: 'A module for creating and managing a local MongoDB instance for testing purposes.'
|
||||||
}
|
}
|
||||||
|
|||||||
78
ts/index.ts
78
ts/index.ts
@@ -1,74 +1,14 @@
|
|||||||
import { commitinfo } from './00_commitinfo_data.js';
|
import { commitinfo } from './00_commitinfo_data.js';
|
||||||
import * as plugins from './smartmongo.plugins.js';
|
|
||||||
|
// Export SmartMongo from ts_mongotools
|
||||||
|
export { SmartMongo } from './ts_mongotools/index.js';
|
||||||
|
|
||||||
// Export TsmDB module
|
// Export TsmDB module
|
||||||
export * as tsmdb from './tsmdb/index.js';
|
export * as tsmdb from './ts_tsmdb/index.js';
|
||||||
|
|
||||||
export class SmartMongo {
|
// Export LocalTsmDb from ts_local
|
||||||
// STATIC
|
export { LocalTsmDb } from './ts_local/index.js';
|
||||||
public static async createAndStart(replCountArg: number = 1) {
|
export type { ILocalTsmDbOptions } from './ts_local/index.js';
|
||||||
const smartMongoInstance = new SmartMongo();
|
|
||||||
await smartMongoInstance.start(replCountArg);
|
|
||||||
return smartMongoInstance;
|
|
||||||
}
|
|
||||||
|
|
||||||
// INSTANCE
|
// Export commitinfo
|
||||||
private _readyDeferred = plugins.smartpromise.defer();
|
export { commitinfo };
|
||||||
public readyPromise = this._readyDeferred.promise;
|
|
||||||
public mongoReplicaSet: plugins.mongoPlugin.MongoMemoryReplSet;
|
|
||||||
|
|
||||||
constructor() {}
|
|
||||||
|
|
||||||
public async start(countArg: number = 1) {
|
|
||||||
this.mongoReplicaSet = await plugins.mongoPlugin.MongoMemoryReplSet.create({
|
|
||||||
replSet: { count: countArg },
|
|
||||||
instanceOpts: [
|
|
||||||
{
|
|
||||||
storageEngine: 'wiredTiger',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
});
|
|
||||||
this._readyDeferred.resolve();
|
|
||||||
console.log(`mongoReplicaSet with ${countArg} replicas started.`);
|
|
||||||
console.log(`@pushrocks/smartmongo version ${commitinfo.version}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* returns a mongo descriptor for modules like
|
|
||||||
* @pushrocks/smartfile.
|
|
||||||
*/
|
|
||||||
public async getMongoDescriptor(): Promise<plugins.smartdata.IMongoDescriptor> {
|
|
||||||
await this.readyPromise;
|
|
||||||
return {
|
|
||||||
mongoDbName: `smartmongo_testdatabase`,
|
|
||||||
mongoDbUrl: this.mongoReplicaSet.getUri(),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* stops the smartmongo instance
|
|
||||||
* and cleans up after itself
|
|
||||||
*/
|
|
||||||
public async stop() {
|
|
||||||
await this.mongoReplicaSet.stop();
|
|
||||||
await this.mongoReplicaSet.cleanup();
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* like stop() but allows you to actually store
|
|
||||||
* the database on disk
|
|
||||||
*/
|
|
||||||
public async stopAndDumpToDir(
|
|
||||||
dirArg: string,
|
|
||||||
nameFunctionArg?: (doc: any) => string,
|
|
||||||
emptyDirArg = true,
|
|
||||||
) {
|
|
||||||
const mongodumpInstance = new plugins.mongodump.MongoDump();
|
|
||||||
const mongodumpTarget = await mongodumpInstance.addMongoTargetByMongoDescriptor(
|
|
||||||
await this.getMongoDescriptor(),
|
|
||||||
);
|
|
||||||
await mongodumpTarget.dumpAllCollectionsToDir(dirArg, nameFunctionArg, emptyDirArg);
|
|
||||||
await mongodumpInstance.stop();
|
|
||||||
await this.stop();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|||||||
138
ts/ts_local/classes.localtsmdb.ts
Normal file
138
ts/ts_local/classes.localtsmdb.ts
Normal file
@@ -0,0 +1,138 @@
|
|||||||
|
import * as plugins from './plugins.js';
|
||||||
|
import { TsmdbServer } from '../ts_tsmdb/index.js';
|
||||||
|
import type { MongoClient } from 'mongodb';
|
||||||
|
|
||||||
|
export interface ILocalTsmDbOptions {
|
||||||
|
folderPath: string;
|
||||||
|
port?: number;
|
||||||
|
host?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* LocalTsmDb - Convenience class for local MongoDB-compatible database
|
||||||
|
*
|
||||||
|
* This class wraps TsmdbServer and provides a simple interface for
|
||||||
|
* starting a local file-based MongoDB-compatible server and connecting to it.
|
||||||
|
*
|
||||||
|
* @example
|
||||||
|
* ```typescript
|
||||||
|
* import { LocalTsmDb } from '@push.rocks/smartmongo';
|
||||||
|
*
|
||||||
|
* const db = new LocalTsmDb({ folderPath: './data' });
|
||||||
|
* const client = await db.start();
|
||||||
|
*
|
||||||
|
* // Use the MongoDB client
|
||||||
|
* const collection = client.db('mydb').collection('users');
|
||||||
|
* await collection.insertOne({ name: 'Alice' });
|
||||||
|
*
|
||||||
|
* // When done
|
||||||
|
* await db.stop();
|
||||||
|
* ```
|
||||||
|
*/
|
||||||
|
export class LocalTsmDb {
|
||||||
|
private options: ILocalTsmDbOptions;
|
||||||
|
private server: TsmdbServer | null = null;
|
||||||
|
private client: MongoClient | null = null;
|
||||||
|
|
||||||
|
constructor(options: ILocalTsmDbOptions) {
|
||||||
|
this.options = options;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find an available port starting from the given port
|
||||||
|
*/
|
||||||
|
private async findAvailablePort(startPort = 27017): Promise<number> {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
const server = plugins.net.createServer();
|
||||||
|
server.listen(startPort, '127.0.0.1', () => {
|
||||||
|
const addr = server.address();
|
||||||
|
const port = typeof addr === 'object' && addr ? addr.port : startPort;
|
||||||
|
server.close(() => resolve(port));
|
||||||
|
});
|
||||||
|
server.on('error', () => {
|
||||||
|
this.findAvailablePort(startPort + 1).then(resolve).catch(reject);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Start the local TsmDB server and return a connected MongoDB client
|
||||||
|
*/
|
||||||
|
async start(): Promise<MongoClient> {
|
||||||
|
if (this.server && this.client) {
|
||||||
|
throw new Error('LocalTsmDb is already running');
|
||||||
|
}
|
||||||
|
|
||||||
|
const port = this.options.port ?? await this.findAvailablePort();
|
||||||
|
const host = this.options.host ?? '127.0.0.1';
|
||||||
|
|
||||||
|
this.server = new TsmdbServer({
|
||||||
|
port,
|
||||||
|
host,
|
||||||
|
storage: 'file',
|
||||||
|
storagePath: this.options.folderPath,
|
||||||
|
});
|
||||||
|
await this.server.start();
|
||||||
|
|
||||||
|
// Dynamically import mongodb to avoid requiring it as a hard dependency
|
||||||
|
const mongodb = await import('mongodb');
|
||||||
|
this.client = new mongodb.MongoClient(this.server.getConnectionUri(), {
|
||||||
|
directConnection: true,
|
||||||
|
serverSelectionTimeoutMS: 5000,
|
||||||
|
});
|
||||||
|
await this.client.connect();
|
||||||
|
|
||||||
|
return this.client;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the MongoDB client (throws if not started)
|
||||||
|
*/
|
||||||
|
getClient(): MongoClient {
|
||||||
|
if (!this.client) {
|
||||||
|
throw new Error('LocalTsmDb is not running. Call start() first.');
|
||||||
|
}
|
||||||
|
return this.client;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the underlying TsmdbServer instance (throws if not started)
|
||||||
|
*/
|
||||||
|
getServer(): TsmdbServer {
|
||||||
|
if (!this.server) {
|
||||||
|
throw new Error('LocalTsmDb is not running. Call start() first.');
|
||||||
|
}
|
||||||
|
return this.server;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the connection URI
|
||||||
|
*/
|
||||||
|
getConnectionUri(): string {
|
||||||
|
if (!this.server) {
|
||||||
|
throw new Error('LocalTsmDb is not running. Call start() first.');
|
||||||
|
}
|
||||||
|
return this.server.getConnectionUri();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if the server is running
|
||||||
|
*/
|
||||||
|
get running(): boolean {
|
||||||
|
return this.server !== null && this.server.running;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Stop the local TsmDB server and close the client connection
|
||||||
|
*/
|
||||||
|
async stop(): Promise<void> {
|
||||||
|
if (this.client) {
|
||||||
|
await this.client.close();
|
||||||
|
this.client = null;
|
||||||
|
}
|
||||||
|
if (this.server) {
|
||||||
|
await this.server.stop();
|
||||||
|
this.server = null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
2
ts/ts_local/index.ts
Normal file
2
ts/ts_local/index.ts
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
export { LocalTsmDb } from './classes.localtsmdb.js';
|
||||||
|
export type { ILocalTsmDbOptions } from './classes.localtsmdb.js';
|
||||||
4
ts/ts_local/plugins.ts
Normal file
4
ts/ts_local/plugins.ts
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
import * as smartpromise from '@push.rocks/smartpromise';
|
||||||
|
import * as net from 'net';
|
||||||
|
|
||||||
|
export { smartpromise, net };
|
||||||
71
ts/ts_mongotools/classes.smartmongo.ts
Normal file
71
ts/ts_mongotools/classes.smartmongo.ts
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|
import { commitinfo } from '../00_commitinfo_data.js';
|
||||||
|
import * as plugins from './plugins.js';
|
||||||
|
|
||||||
|
export class SmartMongo {
|
||||||
|
// STATIC
|
||||||
|
public static async createAndStart(replCountArg: number = 1) {
|
||||||
|
const smartMongoInstance = new SmartMongo();
|
||||||
|
await smartMongoInstance.start(replCountArg);
|
||||||
|
return smartMongoInstance;
|
||||||
|
}
|
||||||
|
|
||||||
|
// INSTANCE
|
||||||
|
private _readyDeferred = plugins.smartpromise.defer();
|
||||||
|
public readyPromise = this._readyDeferred.promise;
|
||||||
|
public mongoReplicaSet: plugins.mongoPlugin.MongoMemoryReplSet;
|
||||||
|
|
||||||
|
constructor() {}
|
||||||
|
|
||||||
|
public async start(countArg: number = 1) {
|
||||||
|
this.mongoReplicaSet = await plugins.mongoPlugin.MongoMemoryReplSet.create({
|
||||||
|
replSet: { count: countArg },
|
||||||
|
instanceOpts: [
|
||||||
|
{
|
||||||
|
storageEngine: 'wiredTiger',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
});
|
||||||
|
this._readyDeferred.resolve();
|
||||||
|
console.log(`mongoReplicaSet with ${countArg} replicas started.`);
|
||||||
|
console.log(`@pushrocks/smartmongo version ${commitinfo.version}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* returns a mongo descriptor for modules like
|
||||||
|
* @pushrocks/smartfile.
|
||||||
|
*/
|
||||||
|
public async getMongoDescriptor(): Promise<plugins.smartdata.IMongoDescriptor> {
|
||||||
|
await this.readyPromise;
|
||||||
|
return {
|
||||||
|
mongoDbName: `smartmongo_testdatabase`,
|
||||||
|
mongoDbUrl: this.mongoReplicaSet.getUri(),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* stops the smartmongo instance
|
||||||
|
* and cleans up after itself
|
||||||
|
*/
|
||||||
|
public async stop() {
|
||||||
|
await this.mongoReplicaSet.stop();
|
||||||
|
await this.mongoReplicaSet.cleanup();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* like stop() but allows you to actually store
|
||||||
|
* the database on disk
|
||||||
|
*/
|
||||||
|
public async stopAndDumpToDir(
|
||||||
|
dirArg: string,
|
||||||
|
nameFunctionArg?: (doc: any) => string,
|
||||||
|
emptyDirArg = true,
|
||||||
|
) {
|
||||||
|
const mongodumpInstance = new plugins.mongodump.MongoDump();
|
||||||
|
const mongodumpTarget = await mongodumpInstance.addMongoTargetByMongoDescriptor(
|
||||||
|
await this.getMongoDescriptor(),
|
||||||
|
);
|
||||||
|
await mongodumpTarget.dumpAllCollectionsToDir(dirArg, nameFunctionArg, emptyDirArg);
|
||||||
|
await mongodumpInstance.stop();
|
||||||
|
await this.stop();
|
||||||
|
}
|
||||||
|
}
|
||||||
2
ts/ts_mongotools/index.ts
Normal file
2
ts/ts_mongotools/index.ts
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
export * from './plugins.js';
|
||||||
|
export { SmartMongo } from './classes.smartmongo.js';
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
import * as plugins from '../tsmdb.plugins.js';
|
import * as plugins from '../plugins.js';
|
||||||
import type { Document, IStoredDocument, IAggregateOptions } from '../types/interfaces.js';
|
import type { Document, IStoredDocument, IAggregateOptions } from '../types/interfaces.js';
|
||||||
|
|
||||||
// Import mingo Aggregator
|
// Import mingo Aggregator
|
||||||
798
ts/ts_tsmdb/engine/IndexEngine.ts
Normal file
798
ts/ts_tsmdb/engine/IndexEngine.ts
Normal file
@@ -0,0 +1,798 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import type { IStorageAdapter } from '../storage/IStorageAdapter.js';
|
||||||
|
|
||||||
|
// Simple B-Tree implementation for range queries
|
||||||
|
// Since sorted-btree has ESM/CJS interop issues, we use a simple custom implementation
|
||||||
|
class SimpleBTree<K, V> {
|
||||||
|
private entries: Map<string, { key: K; value: V }> = new Map();
|
||||||
|
private sortedKeys: K[] = [];
|
||||||
|
private comparator: (a: K, b: K) => number;
|
||||||
|
|
||||||
|
constructor(_unused?: undefined, comparator?: (a: K, b: K) => number) {
|
||||||
|
this.comparator = comparator || ((a: K, b: K) => {
|
||||||
|
if (a < b) return -1;
|
||||||
|
if (a > b) return 1;
|
||||||
|
return 0;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
private keyToString(key: K): string {
|
||||||
|
return JSON.stringify(key);
|
||||||
|
}
|
||||||
|
|
||||||
|
set(key: K, value: V): boolean {
|
||||||
|
const keyStr = this.keyToString(key);
|
||||||
|
const existed = this.entries.has(keyStr);
|
||||||
|
this.entries.set(keyStr, { key, value });
|
||||||
|
|
||||||
|
if (!existed) {
|
||||||
|
// Insert in sorted order
|
||||||
|
const idx = this.sortedKeys.findIndex(k => this.comparator(k, key) > 0);
|
||||||
|
if (idx === -1) {
|
||||||
|
this.sortedKeys.push(key);
|
||||||
|
} else {
|
||||||
|
this.sortedKeys.splice(idx, 0, key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return !existed;
|
||||||
|
}
|
||||||
|
|
||||||
|
get(key: K): V | undefined {
|
||||||
|
const entry = this.entries.get(this.keyToString(key));
|
||||||
|
return entry?.value;
|
||||||
|
}
|
||||||
|
|
||||||
|
delete(key: K): boolean {
|
||||||
|
const keyStr = this.keyToString(key);
|
||||||
|
if (this.entries.has(keyStr)) {
|
||||||
|
this.entries.delete(keyStr);
|
||||||
|
const idx = this.sortedKeys.findIndex(k => this.comparator(k, key) === 0);
|
||||||
|
if (idx !== -1) {
|
||||||
|
this.sortedKeys.splice(idx, 1);
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
forRange(
|
||||||
|
lowKey: K | undefined,
|
||||||
|
highKey: K | undefined,
|
||||||
|
lowInclusive: boolean,
|
||||||
|
highInclusive: boolean,
|
||||||
|
callback: (value: V, key: K) => void
|
||||||
|
): void {
|
||||||
|
for (const key of this.sortedKeys) {
|
||||||
|
// Check low bound
|
||||||
|
if (lowKey !== undefined) {
|
||||||
|
const cmp = this.comparator(key, lowKey);
|
||||||
|
if (cmp < 0) continue;
|
||||||
|
if (cmp === 0 && !lowInclusive) continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check high bound
|
||||||
|
if (highKey !== undefined) {
|
||||||
|
const cmp = this.comparator(key, highKey);
|
||||||
|
if (cmp > 0) break;
|
||||||
|
if (cmp === 0 && !highInclusive) break;
|
||||||
|
}
|
||||||
|
|
||||||
|
const entry = this.entries.get(this.keyToString(key));
|
||||||
|
if (entry) {
|
||||||
|
callback(entry.value, key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
import type {
|
||||||
|
Document,
|
||||||
|
IStoredDocument,
|
||||||
|
IIndexSpecification,
|
||||||
|
IIndexInfo,
|
||||||
|
ICreateIndexOptions,
|
||||||
|
} from '../types/interfaces.js';
|
||||||
|
import { TsmdbDuplicateKeyError, TsmdbIndexError } from '../errors/TsmdbErrors.js';
|
||||||
|
import { QueryEngine } from './QueryEngine.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Comparator for B-Tree that handles mixed types consistently
|
||||||
|
*/
|
||||||
|
function indexKeyComparator(a: any, b: any): number {
|
||||||
|
// Handle null/undefined
|
||||||
|
if (a === null || a === undefined) {
|
||||||
|
if (b === null || b === undefined) return 0;
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
if (b === null || b === undefined) return 1;
|
||||||
|
|
||||||
|
// Handle arrays (compound keys)
|
||||||
|
if (Array.isArray(a) && Array.isArray(b)) {
|
||||||
|
for (let i = 0; i < Math.max(a.length, b.length); i++) {
|
||||||
|
const cmp = indexKeyComparator(a[i], b[i]);
|
||||||
|
if (cmp !== 0) return cmp;
|
||||||
|
}
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle ObjectId
|
||||||
|
if (a instanceof plugins.bson.ObjectId && b instanceof plugins.bson.ObjectId) {
|
||||||
|
return a.toHexString().localeCompare(b.toHexString());
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle Date
|
||||||
|
if (a instanceof Date && b instanceof Date) {
|
||||||
|
return a.getTime() - b.getTime();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle different types - use type ordering (null < number < string < object)
|
||||||
|
const typeOrder = (v: any): number => {
|
||||||
|
if (v === null || v === undefined) return 0;
|
||||||
|
if (typeof v === 'number') return 1;
|
||||||
|
if (typeof v === 'string') return 2;
|
||||||
|
if (typeof v === 'boolean') return 3;
|
||||||
|
if (v instanceof Date) return 4;
|
||||||
|
if (v instanceof plugins.bson.ObjectId) return 5;
|
||||||
|
return 6;
|
||||||
|
};
|
||||||
|
|
||||||
|
const typeA = typeOrder(a);
|
||||||
|
const typeB = typeOrder(b);
|
||||||
|
if (typeA !== typeB) return typeA - typeB;
|
||||||
|
|
||||||
|
// Same type comparison
|
||||||
|
if (typeof a === 'number') return a - b;
|
||||||
|
if (typeof a === 'string') return a.localeCompare(b);
|
||||||
|
if (typeof a === 'boolean') return (a ? 1 : 0) - (b ? 1 : 0);
|
||||||
|
|
||||||
|
// Fallback to string comparison
|
||||||
|
return String(a).localeCompare(String(b));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Index data structure using B-Tree for range queries.
 *
 * Each index maintains two parallel lookup structures over the same entries:
 * an ordered B-tree (for range scans) and a hash map (for O(1) equality
 * lookups keyed by JSON.stringify of the key value).
 */
interface IIndexData {
  // Index name, e.g. "email_1" or the reserved "_id_"
  name: string;
  // Field -> direction (1 / -1) or a special index type string
  key: Record<string, 1 | -1 | string>;
  // When true, duplicate key values are rejected with a duplicate-key error
  unique: boolean;
  // When true, documents whose key value is null/missing are not indexed
  sparse: boolean;
  // Optional TTL, in seconds (stored as metadata; enforcement happens elsewhere)
  expireAfterSeconds?: number;
  // B-Tree for ordered index lookups (supports range queries);
  // maps key value -> set of document id hex strings
  btree: SimpleBTree<any, Set<string>>;
  // Hash map for fast equality lookups;
  // maps JSON.stringify(keyValue) -> set of document id hex strings
  hashMap: Map<string, Set<string>>;
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Index engine for managing indexes and query optimization
|
||||||
|
*/
|
||||||
|
export class IndexEngine {
|
||||||
|
private dbName: string;
|
||||||
|
private collName: string;
|
||||||
|
private storage: IStorageAdapter;
|
||||||
|
private indexes: Map<string, IIndexData> = new Map();
|
||||||
|
private initialized = false;
|
||||||
|
|
||||||
|
constructor(dbName: string, collName: string, storage: IStorageAdapter) {
|
||||||
|
this.dbName = dbName;
|
||||||
|
this.collName = collName;
|
||||||
|
this.storage = storage;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initialize indexes from storage
|
||||||
|
*/
|
||||||
|
async initialize(): Promise<void> {
|
||||||
|
if (this.initialized) return;
|
||||||
|
|
||||||
|
const storedIndexes = await this.storage.getIndexes(this.dbName, this.collName);
|
||||||
|
const documents = await this.storage.findAll(this.dbName, this.collName);
|
||||||
|
|
||||||
|
for (const indexSpec of storedIndexes) {
|
||||||
|
const indexData: IIndexData = {
|
||||||
|
name: indexSpec.name,
|
||||||
|
key: indexSpec.key,
|
||||||
|
unique: indexSpec.unique || false,
|
||||||
|
sparse: indexSpec.sparse || false,
|
||||||
|
expireAfterSeconds: indexSpec.expireAfterSeconds,
|
||||||
|
btree: new SimpleBTree<any, Set<string>>(undefined, indexKeyComparator),
|
||||||
|
hashMap: new Map(),
|
||||||
|
};
|
||||||
|
|
||||||
|
// Build index entries
|
||||||
|
for (const doc of documents) {
|
||||||
|
const keyValue = this.extractKeyValue(doc, indexSpec.key);
|
||||||
|
if (keyValue !== null || !indexData.sparse) {
|
||||||
|
const keyStr = JSON.stringify(keyValue);
|
||||||
|
|
||||||
|
// Add to hash map
|
||||||
|
if (!indexData.hashMap.has(keyStr)) {
|
||||||
|
indexData.hashMap.set(keyStr, new Set());
|
||||||
|
}
|
||||||
|
indexData.hashMap.get(keyStr)!.add(doc._id.toHexString());
|
||||||
|
|
||||||
|
// Add to B-tree
|
||||||
|
const existing = indexData.btree.get(keyValue);
|
||||||
|
if (existing) {
|
||||||
|
existing.add(doc._id.toHexString());
|
||||||
|
} else {
|
||||||
|
indexData.btree.set(keyValue, new Set([doc._id.toHexString()]));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.indexes.set(indexSpec.name, indexData);
|
||||||
|
}
|
||||||
|
|
||||||
|
this.initialized = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new index
|
||||||
|
*/
|
||||||
|
async createIndex(
|
||||||
|
key: Record<string, 1 | -1 | 'text' | '2dsphere'>,
|
||||||
|
options?: ICreateIndexOptions
|
||||||
|
): Promise<string> {
|
||||||
|
await this.initialize();
|
||||||
|
|
||||||
|
// Generate index name if not provided
|
||||||
|
const name = options?.name || this.generateIndexName(key);
|
||||||
|
|
||||||
|
// Check if index already exists
|
||||||
|
if (this.indexes.has(name)) {
|
||||||
|
return name;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create index data structure
|
||||||
|
const indexData: IIndexData = {
|
||||||
|
name,
|
||||||
|
key: key as Record<string, 1 | -1 | string>,
|
||||||
|
unique: options?.unique || false,
|
||||||
|
sparse: options?.sparse || false,
|
||||||
|
expireAfterSeconds: options?.expireAfterSeconds,
|
||||||
|
btree: new SimpleBTree<any, Set<string>>(undefined, indexKeyComparator),
|
||||||
|
hashMap: new Map(),
|
||||||
|
};
|
||||||
|
|
||||||
|
// Build index from existing documents
|
||||||
|
const documents = await this.storage.findAll(this.dbName, this.collName);
|
||||||
|
|
||||||
|
for (const doc of documents) {
|
||||||
|
const keyValue = this.extractKeyValue(doc, key);
|
||||||
|
|
||||||
|
if (keyValue === null && indexData.sparse) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const keyStr = JSON.stringify(keyValue);
|
||||||
|
|
||||||
|
if (indexData.unique && indexData.hashMap.has(keyStr)) {
|
||||||
|
throw new TsmdbDuplicateKeyError(
|
||||||
|
`E11000 duplicate key error index: ${this.dbName}.${this.collName}.$${name}`,
|
||||||
|
key as Record<string, 1>,
|
||||||
|
keyValue
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add to hash map
|
||||||
|
if (!indexData.hashMap.has(keyStr)) {
|
||||||
|
indexData.hashMap.set(keyStr, new Set());
|
||||||
|
}
|
||||||
|
indexData.hashMap.get(keyStr)!.add(doc._id.toHexString());
|
||||||
|
|
||||||
|
// Add to B-tree
|
||||||
|
const existing = indexData.btree.get(keyValue);
|
||||||
|
if (existing) {
|
||||||
|
existing.add(doc._id.toHexString());
|
||||||
|
} else {
|
||||||
|
indexData.btree.set(keyValue, new Set([doc._id.toHexString()]));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Store index
|
||||||
|
this.indexes.set(name, indexData);
|
||||||
|
await this.storage.saveIndex(this.dbName, this.collName, name, {
|
||||||
|
key,
|
||||||
|
unique: options?.unique,
|
||||||
|
sparse: options?.sparse,
|
||||||
|
expireAfterSeconds: options?.expireAfterSeconds,
|
||||||
|
});
|
||||||
|
|
||||||
|
return name;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Drop an index
|
||||||
|
*/
|
||||||
|
async dropIndex(name: string): Promise<void> {
|
||||||
|
await this.initialize();
|
||||||
|
|
||||||
|
if (name === '_id_') {
|
||||||
|
throw new TsmdbIndexError('cannot drop _id index');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!this.indexes.has(name)) {
|
||||||
|
throw new TsmdbIndexError(`index not found: ${name}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
this.indexes.delete(name);
|
||||||
|
await this.storage.dropIndex(this.dbName, this.collName, name);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Drop all indexes except _id
|
||||||
|
*/
|
||||||
|
async dropAllIndexes(): Promise<void> {
|
||||||
|
await this.initialize();
|
||||||
|
|
||||||
|
const names = Array.from(this.indexes.keys()).filter(n => n !== '_id_');
|
||||||
|
for (const name of names) {
|
||||||
|
this.indexes.delete(name);
|
||||||
|
await this.storage.dropIndex(this.dbName, this.collName, name);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* List all indexes
|
||||||
|
*/
|
||||||
|
async listIndexes(): Promise<IIndexInfo[]> {
|
||||||
|
await this.initialize();
|
||||||
|
|
||||||
|
return Array.from(this.indexes.values()).map(idx => ({
|
||||||
|
v: 2,
|
||||||
|
key: idx.key,
|
||||||
|
name: idx.name,
|
||||||
|
unique: idx.unique || undefined,
|
||||||
|
sparse: idx.sparse || undefined,
|
||||||
|
expireAfterSeconds: idx.expireAfterSeconds,
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if an index exists
|
||||||
|
*/
|
||||||
|
async indexExists(name: string): Promise<boolean> {
|
||||||
|
await this.initialize();
|
||||||
|
return this.indexes.has(name);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update index entries after document insert
|
||||||
|
*/
|
||||||
|
async onInsert(doc: IStoredDocument): Promise<void> {
|
||||||
|
await this.initialize();
|
||||||
|
|
||||||
|
for (const [name, indexData] of this.indexes) {
|
||||||
|
const keyValue = this.extractKeyValue(doc, indexData.key);
|
||||||
|
|
||||||
|
if (keyValue === null && indexData.sparse) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const keyStr = JSON.stringify(keyValue);
|
||||||
|
|
||||||
|
// Check unique constraint
|
||||||
|
if (indexData.unique) {
|
||||||
|
const existing = indexData.hashMap.get(keyStr);
|
||||||
|
if (existing && existing.size > 0) {
|
||||||
|
throw new TsmdbDuplicateKeyError(
|
||||||
|
`E11000 duplicate key error collection: ${this.dbName}.${this.collName} index: ${name}`,
|
||||||
|
indexData.key as Record<string, 1>,
|
||||||
|
keyValue
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add to hash map
|
||||||
|
if (!indexData.hashMap.has(keyStr)) {
|
||||||
|
indexData.hashMap.set(keyStr, new Set());
|
||||||
|
}
|
||||||
|
indexData.hashMap.get(keyStr)!.add(doc._id.toHexString());
|
||||||
|
|
||||||
|
// Add to B-tree
|
||||||
|
const btreeSet = indexData.btree.get(keyValue);
|
||||||
|
if (btreeSet) {
|
||||||
|
btreeSet.add(doc._id.toHexString());
|
||||||
|
} else {
|
||||||
|
indexData.btree.set(keyValue, new Set([doc._id.toHexString()]));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update index entries after document update
|
||||||
|
*/
|
||||||
|
async onUpdate(oldDoc: IStoredDocument, newDoc: IStoredDocument): Promise<void> {
|
||||||
|
await this.initialize();
|
||||||
|
|
||||||
|
for (const [name, indexData] of this.indexes) {
|
||||||
|
const oldKeyValue = this.extractKeyValue(oldDoc, indexData.key);
|
||||||
|
const newKeyValue = this.extractKeyValue(newDoc, indexData.key);
|
||||||
|
const oldKeyStr = JSON.stringify(oldKeyValue);
|
||||||
|
const newKeyStr = JSON.stringify(newKeyValue);
|
||||||
|
|
||||||
|
// Remove old entry if key changed
|
||||||
|
if (oldKeyStr !== newKeyStr) {
|
||||||
|
if (oldKeyValue !== null || !indexData.sparse) {
|
||||||
|
// Remove from hash map
|
||||||
|
const oldHashSet = indexData.hashMap.get(oldKeyStr);
|
||||||
|
if (oldHashSet) {
|
||||||
|
oldHashSet.delete(oldDoc._id.toHexString());
|
||||||
|
if (oldHashSet.size === 0) {
|
||||||
|
indexData.hashMap.delete(oldKeyStr);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Remove from B-tree
|
||||||
|
const oldBtreeSet = indexData.btree.get(oldKeyValue);
|
||||||
|
if (oldBtreeSet) {
|
||||||
|
oldBtreeSet.delete(oldDoc._id.toHexString());
|
||||||
|
if (oldBtreeSet.size === 0) {
|
||||||
|
indexData.btree.delete(oldKeyValue);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add new entry
|
||||||
|
if (newKeyValue !== null || !indexData.sparse) {
|
||||||
|
// Check unique constraint
|
||||||
|
if (indexData.unique) {
|
||||||
|
const existing = indexData.hashMap.get(newKeyStr);
|
||||||
|
if (existing && existing.size > 0) {
|
||||||
|
throw new TsmdbDuplicateKeyError(
|
||||||
|
`E11000 duplicate key error collection: ${this.dbName}.${this.collName} index: ${name}`,
|
||||||
|
indexData.key as Record<string, 1>,
|
||||||
|
newKeyValue
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add to hash map
|
||||||
|
if (!indexData.hashMap.has(newKeyStr)) {
|
||||||
|
indexData.hashMap.set(newKeyStr, new Set());
|
||||||
|
}
|
||||||
|
indexData.hashMap.get(newKeyStr)!.add(newDoc._id.toHexString());
|
||||||
|
|
||||||
|
// Add to B-tree
|
||||||
|
const newBtreeSet = indexData.btree.get(newKeyValue);
|
||||||
|
if (newBtreeSet) {
|
||||||
|
newBtreeSet.add(newDoc._id.toHexString());
|
||||||
|
} else {
|
||||||
|
indexData.btree.set(newKeyValue, new Set([newDoc._id.toHexString()]));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update index entries after document delete
|
||||||
|
*/
|
||||||
|
async onDelete(doc: IStoredDocument): Promise<void> {
|
||||||
|
await this.initialize();
|
||||||
|
|
||||||
|
for (const indexData of this.indexes.values()) {
|
||||||
|
const keyValue = this.extractKeyValue(doc, indexData.key);
|
||||||
|
|
||||||
|
if (keyValue === null && indexData.sparse) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const keyStr = JSON.stringify(keyValue);
|
||||||
|
|
||||||
|
// Remove from hash map
|
||||||
|
const hashSet = indexData.hashMap.get(keyStr);
|
||||||
|
if (hashSet) {
|
||||||
|
hashSet.delete(doc._id.toHexString());
|
||||||
|
if (hashSet.size === 0) {
|
||||||
|
indexData.hashMap.delete(keyStr);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Remove from B-tree
|
||||||
|
const btreeSet = indexData.btree.get(keyValue);
|
||||||
|
if (btreeSet) {
|
||||||
|
btreeSet.delete(doc._id.toHexString());
|
||||||
|
if (btreeSet.size === 0) {
|
||||||
|
indexData.btree.delete(keyValue);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find the best index for a query
|
||||||
|
*/
|
||||||
|
selectIndex(filter: Document): { name: string; data: IIndexData } | null {
|
||||||
|
if (!filter || Object.keys(filter).length === 0) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get filter fields and operators
|
||||||
|
const filterInfo = this.analyzeFilter(filter);
|
||||||
|
|
||||||
|
// Score each index
|
||||||
|
let bestIndex: { name: string; data: IIndexData } | null = null;
|
||||||
|
let bestScore = 0;
|
||||||
|
|
||||||
|
for (const [name, indexData] of this.indexes) {
|
||||||
|
const indexFields = Object.keys(indexData.key);
|
||||||
|
let score = 0;
|
||||||
|
|
||||||
|
// Count how many index fields can be used
|
||||||
|
for (const field of indexFields) {
|
||||||
|
const info = filterInfo.get(field);
|
||||||
|
if (!info) break;
|
||||||
|
|
||||||
|
// Equality is best
|
||||||
|
if (info.equality) {
|
||||||
|
score += 2;
|
||||||
|
} else if (info.range) {
|
||||||
|
// Range queries can use B-tree
|
||||||
|
score += 1;
|
||||||
|
} else if (info.in) {
|
||||||
|
score += 1.5;
|
||||||
|
} else {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Prefer unique indexes
|
||||||
|
if (indexData.unique && score > 0) {
|
||||||
|
score += 0.5;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (score > bestScore) {
|
||||||
|
bestScore = score;
|
||||||
|
bestIndex = { name, data: indexData };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return bestIndex;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Analyze filter to extract field operators
|
||||||
|
*/
|
||||||
|
private analyzeFilter(filter: Document): Map<string, { equality: boolean; range: boolean; in: boolean; ops: Record<string, any> }> {
|
||||||
|
const result = new Map<string, { equality: boolean; range: boolean; in: boolean; ops: Record<string, any> }>();
|
||||||
|
|
||||||
|
for (const [key, value] of Object.entries(filter)) {
|
||||||
|
if (key.startsWith('$')) continue;
|
||||||
|
|
||||||
|
const info = { equality: false, range: false, in: false, ops: {} as Record<string, any> };
|
||||||
|
|
||||||
|
if (typeof value !== 'object' || value === null || value instanceof plugins.bson.ObjectId || value instanceof Date) {
|
||||||
|
info.equality = true;
|
||||||
|
info.ops['$eq'] = value;
|
||||||
|
} else {
|
||||||
|
const ops = value as Record<string, any>;
|
||||||
|
if (ops.$eq !== undefined) {
|
||||||
|
info.equality = true;
|
||||||
|
info.ops['$eq'] = ops.$eq;
|
||||||
|
}
|
||||||
|
if (ops.$in !== undefined) {
|
||||||
|
info.in = true;
|
||||||
|
info.ops['$in'] = ops.$in;
|
||||||
|
}
|
||||||
|
if (ops.$gt !== undefined || ops.$gte !== undefined || ops.$lt !== undefined || ops.$lte !== undefined) {
|
||||||
|
info.range = true;
|
||||||
|
if (ops.$gt !== undefined) info.ops['$gt'] = ops.$gt;
|
||||||
|
if (ops.$gte !== undefined) info.ops['$gte'] = ops.$gte;
|
||||||
|
if (ops.$lt !== undefined) info.ops['$lt'] = ops.$lt;
|
||||||
|
if (ops.$lte !== undefined) info.ops['$lte'] = ops.$lte;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
result.set(key, info);
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Use index to find candidate document IDs (supports range queries with B-tree)
|
||||||
|
*/
|
||||||
|
async findCandidateIds(filter: Document): Promise<Set<string> | null> {
|
||||||
|
await this.initialize();
|
||||||
|
|
||||||
|
const index = this.selectIndex(filter);
|
||||||
|
if (!index) return null;
|
||||||
|
|
||||||
|
const filterInfo = this.analyzeFilter(filter);
|
||||||
|
const indexFields = Object.keys(index.data.key);
|
||||||
|
|
||||||
|
// For single-field indexes with range queries, use B-tree
|
||||||
|
if (indexFields.length === 1) {
|
||||||
|
const field = indexFields[0];
|
||||||
|
const info = filterInfo.get(field);
|
||||||
|
|
||||||
|
if (info) {
|
||||||
|
// Handle equality using hash map (faster)
|
||||||
|
if (info.equality) {
|
||||||
|
const keyStr = JSON.stringify(info.ops['$eq']);
|
||||||
|
return index.data.hashMap.get(keyStr) || new Set();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle $in using hash map
|
||||||
|
if (info.in) {
|
||||||
|
const results = new Set<string>();
|
||||||
|
for (const val of info.ops['$in']) {
|
||||||
|
const keyStr = JSON.stringify(val);
|
||||||
|
const ids = index.data.hashMap.get(keyStr);
|
||||||
|
if (ids) {
|
||||||
|
for (const id of ids) {
|
||||||
|
results.add(id);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return results;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle range queries using B-tree
|
||||||
|
if (info.range) {
|
||||||
|
return this.findRangeCandidates(index.data, info.ops);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// For compound indexes, use hash map with partial key matching
|
||||||
|
const equalityValues: Record<string, any> = {};
|
||||||
|
|
||||||
|
for (const field of indexFields) {
|
||||||
|
const info = filterInfo.get(field);
|
||||||
|
if (!info) break;
|
||||||
|
|
||||||
|
if (info.equality) {
|
||||||
|
equalityValues[field] = info.ops['$eq'];
|
||||||
|
} else if (info.in) {
|
||||||
|
// Handle $in with multiple lookups
|
||||||
|
const results = new Set<string>();
|
||||||
|
for (const val of info.ops['$in']) {
|
||||||
|
equalityValues[field] = val;
|
||||||
|
const keyStr = JSON.stringify(this.buildKeyValue(equalityValues, index.data.key));
|
||||||
|
const ids = index.data.hashMap.get(keyStr);
|
||||||
|
if (ids) {
|
||||||
|
for (const id of ids) {
|
||||||
|
results.add(id);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return results;
|
||||||
|
} else {
|
||||||
|
break; // Non-equality/in operator, stop here
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (Object.keys(equalityValues).length > 0) {
|
||||||
|
const keyStr = JSON.stringify(this.buildKeyValue(equalityValues, index.data.key));
|
||||||
|
return index.data.hashMap.get(keyStr) || new Set();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find candidates using B-tree range scan
|
||||||
|
*/
|
||||||
|
private findRangeCandidates(indexData: IIndexData, ops: Record<string, any>): Set<string> {
|
||||||
|
const results = new Set<string>();
|
||||||
|
|
||||||
|
let lowKey: any = undefined;
|
||||||
|
let highKey: any = undefined;
|
||||||
|
let lowInclusive = true;
|
||||||
|
let highInclusive = true;
|
||||||
|
|
||||||
|
if (ops['$gt'] !== undefined) {
|
||||||
|
lowKey = ops['$gt'];
|
||||||
|
lowInclusive = false;
|
||||||
|
}
|
||||||
|
if (ops['$gte'] !== undefined) {
|
||||||
|
lowKey = ops['$gte'];
|
||||||
|
lowInclusive = true;
|
||||||
|
}
|
||||||
|
if (ops['$lt'] !== undefined) {
|
||||||
|
highKey = ops['$lt'];
|
||||||
|
highInclusive = false;
|
||||||
|
}
|
||||||
|
if (ops['$lte'] !== undefined) {
|
||||||
|
highKey = ops['$lte'];
|
||||||
|
highInclusive = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use B-tree range iteration
|
||||||
|
indexData.btree.forRange(lowKey, highKey, lowInclusive, highInclusive, (value, key) => {
|
||||||
|
if (value) {
|
||||||
|
for (const id of value) {
|
||||||
|
results.add(id);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
return results;
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Helper Methods
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
private generateIndexName(key: Record<string, any>): string {
|
||||||
|
return Object.entries(key)
|
||||||
|
.map(([field, dir]) => `${field}_${dir}`)
|
||||||
|
.join('_');
|
||||||
|
}
|
||||||
|
|
||||||
|
private extractKeyValue(doc: Document, key: Record<string, any>): any {
|
||||||
|
const values: any[] = [];
|
||||||
|
|
||||||
|
for (const field of Object.keys(key)) {
|
||||||
|
const value = QueryEngine.getNestedValue(doc, field);
|
||||||
|
values.push(value === undefined ? null : value);
|
||||||
|
}
|
||||||
|
|
||||||
|
// For single-field index, return the value directly
|
||||||
|
if (values.length === 1) {
|
||||||
|
return values[0];
|
||||||
|
}
|
||||||
|
|
||||||
|
return values;
|
||||||
|
}
|
||||||
|
|
||||||
|
private buildKeyValue(values: Record<string, any>, key: Record<string, any>): any {
|
||||||
|
const result: any[] = [];
|
||||||
|
|
||||||
|
for (const field of Object.keys(key)) {
|
||||||
|
result.push(values[field] !== undefined ? values[field] : null);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (result.length === 1) {
|
||||||
|
return result[0];
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
private getFilterFields(filter: Document, prefix = ''): string[] {
|
||||||
|
const fields: string[] = [];
|
||||||
|
|
||||||
|
for (const [key, value] of Object.entries(filter)) {
|
||||||
|
if (key.startsWith('$')) {
|
||||||
|
// Logical operator
|
||||||
|
if (key === '$and' || key === '$or' || key === '$nor') {
|
||||||
|
for (const subFilter of value as Document[]) {
|
||||||
|
fields.push(...this.getFilterFields(subFilter, prefix));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
const fullKey = prefix ? `${prefix}.${key}` : key;
|
||||||
|
fields.push(fullKey);
|
||||||
|
|
||||||
|
// Check for nested filters
|
||||||
|
if (typeof value === 'object' && value !== null && !Array.isArray(value)) {
|
||||||
|
const subKeys = Object.keys(value);
|
||||||
|
if (subKeys.length > 0 && !subKeys[0].startsWith('$')) {
|
||||||
|
fields.push(...this.getFilterFields(value, fullKey));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return fields;
|
||||||
|
}
|
||||||
|
|
||||||
|
private getFilterValue(filter: Document, field: string): any {
|
||||||
|
// Handle dot notation
|
||||||
|
const parts = field.split('.');
|
||||||
|
let current: any = filter;
|
||||||
|
|
||||||
|
for (const part of parts) {
|
||||||
|
if (current === null || current === undefined) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
current = current[part];
|
||||||
|
}
|
||||||
|
|
||||||
|
return current;
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
import * as plugins from '../tsmdb.plugins.js';
|
import * as plugins from '../plugins.js';
|
||||||
import type { Document, IStoredDocument, ISortSpecification, ISortDirection } from '../types/interfaces.js';
|
import type { Document, IStoredDocument, ISortSpecification, ISortDirection } from '../types/interfaces.js';
|
||||||
|
|
||||||
// Import mingo Query class
|
// Import mingo Query class
|
||||||
393
ts/ts_tsmdb/engine/QueryPlanner.ts
Normal file
393
ts/ts_tsmdb/engine/QueryPlanner.ts
Normal file
@@ -0,0 +1,393 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import type { Document, IStoredDocument } from '../types/interfaces.js';
|
||||||
|
import { IndexEngine } from './IndexEngine.js';
|
||||||
|
|
||||||
|
/**
 * Query execution plan types.
 * IXSCAN = index equality scan, IXSCAN_RANGE = index range scan,
 * COLLSCAN = full collection scan, FETCH = document fetch stage.
 */
export type TQueryPlanType = 'IXSCAN' | 'COLLSCAN' | 'FETCH' | 'IXSCAN_RANGE';
|
||||||
|
|
||||||
|
/**
 * Represents a query execution plan produced by the QueryPlanner.
 */
export interface IQueryPlan {
  /** The type of scan used */
  type: TQueryPlanType;
  /** Index name if using an index */
  indexName?: string;
  /** Index key specification */
  indexKey?: Record<string, 1 | -1 | string>;
  /** Whether the query can be fully satisfied by the index (no residual filter) */
  indexCovering: boolean;
  /** Estimated selectivity (0-1, lower is more selective) */
  selectivity: number;
  /** Whether range operators are used (plan type IXSCAN_RANGE) */
  usesRange: boolean;
  /** Fields used from the index */
  indexFieldsUsed: string[];
  /** Filter conditions that must be applied post-index lookup */
  residualFilter?: Document;
  /** Explanation for debugging */
  explanation: string;
}
|
||||||
|
|
||||||
|
/**
 * Filter operator analysis: per-field summary of which operator classes a
 * query filter applies to one field.
 */
interface IFilterOperatorInfo {
  // Full (dotted) field path
  field: string;
  // All operator names seen for this field (e.g. '$eq', '$gte')
  operators: string[];
  // Field is constrained by direct value or $eq
  equality: boolean;
  // Field uses $gt/$gte/$lt/$lte
  range: boolean;
  // Field uses $in
  in: boolean;
  // Field uses $exists
  exists: boolean;
  // Field uses $regex
  regex: boolean;
  // Operator name -> operand value
  values: Record<string, any>;
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* QueryPlanner - Analyzes queries and selects optimal execution plans
|
||||||
|
*/
|
||||||
|
export class QueryPlanner {
|
||||||
|
  // Index engine whose indexes are consulted when planning queries.
  private indexEngine: IndexEngine;

  /**
   * @param indexEngine - the collection's index engine to plan against
   */
  constructor(indexEngine: IndexEngine) {
    this.indexEngine = indexEngine;
  }
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate an execution plan for a query filter
|
||||||
|
*/
|
||||||
|
async plan(filter: Document): Promise<IQueryPlan> {
|
||||||
|
await this.indexEngine['initialize']();
|
||||||
|
|
||||||
|
// Empty filter = full collection scan
|
||||||
|
if (!filter || Object.keys(filter).length === 0) {
|
||||||
|
return {
|
||||||
|
type: 'COLLSCAN',
|
||||||
|
indexCovering: false,
|
||||||
|
selectivity: 1.0,
|
||||||
|
usesRange: false,
|
||||||
|
indexFieldsUsed: [],
|
||||||
|
explanation: 'No filter specified, full collection scan required',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Analyze the filter
|
||||||
|
const operatorInfo = this.analyzeFilter(filter);
|
||||||
|
|
||||||
|
// Get available indexes
|
||||||
|
const indexes = await this.indexEngine.listIndexes();
|
||||||
|
|
||||||
|
// Score each index
|
||||||
|
let bestPlan: IQueryPlan | null = null;
|
||||||
|
let bestScore = -1;
|
||||||
|
|
||||||
|
for (const index of indexes) {
|
||||||
|
const plan = this.scoreIndex(index, operatorInfo, filter);
|
||||||
|
if (plan.selectivity < 1.0) {
|
||||||
|
const score = this.calculateScore(plan);
|
||||||
|
if (score > bestScore) {
|
||||||
|
bestScore = score;
|
||||||
|
bestPlan = plan;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// If no suitable index found, fall back to collection scan
|
||||||
|
if (!bestPlan || bestScore <= 0) {
|
||||||
|
return {
|
||||||
|
type: 'COLLSCAN',
|
||||||
|
indexCovering: false,
|
||||||
|
selectivity: 1.0,
|
||||||
|
usesRange: false,
|
||||||
|
indexFieldsUsed: [],
|
||||||
|
explanation: 'No suitable index found for this query',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return bestPlan;
|
||||||
|
}
|
||||||
|
|
||||||
|
  /**
   * Analyze filter to extract operator information per field.
   *
   * Walks the filter object, producing one IFilterOperatorInfo per dotted
   * field path. $and branches are recursed into and merged per field; other
   * logical operators ($or, $nor, ...) are skipped entirely, so their fields
   * do not contribute to index selection.
   *
   * @param filter - MongoDB-style filter (or a sub-filter during recursion)
   * @param prefix - dotted path prefix accumulated through recursion
   * @returns map of full field path -> operator summary
   */
  private analyzeFilter(filter: Document, prefix = ''): Map<string, IFilterOperatorInfo> {
    const result = new Map<string, IFilterOperatorInfo>();

    for (const [key, value] of Object.entries(filter)) {
      // Skip logical operators at the top level
      if (key.startsWith('$')) {
        if (key === '$and' && Array.isArray(value)) {
          // Merge $and conditions: all branches apply simultaneously, so
          // per-field flags are OR-ed and operand values combined.
          for (const subFilter of value) {
            const subInfo = this.analyzeFilter(subFilter, prefix);
            for (const [field, info] of subInfo) {
              if (result.has(field)) {
                // Merge operators into the existing entry for this field.
                const existing = result.get(field)!;
                existing.operators.push(...info.operators);
                existing.equality = existing.equality || info.equality;
                existing.range = existing.range || info.range;
                existing.in = existing.in || info.in;
                // NOTE(review): later $and branches overwrite earlier operand
                // values for the same operator — confirm this is intended.
                Object.assign(existing.values, info.values);
              } else {
                result.set(field, info);
              }
            }
          }
        }
        continue;
      }

      const fullKey = prefix ? `${prefix}.${key}` : key;
      const info: IFilterOperatorInfo = {
        field: fullKey,
        operators: [],
        equality: false,
        range: false,
        in: false,
        exists: false,
        regex: false,
        values: {},
      };

      if (typeof value !== 'object' || value === null || value instanceof plugins.bson.ObjectId || value instanceof Date) {
        // Direct equality (scalars, plus ObjectId/Date which are objects at runtime)
        info.equality = true;
        info.operators.push('$eq');
        info.values['$eq'] = value;
      } else if (Array.isArray(value)) {
        // Array equality (rare, but possible)
        info.equality = true;
        info.operators.push('$eq');
        info.values['$eq'] = value;
      } else {
        // Operator object: classify each $-operator for this field.
        for (const [op, opValue] of Object.entries(value)) {
          if (op.startsWith('$')) {
            info.operators.push(op);
            info.values[op] = opValue;

            switch (op) {
              case '$eq':
                info.equality = true;
                break;
              case '$ne':
              case '$not':
                // These can use indexes but with low selectivity
                break;
              case '$in':
                info.in = true;
                break;
              case '$nin':
                // Can't efficiently use indexes
                break;
              case '$gt':
              case '$gte':
              case '$lt':
              case '$lte':
                info.range = true;
                break;
              case '$exists':
                info.exists = true;
                break;
              case '$regex':
                info.regex = true;
                break;
            }
          } else {
            // Nested object (non-$ key): treat as a deeper dotted path and recurse.
            const nestedInfo = this.analyzeFilter({ [op]: opValue }, fullKey);
            for (const [nestedField, nestedFieldInfo] of nestedInfo) {
              result.set(nestedField, nestedFieldInfo);
            }
          }
        }
      }

      // Only record fields that actually carry at least one operator.
      if (info.operators.length > 0) {
        result.set(fullKey, info);
      }
    }

    return result;
  }
|
||||||
|
|
||||||
|
  /**
   * Score an index for the given filter and build a candidate plan.
   *
   * Walks the index's fields in declaration order: each field usable via
   * equality/$in/$exists multiplies the selectivity estimate down; the first
   * range field is used but terminates the walk (classic compound-index
   * prefix rule); any other operator makes the index unusable.
   *
   * @param index - index definition (name, key spec, unique/sparse flags)
   * @param operatorInfo - per-field operator analysis from analyzeFilter
   * @param filter - original filter (NOTE(review): currently unused here)
   * @returns an IXSCAN/IXSCAN_RANGE plan, or a COLLSCAN plan when unusable
   */
  private scoreIndex(
    index: { name: string; key: Record<string, any>; unique?: boolean; sparse?: boolean },
    operatorInfo: Map<string, IFilterOperatorInfo>,
    filter: Document
  ): IQueryPlan {
    const indexFields = Object.keys(index.key);
    const usedFields: string[] = [];
    let usesRange = false;
    let canUseIndex = true;
    let selectivity = 1.0;  // 1.0 = no constraint; multiplied down per usable field
    let residualFilter: Document | undefined;

    // Check each index field in order
    for (const field of indexFields) {
      const info = operatorInfo.get(field);
      if (!info) {
        // Index field not in filter - stop here
        break;
      }

      usedFields.push(field);

      // Calculate selectivity based on operator
      if (info.equality) {
        // Equality has high selectivity
        selectivity *= 0.01; // Assume 1% match
      } else if (info.in) {
        // $in selectivity depends on array size
        const inValues = info.values['$in'];
        if (Array.isArray(inValues)) {
          selectivity *= Math.min(0.5, inValues.length * 0.01);
        } else {
          selectivity *= 0.1;
        }
      } else if (info.range) {
        // Range queries have moderate selectivity
        selectivity *= 0.25;
        usesRange = true;
        // After range, can't use more index fields efficiently
        break;
      } else if (info.exists) {
        // $exists can use sparse indexes
        selectivity *= 0.5;
      } else {
        // Other operators may not be indexable
        canUseIndex = false;
        break;
      }
    }

    if (!canUseIndex || usedFields.length === 0) {
      return {
        type: 'COLLSCAN',
        indexCovering: false,
        selectivity: 1.0,
        usesRange: false,
        indexFieldsUsed: [],
        explanation: `Index ${index.name} cannot be used for this query`,
      };
    }

    // Build residual filter for conditions not covered by index
    const coveredFields = new Set(usedFields);
    const residualConditions: Record<string, any> = {};
    for (const [field, info] of operatorInfo) {
      if (!coveredFields.has(field)) {
        // This field isn't covered by the index; re-apply it after the scan.
        if (info.equality) {
          residualConditions[field] = info.values['$eq'];
        } else {
          residualConditions[field] = info.values;
        }
      }
    }

    if (Object.keys(residualConditions).length > 0) {
      residualFilter = residualConditions;
    }

    // Unique indexes have better selectivity for equality
    if (index.unique && usedFields.length === indexFields.length) {
      selectivity = Math.min(selectivity, 0.001); // At most 1 document
    }

    return {
      type: usesRange ? 'IXSCAN_RANGE' : 'IXSCAN',
      indexName: index.name,
      indexKey: index.key,
      // "Covering" here means no residual filter is needed, not the stricter
      // MongoDB sense of answering the query from index data alone.
      indexCovering: Object.keys(residualConditions).length === 0,
      selectivity,
      usesRange,
      indexFieldsUsed: usedFields,
      residualFilter,
      explanation: `Using index ${index.name} on fields [${usedFields.join(', ')}]`,
    };
  }
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calculate overall score for a plan (higher is better)
|
||||||
|
*/
|
||||||
|
private calculateScore(plan: IQueryPlan): number {
|
||||||
|
let score = 0;
|
||||||
|
|
||||||
|
// Lower selectivity is better (fewer documents to fetch)
|
||||||
|
score += (1 - plan.selectivity) * 100;
|
||||||
|
|
||||||
|
// Index covering queries are best
|
||||||
|
if (plan.indexCovering) {
|
||||||
|
score += 50;
|
||||||
|
}
|
||||||
|
|
||||||
|
// More index fields used is better
|
||||||
|
score += plan.indexFieldsUsed.length * 10;
|
||||||
|
|
||||||
|
// Equality scans are better than range scans
|
||||||
|
if (!plan.usesRange) {
|
||||||
|
score += 20;
|
||||||
|
}
|
||||||
|
|
||||||
|
return score;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Explain a query - returns detailed plan information
|
||||||
|
*/
|
||||||
|
async explain(filter: Document): Promise<{
|
||||||
|
queryPlanner: {
|
||||||
|
plannerVersion: number;
|
||||||
|
namespace: string;
|
||||||
|
indexFilterSet: boolean;
|
||||||
|
winningPlan: IQueryPlan;
|
||||||
|
rejectedPlans: IQueryPlan[];
|
||||||
|
};
|
||||||
|
}> {
|
||||||
|
await this.indexEngine['initialize']();
|
||||||
|
|
||||||
|
// Analyze the filter
|
||||||
|
const operatorInfo = this.analyzeFilter(filter);
|
||||||
|
|
||||||
|
// Get available indexes
|
||||||
|
const indexes = await this.indexEngine.listIndexes();
|
||||||
|
|
||||||
|
// Score all indexes
|
||||||
|
const plans: IQueryPlan[] = [];
|
||||||
|
|
||||||
|
for (const index of indexes) {
|
||||||
|
const plan = this.scoreIndex(index, operatorInfo, filter);
|
||||||
|
plans.push(plan);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add collection scan as fallback
|
||||||
|
plans.push({
|
||||||
|
type: 'COLLSCAN',
|
||||||
|
indexCovering: false,
|
||||||
|
selectivity: 1.0,
|
||||||
|
usesRange: false,
|
||||||
|
indexFieldsUsed: [],
|
||||||
|
explanation: 'Full collection scan',
|
||||||
|
});
|
||||||
|
|
||||||
|
// Sort by score (best first)
|
||||||
|
plans.sort((a, b) => this.calculateScore(b) - this.calculateScore(a));
|
||||||
|
|
||||||
|
return {
|
||||||
|
queryPlanner: {
|
||||||
|
plannerVersion: 1,
|
||||||
|
namespace: `${this.indexEngine['dbName']}.${this.indexEngine['collName']}`,
|
||||||
|
indexFilterSet: false,
|
||||||
|
winningPlan: plans[0],
|
||||||
|
rejectedPlans: plans.slice(1),
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
292
ts/ts_tsmdb/engine/SessionEngine.ts
Normal file
292
ts/ts_tsmdb/engine/SessionEngine.ts
Normal file
@@ -0,0 +1,292 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import type { TransactionEngine } from './TransactionEngine.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Session state
|
||||||
|
*/
|
||||||
|
export interface ISession {
|
||||||
|
/** Session ID (UUID) */
|
||||||
|
id: string;
|
||||||
|
/** Timestamp when the session was created */
|
||||||
|
createdAt: number;
|
||||||
|
/** Timestamp of the last activity */
|
||||||
|
lastActivityAt: number;
|
||||||
|
/** Current transaction ID if any */
|
||||||
|
txnId?: string;
|
||||||
|
/** Transaction number for ordering */
|
||||||
|
txnNumber?: number;
|
||||||
|
/** Whether the session is in a transaction */
|
||||||
|
inTransaction: boolean;
|
||||||
|
/** Session metadata */
|
||||||
|
metadata?: Record<string, any>;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Session engine options
|
||||||
|
*/
|
||||||
|
export interface ISessionEngineOptions {
|
||||||
|
/** Session timeout in milliseconds (default: 30 minutes) */
|
||||||
|
sessionTimeoutMs?: number;
|
||||||
|
/** Interval to check for expired sessions in ms (default: 60 seconds) */
|
||||||
|
cleanupIntervalMs?: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Session engine for managing client sessions
|
||||||
|
* - Tracks session lifecycle (create, touch, end)
|
||||||
|
* - Links sessions to transactions
|
||||||
|
* - Auto-aborts transactions on session expiry
|
||||||
|
*/
|
||||||
|
export class SessionEngine {
|
||||||
|
private sessions: Map<string, ISession> = new Map();
|
||||||
|
private sessionTimeoutMs: number;
|
||||||
|
private cleanupInterval?: ReturnType<typeof setInterval>;
|
||||||
|
private transactionEngine?: TransactionEngine;
|
||||||
|
|
||||||
|
constructor(options?: ISessionEngineOptions) {
|
||||||
|
this.sessionTimeoutMs = options?.sessionTimeoutMs ?? 30 * 60 * 1000; // 30 minutes default
|
||||||
|
const cleanupIntervalMs = options?.cleanupIntervalMs ?? 60 * 1000; // 1 minute default
|
||||||
|
|
||||||
|
// Start cleanup interval
|
||||||
|
this.cleanupInterval = setInterval(() => {
|
||||||
|
this.cleanupExpiredSessions();
|
||||||
|
}, cleanupIntervalMs);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set the transaction engine to use for auto-abort
|
||||||
|
*/
|
||||||
|
setTransactionEngine(engine: TransactionEngine): void {
|
||||||
|
this.transactionEngine = engine;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Start a new session
|
||||||
|
*/
|
||||||
|
startSession(sessionId?: string, metadata?: Record<string, any>): ISession {
|
||||||
|
const id = sessionId ?? new plugins.bson.UUID().toHexString();
|
||||||
|
const now = Date.now();
|
||||||
|
|
||||||
|
const session: ISession = {
|
||||||
|
id,
|
||||||
|
createdAt: now,
|
||||||
|
lastActivityAt: now,
|
||||||
|
inTransaction: false,
|
||||||
|
metadata,
|
||||||
|
};
|
||||||
|
|
||||||
|
this.sessions.set(id, session);
|
||||||
|
return session;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get a session by ID
|
||||||
|
*/
|
||||||
|
getSession(sessionId: string): ISession | undefined {
|
||||||
|
const session = this.sessions.get(sessionId);
|
||||||
|
if (session && this.isSessionExpired(session)) {
|
||||||
|
// Session expired, clean it up
|
||||||
|
this.endSession(sessionId);
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
return session;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Touch a session to update last activity time
|
||||||
|
*/
|
||||||
|
touchSession(sessionId: string): boolean {
|
||||||
|
const session = this.sessions.get(sessionId);
|
||||||
|
if (!session) return false;
|
||||||
|
|
||||||
|
if (this.isSessionExpired(session)) {
|
||||||
|
this.endSession(sessionId);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
session.lastActivityAt = Date.now();
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* End a session explicitly
|
||||||
|
* This will also abort any active transaction
|
||||||
|
*/
|
||||||
|
async endSession(sessionId: string): Promise<boolean> {
|
||||||
|
const session = this.sessions.get(sessionId);
|
||||||
|
if (!session) return false;
|
||||||
|
|
||||||
|
// If session has an active transaction, abort it
|
||||||
|
if (session.inTransaction && session.txnId && this.transactionEngine) {
|
||||||
|
try {
|
||||||
|
await this.transactionEngine.abortTransaction(session.txnId);
|
||||||
|
} catch (e) {
|
||||||
|
// Ignore abort errors during cleanup
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.sessions.delete(sessionId);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Start a transaction in a session
|
||||||
|
*/
|
||||||
|
startTransaction(sessionId: string, txnId: string, txnNumber?: number): boolean {
|
||||||
|
const session = this.sessions.get(sessionId);
|
||||||
|
if (!session) return false;
|
||||||
|
|
||||||
|
if (this.isSessionExpired(session)) {
|
||||||
|
this.endSession(sessionId);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
session.txnId = txnId;
|
||||||
|
session.txnNumber = txnNumber;
|
||||||
|
session.inTransaction = true;
|
||||||
|
session.lastActivityAt = Date.now();
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* End a transaction in a session (commit or abort)
|
||||||
|
*/
|
||||||
|
endTransaction(sessionId: string): boolean {
|
||||||
|
const session = this.sessions.get(sessionId);
|
||||||
|
if (!session) return false;
|
||||||
|
|
||||||
|
session.txnId = undefined;
|
||||||
|
session.txnNumber = undefined;
|
||||||
|
session.inTransaction = false;
|
||||||
|
session.lastActivityAt = Date.now();
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get transaction ID for a session
|
||||||
|
*/
|
||||||
|
getTransactionId(sessionId: string): string | undefined {
|
||||||
|
const session = this.sessions.get(sessionId);
|
||||||
|
return session?.txnId;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if session is in a transaction
|
||||||
|
*/
|
||||||
|
isInTransaction(sessionId: string): boolean {
|
||||||
|
const session = this.sessions.get(sessionId);
|
||||||
|
return session?.inTransaction ?? false;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a session is expired
|
||||||
|
*/
|
||||||
|
isSessionExpired(session: ISession): boolean {
|
||||||
|
return Date.now() - session.lastActivityAt > this.sessionTimeoutMs;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Cleanup expired sessions
|
||||||
|
* This is called periodically by the cleanup interval
|
||||||
|
*/
|
||||||
|
private async cleanupExpiredSessions(): Promise<void> {
|
||||||
|
const expiredSessions: string[] = [];
|
||||||
|
|
||||||
|
for (const [id, session] of this.sessions) {
|
||||||
|
if (this.isSessionExpired(session)) {
|
||||||
|
expiredSessions.push(id);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// End all expired sessions (this will also abort their transactions)
|
||||||
|
for (const sessionId of expiredSessions) {
|
||||||
|
await this.endSession(sessionId);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get all active sessions
|
||||||
|
*/
|
||||||
|
listSessions(): ISession[] {
|
||||||
|
const activeSessions: ISession[] = [];
|
||||||
|
for (const session of this.sessions.values()) {
|
||||||
|
if (!this.isSessionExpired(session)) {
|
||||||
|
activeSessions.push(session);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return activeSessions;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get session count
|
||||||
|
*/
|
||||||
|
getSessionCount(): number {
|
||||||
|
return this.sessions.size;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get sessions with active transactions
|
||||||
|
*/
|
||||||
|
getSessionsWithTransactions(): ISession[] {
|
||||||
|
return this.listSessions().filter(s => s.inTransaction);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Refresh session timeout
|
||||||
|
*/
|
||||||
|
refreshSession(sessionId: string): boolean {
|
||||||
|
return this.touchSession(sessionId);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Close the session engine and cleanup
|
||||||
|
*/
|
||||||
|
close(): void {
|
||||||
|
if (this.cleanupInterval) {
|
||||||
|
clearInterval(this.cleanupInterval);
|
||||||
|
this.cleanupInterval = undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Clear all sessions
|
||||||
|
this.sessions.clear();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get or create a session for a given session ID
|
||||||
|
* Useful for handling MongoDB driver session requests
|
||||||
|
*/
|
||||||
|
getOrCreateSession(sessionId: string): ISession {
|
||||||
|
let session = this.getSession(sessionId);
|
||||||
|
if (!session) {
|
||||||
|
session = this.startSession(sessionId);
|
||||||
|
} else {
|
||||||
|
this.touchSession(sessionId);
|
||||||
|
}
|
||||||
|
return session;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Extract session ID from MongoDB lsid (logical session ID)
|
||||||
|
*/
|
||||||
|
static extractSessionId(lsid: any): string | undefined {
|
||||||
|
if (!lsid) return undefined;
|
||||||
|
|
||||||
|
// MongoDB session ID format: { id: UUID }
|
||||||
|
if (lsid.id) {
|
||||||
|
if (lsid.id instanceof plugins.bson.UUID) {
|
||||||
|
return lsid.id.toHexString();
|
||||||
|
}
|
||||||
|
if (typeof lsid.id === 'string') {
|
||||||
|
return lsid.id;
|
||||||
|
}
|
||||||
|
if (lsid.id.$binary?.base64) {
|
||||||
|
// Binary UUID format
|
||||||
|
return Buffer.from(lsid.id.$binary.base64, 'base64').toString('hex');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
import * as plugins from '../tsmdb.plugins.js';
|
import * as plugins from '../plugins.js';
|
||||||
import type { IStorageAdapter } from '../storage/IStorageAdapter.js';
|
import type { IStorageAdapter } from '../storage/IStorageAdapter.js';
|
||||||
import type { Document, IStoredDocument, ITransactionOptions } from '../types/interfaces.js';
|
import type { Document, IStoredDocument, ITransactionOptions } from '../types/interfaces.js';
|
||||||
import { TsmdbTransactionError, TsmdbWriteConflictError } from '../errors/TsmdbErrors.js';
|
import { TsmdbTransactionError, TsmdbWriteConflictError } from '../errors/TsmdbErrors.js';
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
import * as plugins from '../tsmdb.plugins.js';
|
import * as plugins from '../plugins.js';
|
||||||
import type { Document, IStoredDocument } from '../types/interfaces.js';
|
import type { Document, IStoredDocument } from '../types/interfaces.js';
|
||||||
import { QueryEngine } from './QueryEngine.js';
|
import { QueryEngine } from './QueryEngine.js';
|
||||||
|
|
||||||
@@ -2,7 +2,7 @@
|
|||||||
// Use the official MongoDB driver to connect to TsmdbServer
|
// Use the official MongoDB driver to connect to TsmdbServer
|
||||||
|
|
||||||
// Re-export plugins for external use
|
// Re-export plugins for external use
|
||||||
import * as plugins from './tsmdb.plugins.js';
|
import * as plugins from './plugins.js';
|
||||||
export { plugins };
|
export { plugins };
|
||||||
|
|
||||||
// Export BSON types for convenience
|
// Export BSON types for convenience
|
||||||
@@ -19,6 +19,8 @@ export type { IStorageAdapter } from './storage/IStorageAdapter.js';
|
|||||||
export { MemoryStorageAdapter } from './storage/MemoryStorageAdapter.js';
|
export { MemoryStorageAdapter } from './storage/MemoryStorageAdapter.js';
|
||||||
export { FileStorageAdapter } from './storage/FileStorageAdapter.js';
|
export { FileStorageAdapter } from './storage/FileStorageAdapter.js';
|
||||||
export { OpLog } from './storage/OpLog.js';
|
export { OpLog } from './storage/OpLog.js';
|
||||||
|
export { WAL } from './storage/WAL.js';
|
||||||
|
export type { IWalEntry, TWalOperation } from './storage/WAL.js';
|
||||||
|
|
||||||
// Export engines
|
// Export engines
|
||||||
export { QueryEngine } from './engine/QueryEngine.js';
|
export { QueryEngine } from './engine/QueryEngine.js';
|
||||||
@@ -26,6 +28,10 @@ export { UpdateEngine } from './engine/UpdateEngine.js';
|
|||||||
export { AggregationEngine } from './engine/AggregationEngine.js';
|
export { AggregationEngine } from './engine/AggregationEngine.js';
|
||||||
export { IndexEngine } from './engine/IndexEngine.js';
|
export { IndexEngine } from './engine/IndexEngine.js';
|
||||||
export { TransactionEngine } from './engine/TransactionEngine.js';
|
export { TransactionEngine } from './engine/TransactionEngine.js';
|
||||||
|
export { QueryPlanner } from './engine/QueryPlanner.js';
|
||||||
|
export type { IQueryPlan, TQueryPlanType } from './engine/QueryPlanner.js';
|
||||||
|
export { SessionEngine } from './engine/SessionEngine.js';
|
||||||
|
export type { ISession, ISessionEngineOptions } from './engine/SessionEngine.js';
|
||||||
|
|
||||||
// Export server (the main entry point for using TsmDB)
|
// Export server (the main entry point for using TsmDB)
|
||||||
export { TsmdbServer } from './server/TsmdbServer.js';
|
export { TsmdbServer } from './server/TsmdbServer.js';
|
||||||
@@ -35,3 +41,6 @@ export type { ITsmdbServerOptions } from './server/TsmdbServer.js';
|
|||||||
export { WireProtocol } from './server/WireProtocol.js';
|
export { WireProtocol } from './server/WireProtocol.js';
|
||||||
export { CommandRouter } from './server/CommandRouter.js';
|
export { CommandRouter } from './server/CommandRouter.js';
|
||||||
export type { ICommandHandler, IHandlerContext, ICursorState } from './server/CommandRouter.js';
|
export type { ICommandHandler, IHandlerContext, ICursorState } from './server/CommandRouter.js';
|
||||||
|
|
||||||
|
// Export utilities
|
||||||
|
export * from './utils/checksum.js';
|
||||||
@@ -1,7 +1,10 @@
|
|||||||
import * as plugins from '../tsmdb.plugins.js';
|
import * as plugins from '../plugins.js';
|
||||||
import type { IStorageAdapter } from '../storage/IStorageAdapter.js';
|
import type { IStorageAdapter } from '../storage/IStorageAdapter.js';
|
||||||
import type { IParsedCommand } from './WireProtocol.js';
|
import type { IParsedCommand } from './WireProtocol.js';
|
||||||
import type { TsmdbServer } from './TsmdbServer.js';
|
import type { TsmdbServer } from './TsmdbServer.js';
|
||||||
|
import { IndexEngine } from '../engine/IndexEngine.js';
|
||||||
|
import { TransactionEngine } from '../engine/TransactionEngine.js';
|
||||||
|
import { SessionEngine } from '../engine/SessionEngine.js';
|
||||||
|
|
||||||
// Import handlers
|
// Import handlers
|
||||||
import { HelloHandler } from './handlers/HelloHandler.js';
|
import { HelloHandler } from './handlers/HelloHandler.js';
|
||||||
@@ -22,6 +25,16 @@ export interface IHandlerContext {
|
|||||||
database: string;
|
database: string;
|
||||||
command: plugins.bson.Document;
|
command: plugins.bson.Document;
|
||||||
documentSequences?: Map<string, plugins.bson.Document[]>;
|
documentSequences?: Map<string, plugins.bson.Document[]>;
|
||||||
|
/** Get or create an IndexEngine for a collection */
|
||||||
|
getIndexEngine: (collName: string) => IndexEngine;
|
||||||
|
/** Transaction engine instance */
|
||||||
|
transactionEngine: TransactionEngine;
|
||||||
|
/** Current transaction ID (if in a transaction) */
|
||||||
|
txnId?: string;
|
||||||
|
/** Session ID (from lsid) */
|
||||||
|
sessionId?: string;
|
||||||
|
/** Session engine instance */
|
||||||
|
sessionEngine: SessionEngine;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -43,12 +56,54 @@ export class CommandRouter {
|
|||||||
private cursors: Map<bigint, ICursorState> = new Map();
|
private cursors: Map<bigint, ICursorState> = new Map();
|
||||||
private cursorIdCounter: bigint = BigInt(1);
|
private cursorIdCounter: bigint = BigInt(1);
|
||||||
|
|
||||||
|
// Index engine cache: db.collection -> IndexEngine
|
||||||
|
private indexEngines: Map<string, IndexEngine> = new Map();
|
||||||
|
|
||||||
|
// Transaction engine (shared across all handlers)
|
||||||
|
private transactionEngine: TransactionEngine;
|
||||||
|
|
||||||
|
// Session engine (shared across all handlers)
|
||||||
|
private sessionEngine: SessionEngine;
|
||||||
|
|
||||||
constructor(storage: IStorageAdapter, server: TsmdbServer) {
|
constructor(storage: IStorageAdapter, server: TsmdbServer) {
|
||||||
this.storage = storage;
|
this.storage = storage;
|
||||||
this.server = server;
|
this.server = server;
|
||||||
|
this.transactionEngine = new TransactionEngine(storage);
|
||||||
|
this.sessionEngine = new SessionEngine();
|
||||||
|
// Link session engine to transaction engine for auto-abort on session expiry
|
||||||
|
this.sessionEngine.setTransactionEngine(this.transactionEngine);
|
||||||
this.registerHandlers();
|
this.registerHandlers();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
 * Get or create an IndexEngine for a database.collection.
 * Engines are memoized per "db.collection" key so repeated lookups for the
 * same collection reuse one instance.
 */
getIndexEngine(dbName: string, collName: string): IndexEngine {
  const cacheKey = `${dbName}.${collName}`;
  const cached = this.indexEngines.get(cacheKey);
  if (cached) {
    return cached;
  }
  const created = new IndexEngine(dbName, collName, this.storage);
  this.indexEngines.set(cacheKey, created);
  return created;
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clear index engine cache for a collection (used when collection is dropped)
|
||||||
|
*/
|
||||||
|
clearIndexEngineCache(dbName: string, collName?: string): void {
|
||||||
|
if (collName) {
|
||||||
|
this.indexEngines.delete(`${dbName}.${collName}`);
|
||||||
|
} else {
|
||||||
|
// Clear all engines for the database
|
||||||
|
for (const key of this.indexEngines.keys()) {
|
||||||
|
if (key.startsWith(`${dbName}.`)) {
|
||||||
|
this.indexEngines.delete(key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Register all command handlers
|
* Register all command handlers
|
||||||
*/
|
*/
|
||||||
@@ -120,6 +175,29 @@ export class CommandRouter {
|
|||||||
async route(parsedCommand: IParsedCommand): Promise<plugins.bson.Document> {
|
async route(parsedCommand: IParsedCommand): Promise<plugins.bson.Document> {
|
||||||
const { commandName, command, database, documentSequences } = parsedCommand;
|
const { commandName, command, database, documentSequences } = parsedCommand;
|
||||||
|
|
||||||
|
// Extract session ID from lsid using SessionEngine helper
|
||||||
|
let sessionId = SessionEngine.extractSessionId(command.lsid);
|
||||||
|
let txnId: string | undefined;
|
||||||
|
|
||||||
|
// If we have a session ID, register/touch the session
|
||||||
|
if (sessionId) {
|
||||||
|
this.sessionEngine.getOrCreateSession(sessionId);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if this starts a new transaction
|
||||||
|
if (command.startTransaction && sessionId) {
|
||||||
|
txnId = this.transactionEngine.startTransaction(sessionId);
|
||||||
|
this.sessionEngine.startTransaction(sessionId, txnId, command.txnNumber);
|
||||||
|
} else if (sessionId && this.sessionEngine.isInTransaction(sessionId)) {
|
||||||
|
// Continue existing transaction
|
||||||
|
txnId = this.sessionEngine.getTransactionId(sessionId);
|
||||||
|
// Verify transaction is still active
|
||||||
|
if (txnId && !this.transactionEngine.isActive(txnId)) {
|
||||||
|
this.sessionEngine.endTransaction(sessionId);
|
||||||
|
txnId = undefined;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Create handler context
|
// Create handler context
|
||||||
const context: IHandlerContext = {
|
const context: IHandlerContext = {
|
||||||
storage: this.storage,
|
storage: this.storage,
|
||||||
@@ -127,6 +205,11 @@ export class CommandRouter {
|
|||||||
database,
|
database,
|
||||||
command,
|
command,
|
||||||
documentSequences,
|
documentSequences,
|
||||||
|
getIndexEngine: (collName: string) => this.getIndexEngine(database, collName),
|
||||||
|
transactionEngine: this.transactionEngine,
|
||||||
|
sessionEngine: this.sessionEngine,
|
||||||
|
txnId,
|
||||||
|
sessionId,
|
||||||
};
|
};
|
||||||
|
|
||||||
// Find handler
|
// Find handler
|
||||||
@@ -164,6 +247,32 @@ export class CommandRouter {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Close the command router and cleanup resources
|
||||||
|
*/
|
||||||
|
close(): void {
|
||||||
|
// Close session engine (stops cleanup interval, clears sessions)
|
||||||
|
this.sessionEngine.close();
|
||||||
|
// Clear cursors
|
||||||
|
this.cursors.clear();
|
||||||
|
// Clear index engine cache
|
||||||
|
this.indexEngines.clear();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get session engine (for administrative purposes)
|
||||||
|
*/
|
||||||
|
getSessionEngine(): SessionEngine {
|
||||||
|
return this.sessionEngine;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get transaction engine (for administrative purposes)
|
||||||
|
*/
|
||||||
|
getTransactionEngine(): TransactionEngine {
|
||||||
|
return this.transactionEngine;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
import * as net from 'net';
|
import * as net from 'net';
|
||||||
import * as plugins from '../tsmdb.plugins.js';
|
import * as plugins from '../plugins.js';
|
||||||
import { WireProtocol, OP_QUERY } from './WireProtocol.js';
|
import { WireProtocol, OP_QUERY } from './WireProtocol.js';
|
||||||
import { CommandRouter } from './CommandRouter.js';
|
import { CommandRouter } from './CommandRouter.js';
|
||||||
import { MemoryStorageAdapter } from '../storage/MemoryStorageAdapter.js';
|
import { MemoryStorageAdapter } from '../storage/MemoryStorageAdapter.js';
|
||||||
@@ -154,6 +154,9 @@ export class TsmdbServer {
|
|||||||
}
|
}
|
||||||
this.connections.clear();
|
this.connections.clear();
|
||||||
|
|
||||||
|
// Close command router (cleans up session engine, cursors, etc.)
|
||||||
|
this.commandRouter.close();
|
||||||
|
|
||||||
// Close storage
|
// Close storage
|
||||||
await this.storage.close();
|
await this.storage.close();
|
||||||
|
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
import * as plugins from '../tsmdb.plugins.js';
|
import * as plugins from '../plugins.js';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* MongoDB Wire Protocol Implementation
|
* MongoDB Wire Protocol Implementation
|
||||||
@@ -1,5 +1,6 @@
|
|||||||
import * as plugins from '../../tsmdb.plugins.js';
|
import * as plugins from '../../plugins.js';
|
||||||
import type { ICommandHandler, IHandlerContext } from '../CommandRouter.js';
|
import type { ICommandHandler, IHandlerContext } from '../CommandRouter.js';
|
||||||
|
import { SessionEngine } from '../../engine/SessionEngine.js';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* AdminHandler - Handles administrative commands
|
* AdminHandler - Handles administrative commands
|
||||||
@@ -237,10 +238,12 @@ export class AdminHandler implements ICommandHandler {
|
|||||||
* Handle serverStatus command
|
* Handle serverStatus command
|
||||||
*/
|
*/
|
||||||
private async handleServerStatus(context: IHandlerContext): Promise<plugins.bson.Document> {
|
private async handleServerStatus(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
const { server } = context;
|
const { server, sessionEngine } = context;
|
||||||
|
|
||||||
const uptime = server.getUptime();
|
const uptime = server.getUptime();
|
||||||
const connections = server.getConnectionCount();
|
const connections = server.getConnectionCount();
|
||||||
|
const sessions = sessionEngine.listSessions();
|
||||||
|
const sessionsWithTxn = sessionEngine.getSessionsWithTransactions();
|
||||||
|
|
||||||
return {
|
return {
|
||||||
ok: 1,
|
ok: 1,
|
||||||
@@ -263,6 +266,26 @@ export class AdminHandler implements ICommandHandler {
|
|||||||
totalCreated: connections,
|
totalCreated: connections,
|
||||||
active: connections,
|
active: connections,
|
||||||
},
|
},
|
||||||
|
logicalSessionRecordCache: {
|
||||||
|
activeSessionsCount: sessions.length,
|
||||||
|
sessionsCollectionJobCount: 0,
|
||||||
|
lastSessionsCollectionJobDurationMillis: 0,
|
||||||
|
lastSessionsCollectionJobTimestamp: new Date(),
|
||||||
|
transactionReaperJobCount: 0,
|
||||||
|
lastTransactionReaperJobDurationMillis: 0,
|
||||||
|
lastTransactionReaperJobTimestamp: new Date(),
|
||||||
|
},
|
||||||
|
transactions: {
|
||||||
|
retriedCommandsCount: 0,
|
||||||
|
retriedStatementsCount: 0,
|
||||||
|
transactionsCollectionWriteCount: 0,
|
||||||
|
currentActive: sessionsWithTxn.length,
|
||||||
|
currentInactive: 0,
|
||||||
|
currentOpen: sessionsWithTxn.length,
|
||||||
|
totalStarted: sessionsWithTxn.length,
|
||||||
|
totalCommitted: 0,
|
||||||
|
totalAborted: 0,
|
||||||
|
},
|
||||||
network: {
|
network: {
|
||||||
bytesIn: 0,
|
bytesIn: 0,
|
||||||
bytesOut: 0,
|
bytesOut: 0,
|
||||||
@@ -409,6 +432,17 @@ export class AdminHandler implements ICommandHandler {
|
|||||||
* Handle endSessions command
|
* Handle endSessions command
|
||||||
*/
|
*/
|
||||||
private async handleEndSessions(context: IHandlerContext): Promise<plugins.bson.Document> {
|
private async handleEndSessions(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
const { command, sessionEngine } = context;
|
||||||
|
|
||||||
|
// End each session in the array
|
||||||
|
const sessions = command.endSessions || [];
|
||||||
|
for (const sessionSpec of sessions) {
|
||||||
|
const sessionId = SessionEngine.extractSessionId(sessionSpec);
|
||||||
|
if (sessionId) {
|
||||||
|
await sessionEngine.endSession(sessionId);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
return { ok: 1 };
|
return { ok: 1 };
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -416,16 +450,87 @@ export class AdminHandler implements ICommandHandler {
|
|||||||
* Handle abortTransaction command
|
* Handle abortTransaction command
|
||||||
*/
|
*/
|
||||||
private async handleAbortTransaction(context: IHandlerContext): Promise<plugins.bson.Document> {
|
private async handleAbortTransaction(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
// Transactions are not fully supported, but acknowledge the command
|
const { transactionEngine, sessionEngine, txnId, sessionId } = context;
|
||||||
return { ok: 1 };
|
|
||||||
|
if (!txnId) {
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: 'No transaction started',
|
||||||
|
code: 251,
|
||||||
|
codeName: 'NoSuchTransaction',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
await transactionEngine.abortTransaction(txnId);
|
||||||
|
transactionEngine.endTransaction(txnId);
|
||||||
|
// Update session state
|
||||||
|
if (sessionId) {
|
||||||
|
sessionEngine.endTransaction(sessionId);
|
||||||
|
}
|
||||||
|
return { ok: 1 };
|
||||||
|
} catch (error: any) {
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: error.message || 'Abort transaction failed',
|
||||||
|
code: error.code || 1,
|
||||||
|
codeName: error.codeName || 'UnknownError',
|
||||||
|
};
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Handle commitTransaction command
|
* Handle commitTransaction command
|
||||||
*/
|
*/
|
||||||
private async handleCommitTransaction(context: IHandlerContext): Promise<plugins.bson.Document> {
|
private async handleCommitTransaction(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
// Transactions are not fully supported, but acknowledge the command
|
const { transactionEngine, sessionEngine, txnId, sessionId } = context;
|
||||||
return { ok: 1 };
|
|
||||||
|
if (!txnId) {
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: 'No transaction started',
|
||||||
|
code: 251,
|
||||||
|
codeName: 'NoSuchTransaction',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
await transactionEngine.commitTransaction(txnId);
|
||||||
|
transactionEngine.endTransaction(txnId);
|
||||||
|
// Update session state
|
||||||
|
if (sessionId) {
|
||||||
|
sessionEngine.endTransaction(sessionId);
|
||||||
|
}
|
||||||
|
return { ok: 1 };
|
||||||
|
} catch (error: any) {
|
||||||
|
// If commit fails, transaction should be aborted
|
||||||
|
try {
|
||||||
|
await transactionEngine.abortTransaction(txnId);
|
||||||
|
transactionEngine.endTransaction(txnId);
|
||||||
|
if (sessionId) {
|
||||||
|
sessionEngine.endTransaction(sessionId);
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// Ignore abort errors
|
||||||
|
}
|
||||||
|
|
||||||
|
if (error.code === 112) {
|
||||||
|
// Write conflict
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: error.message || 'Write conflict during commit',
|
||||||
|
code: 112,
|
||||||
|
codeName: 'WriteConflict',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: error.message || 'Commit transaction failed',
|
||||||
|
code: error.code || 1,
|
||||||
|
codeName: error.codeName || 'UnknownError',
|
||||||
|
};
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
import * as plugins from '../../tsmdb.plugins.js';
|
import * as plugins from '../../plugins.js';
|
||||||
import type { ICommandHandler, IHandlerContext, ICursorState } from '../CommandRouter.js';
|
import type { ICommandHandler, IHandlerContext, ICursorState } from '../CommandRouter.js';
|
||||||
import { AggregationEngine } from '../../engine/AggregationEngine.js';
|
import { AggregationEngine } from '../../engine/AggregationEngine.js';
|
||||||
|
|
||||||
@@ -1,5 +1,6 @@
|
|||||||
import * as plugins from '../../tsmdb.plugins.js';
|
import * as plugins from '../../plugins.js';
|
||||||
import type { ICommandHandler, IHandlerContext } from '../CommandRouter.js';
|
import type { ICommandHandler, IHandlerContext } from '../CommandRouter.js';
|
||||||
|
import type { IStoredDocument } from '../../types/interfaces.js';
|
||||||
import { QueryEngine } from '../../engine/QueryEngine.js';
|
import { QueryEngine } from '../../engine/QueryEngine.js';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -47,6 +48,8 @@ export class DeleteHandler implements ICommandHandler {
|
|||||||
return { ok: 1, n: 0 };
|
return { ok: 1, n: 0 };
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const indexEngine = context.getIndexEngine(collection);
|
||||||
|
|
||||||
for (let i = 0; i < deletes.length; i++) {
|
for (let i = 0; i < deletes.length; i++) {
|
||||||
const deleteSpec = deletes[i];
|
const deleteSpec = deletes[i];
|
||||||
const filter = deleteSpec.q || deleteSpec.filter || {};
|
const filter = deleteSpec.q || deleteSpec.filter || {};
|
||||||
@@ -56,8 +59,15 @@ export class DeleteHandler implements ICommandHandler {
|
|||||||
const deleteAll = limit === 0;
|
const deleteAll = limit === 0;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// Get all documents
|
// Try to use index-accelerated query
|
||||||
const documents = await storage.findAll(database, collection);
|
const candidateIds = await indexEngine.findCandidateIds(filter);
|
||||||
|
|
||||||
|
let documents: IStoredDocument[];
|
||||||
|
if (candidateIds !== null) {
|
||||||
|
documents = await storage.findByIds(database, collection, candidateIds);
|
||||||
|
} else {
|
||||||
|
documents = await storage.findAll(database, collection);
|
||||||
|
}
|
||||||
|
|
||||||
// Apply filter
|
// Apply filter
|
||||||
const matchingDocs = QueryEngine.filter(documents, filter);
|
const matchingDocs = QueryEngine.filter(documents, filter);
|
||||||
@@ -69,6 +79,11 @@ export class DeleteHandler implements ICommandHandler {
|
|||||||
// Determine which documents to delete
|
// Determine which documents to delete
|
||||||
const docsToDelete = deleteAll ? matchingDocs : matchingDocs.slice(0, 1);
|
const docsToDelete = deleteAll ? matchingDocs : matchingDocs.slice(0, 1);
|
||||||
|
|
||||||
|
// Update indexes for deleted documents
|
||||||
|
for (const doc of docsToDelete) {
|
||||||
|
await indexEngine.onDelete(doc as any);
|
||||||
|
}
|
||||||
|
|
||||||
// Delete the documents
|
// Delete the documents
|
||||||
const idsToDelete = docsToDelete.map(doc => doc._id);
|
const idsToDelete = docsToDelete.map(doc => doc._id);
|
||||||
const deleted = await storage.deleteByIds(database, collection, idsToDelete);
|
const deleted = await storage.deleteByIds(database, collection, idsToDelete);
|
||||||
@@ -1,5 +1,6 @@
|
|||||||
import * as plugins from '../../tsmdb.plugins.js';
|
import * as plugins from '../../plugins.js';
|
||||||
import type { ICommandHandler, IHandlerContext, ICursorState } from '../CommandRouter.js';
|
import type { ICommandHandler, IHandlerContext, ICursorState } from '../CommandRouter.js';
|
||||||
|
import type { IStoredDocument } from '../../types/interfaces.js';
|
||||||
import { QueryEngine } from '../../engine/QueryEngine.js';
|
import { QueryEngine } from '../../engine/QueryEngine.js';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -45,7 +46,7 @@ export class FindHandler implements ICommandHandler {
|
|||||||
* Handle find command
|
* Handle find command
|
||||||
*/
|
*/
|
||||||
private async handleFind(context: IHandlerContext): Promise<plugins.bson.Document> {
|
private async handleFind(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
const { storage, database, command } = context;
|
const { storage, database, command, getIndexEngine } = context;
|
||||||
|
|
||||||
const collection = command.find;
|
const collection = command.find;
|
||||||
const filter = command.filter || {};
|
const filter = command.filter || {};
|
||||||
@@ -70,11 +71,22 @@ export class FindHandler implements ICommandHandler {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get all documents
|
// Try to use index-accelerated query
|
||||||
let documents = await storage.findAll(database, collection);
|
const indexEngine = getIndexEngine(collection);
|
||||||
|
const candidateIds = await indexEngine.findCandidateIds(filter);
|
||||||
|
|
||||||
// Apply filter
|
let documents: IStoredDocument[];
|
||||||
documents = QueryEngine.filter(documents, filter);
|
if (candidateIds !== null) {
|
||||||
|
// Index hit - fetch only candidate documents
|
||||||
|
documents = await storage.findByIds(database, collection, candidateIds);
|
||||||
|
// Still apply filter for any conditions the index couldn't fully satisfy
|
||||||
|
documents = QueryEngine.filter(documents, filter);
|
||||||
|
} else {
|
||||||
|
// No suitable index - full collection scan
|
||||||
|
documents = await storage.findAll(database, collection);
|
||||||
|
// Apply filter
|
||||||
|
documents = QueryEngine.filter(documents, filter);
|
||||||
|
}
|
||||||
|
|
||||||
// Apply sort
|
// Apply sort
|
||||||
if (sort) {
|
if (sort) {
|
||||||
@@ -233,7 +245,7 @@ export class FindHandler implements ICommandHandler {
|
|||||||
* Handle count command
|
* Handle count command
|
||||||
*/
|
*/
|
||||||
private async handleCount(context: IHandlerContext): Promise<plugins.bson.Document> {
|
private async handleCount(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
const { storage, database, command } = context;
|
const { storage, database, command, getIndexEngine } = context;
|
||||||
|
|
||||||
const collection = command.count;
|
const collection = command.count;
|
||||||
const query = command.query || {};
|
const query = command.query || {};
|
||||||
@@ -246,11 +258,20 @@ export class FindHandler implements ICommandHandler {
|
|||||||
return { ok: 1, n: 0 };
|
return { ok: 1, n: 0 };
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get all documents
|
// Try to use index-accelerated query
|
||||||
let documents = await storage.findAll(database, collection);
|
const indexEngine = getIndexEngine(collection);
|
||||||
|
const candidateIds = await indexEngine.findCandidateIds(query);
|
||||||
|
|
||||||
// Apply filter
|
let documents: IStoredDocument[];
|
||||||
documents = QueryEngine.filter(documents, query);
|
if (candidateIds !== null) {
|
||||||
|
// Index hit - fetch only candidate documents
|
||||||
|
documents = await storage.findByIds(database, collection, candidateIds);
|
||||||
|
documents = QueryEngine.filter(documents, query);
|
||||||
|
} else {
|
||||||
|
// No suitable index - full collection scan
|
||||||
|
documents = await storage.findAll(database, collection);
|
||||||
|
documents = QueryEngine.filter(documents, query);
|
||||||
|
}
|
||||||
|
|
||||||
// Apply skip
|
// Apply skip
|
||||||
if (skip > 0) {
|
if (skip > 0) {
|
||||||
@@ -269,7 +290,7 @@ export class FindHandler implements ICommandHandler {
|
|||||||
* Handle distinct command
|
* Handle distinct command
|
||||||
*/
|
*/
|
||||||
private async handleDistinct(context: IHandlerContext): Promise<plugins.bson.Document> {
|
private async handleDistinct(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
const { storage, database, command } = context;
|
const { storage, database, command, getIndexEngine } = context;
|
||||||
|
|
||||||
const collection = command.distinct;
|
const collection = command.distinct;
|
||||||
const key = command.key;
|
const key = command.key;
|
||||||
@@ -290,8 +311,16 @@ export class FindHandler implements ICommandHandler {
|
|||||||
return { ok: 1, values: [] };
|
return { ok: 1, values: [] };
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get all documents
|
// Try to use index-accelerated query
|
||||||
const documents = await storage.findAll(database, collection);
|
const indexEngine = getIndexEngine(collection);
|
||||||
|
const candidateIds = await indexEngine.findCandidateIds(query);
|
||||||
|
|
||||||
|
let documents: IStoredDocument[];
|
||||||
|
if (candidateIds !== null) {
|
||||||
|
documents = await storage.findByIds(database, collection, candidateIds);
|
||||||
|
} else {
|
||||||
|
documents = await storage.findAll(database, collection);
|
||||||
|
}
|
||||||
|
|
||||||
// Get distinct values
|
// Get distinct values
|
||||||
const values = QueryEngine.distinct(documents, key, query);
|
const values = QueryEngine.distinct(documents, key, query);
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
import * as plugins from '../../tsmdb.plugins.js';
|
import * as plugins from '../../plugins.js';
|
||||||
import type { ICommandHandler, IHandlerContext } from '../CommandRouter.js';
|
import type { ICommandHandler, IHandlerContext } from '../CommandRouter.js';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
import * as plugins from '../../tsmdb.plugins.js';
|
import * as plugins from '../../plugins.js';
|
||||||
import type { ICommandHandler, IHandlerContext } from '../CommandRouter.js';
|
import type { ICommandHandler, IHandlerContext } from '../CommandRouter.js';
|
||||||
import { IndexEngine } from '../../engine/IndexEngine.js';
|
import { IndexEngine } from '../../engine/IndexEngine.js';
|
||||||
|
|
||||||
@@ -1,5 +1,6 @@
|
|||||||
import * as plugins from '../../tsmdb.plugins.js';
|
import * as plugins from '../../plugins.js';
|
||||||
import type { ICommandHandler, IHandlerContext } from '../CommandRouter.js';
|
import type { ICommandHandler, IHandlerContext } from '../CommandRouter.js';
|
||||||
|
import type { IStoredDocument } from '../../types/interfaces.js';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* InsertHandler - Handles insert commands
|
* InsertHandler - Handles insert commands
|
||||||
@@ -42,6 +43,8 @@ export class InsertHandler implements ICommandHandler {
|
|||||||
// Ensure collection exists
|
// Ensure collection exists
|
||||||
await storage.createCollection(database, collection);
|
await storage.createCollection(database, collection);
|
||||||
|
|
||||||
|
const indexEngine = context.getIndexEngine(collection);
|
||||||
|
|
||||||
// Insert documents
|
// Insert documents
|
||||||
for (let i = 0; i < documents.length; i++) {
|
for (let i = 0; i < documents.length; i++) {
|
||||||
const doc = documents[i];
|
const doc = documents[i];
|
||||||
@@ -52,6 +55,9 @@ export class InsertHandler implements ICommandHandler {
|
|||||||
doc._id = new plugins.bson.ObjectId();
|
doc._id = new plugins.bson.ObjectId();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Check index constraints before insert (doc now has _id)
|
||||||
|
await indexEngine.onInsert(doc as IStoredDocument);
|
||||||
|
|
||||||
await storage.insertOne(database, collection, doc);
|
await storage.insertOne(database, collection, doc);
|
||||||
insertedCount++;
|
insertedCount++;
|
||||||
} catch (error: any) {
|
} catch (error: any) {
|
||||||
@@ -1,5 +1,6 @@
|
|||||||
import * as plugins from '../../tsmdb.plugins.js';
|
import * as plugins from '../../plugins.js';
|
||||||
import type { ICommandHandler, IHandlerContext } from '../CommandRouter.js';
|
import type { ICommandHandler, IHandlerContext } from '../CommandRouter.js';
|
||||||
|
import type { IStoredDocument } from '../../types/interfaces.js';
|
||||||
import { QueryEngine } from '../../engine/QueryEngine.js';
|
import { QueryEngine } from '../../engine/QueryEngine.js';
|
||||||
import { UpdateEngine } from '../../engine/UpdateEngine.js';
|
import { UpdateEngine } from '../../engine/UpdateEngine.js';
|
||||||
|
|
||||||
@@ -69,6 +70,8 @@ export class UpdateHandler implements ICommandHandler {
|
|||||||
// Ensure collection exists
|
// Ensure collection exists
|
||||||
await storage.createCollection(database, collection);
|
await storage.createCollection(database, collection);
|
||||||
|
|
||||||
|
const indexEngine = context.getIndexEngine(collection);
|
||||||
|
|
||||||
for (let i = 0; i < updates.length; i++) {
|
for (let i = 0; i < updates.length; i++) {
|
||||||
const updateSpec = updates[i];
|
const updateSpec = updates[i];
|
||||||
const filter = updateSpec.q || updateSpec.filter || {};
|
const filter = updateSpec.q || updateSpec.filter || {};
|
||||||
@@ -78,8 +81,15 @@ export class UpdateHandler implements ICommandHandler {
|
|||||||
const arrayFilters = updateSpec.arrayFilters;
|
const arrayFilters = updateSpec.arrayFilters;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// Get all documents
|
// Try to use index-accelerated query
|
||||||
let documents = await storage.findAll(database, collection);
|
const candidateIds = await indexEngine.findCandidateIds(filter);
|
||||||
|
|
||||||
|
let documents: IStoredDocument[];
|
||||||
|
if (candidateIds !== null) {
|
||||||
|
documents = await storage.findByIds(database, collection, candidateIds);
|
||||||
|
} else {
|
||||||
|
documents = await storage.findAll(database, collection);
|
||||||
|
}
|
||||||
|
|
||||||
// Apply filter
|
// Apply filter
|
||||||
let matchingDocs = QueryEngine.filter(documents, filter);
|
let matchingDocs = QueryEngine.filter(documents, filter);
|
||||||
@@ -99,6 +109,8 @@ export class UpdateHandler implements ICommandHandler {
|
|||||||
Object.assign(updatedDoc, update.$setOnInsert);
|
Object.assign(updatedDoc, update.$setOnInsert);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Update index for the new document
|
||||||
|
await indexEngine.onInsert(updatedDoc);
|
||||||
await storage.insertOne(database, collection, updatedDoc);
|
await storage.insertOne(database, collection, updatedDoc);
|
||||||
totalUpserted++;
|
totalUpserted++;
|
||||||
upserted.push({ index: i, _id: updatedDoc._id });
|
upserted.push({ index: i, _id: updatedDoc._id });
|
||||||
@@ -113,6 +125,8 @@ export class UpdateHandler implements ICommandHandler {
|
|||||||
// Check if document actually changed
|
// Check if document actually changed
|
||||||
const changed = JSON.stringify(doc) !== JSON.stringify(updatedDoc);
|
const changed = JSON.stringify(doc) !== JSON.stringify(updatedDoc);
|
||||||
if (changed) {
|
if (changed) {
|
||||||
|
// Update index
|
||||||
|
await indexEngine.onUpdate(doc as any, updatedDoc);
|
||||||
await storage.updateById(database, collection, doc._id, updatedDoc);
|
await storage.updateById(database, collection, doc._id, updatedDoc);
|
||||||
totalModified++;
|
totalModified++;
|
||||||
}
|
}
|
||||||
@@ -186,8 +200,17 @@ export class UpdateHandler implements ICommandHandler {
|
|||||||
// Ensure collection exists
|
// Ensure collection exists
|
||||||
await storage.createCollection(database, collection);
|
await storage.createCollection(database, collection);
|
||||||
|
|
||||||
// Get matching documents
|
// Try to use index-accelerated query
|
||||||
let documents = await storage.findAll(database, collection);
|
const indexEngine = context.getIndexEngine(collection);
|
||||||
|
const candidateIds = await indexEngine.findCandidateIds(query);
|
||||||
|
|
||||||
|
let documents: IStoredDocument[];
|
||||||
|
if (candidateIds !== null) {
|
||||||
|
documents = await storage.findByIds(database, collection, candidateIds);
|
||||||
|
} else {
|
||||||
|
documents = await storage.findAll(database, collection);
|
||||||
|
}
|
||||||
|
|
||||||
let matchingDocs = QueryEngine.filter(documents, query);
|
let matchingDocs = QueryEngine.filter(documents, query);
|
||||||
|
|
||||||
// Apply sort if specified
|
// Apply sort if specified
|
||||||
@@ -203,6 +226,8 @@ export class UpdateHandler implements ICommandHandler {
|
|||||||
return { ok: 1, value: null };
|
return { ok: 1, value: null };
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Update index for delete
|
||||||
|
await indexEngine.onDelete(doc as any);
|
||||||
await storage.deleteById(database, collection, doc._id);
|
await storage.deleteById(database, collection, doc._id);
|
||||||
|
|
||||||
let result = doc;
|
let result = doc;
|
||||||
@@ -231,6 +256,8 @@ export class UpdateHandler implements ICommandHandler {
|
|||||||
// Update existing
|
// Update existing
|
||||||
originalDoc = { ...doc };
|
originalDoc = { ...doc };
|
||||||
resultDoc = UpdateEngine.applyUpdate(doc, update, arrayFilters);
|
resultDoc = UpdateEngine.applyUpdate(doc, update, arrayFilters);
|
||||||
|
// Update index
|
||||||
|
await indexEngine.onUpdate(doc as any, resultDoc as any);
|
||||||
await storage.updateById(database, collection, doc._id, resultDoc as any);
|
await storage.updateById(database, collection, doc._id, resultDoc as any);
|
||||||
} else {
|
} else {
|
||||||
// Upsert
|
// Upsert
|
||||||
@@ -243,6 +270,8 @@ export class UpdateHandler implements ICommandHandler {
|
|||||||
Object.assign(resultDoc, update.$setOnInsert);
|
Object.assign(resultDoc, update.$setOnInsert);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Update index for insert
|
||||||
|
await indexEngine.onInsert(resultDoc as any);
|
||||||
await storage.insertOne(database, collection, resultDoc);
|
await storage.insertOne(database, collection, resultDoc);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1,6 +1,17 @@
|
|||||||
import * as plugins from '../tsmdb.plugins.js';
|
import * as plugins from '../plugins.js';
|
||||||
import type { IStorageAdapter } from './IStorageAdapter.js';
|
import type { IStorageAdapter } from './IStorageAdapter.js';
|
||||||
import type { IStoredDocument, IOpLogEntry, Document } from '../types/interfaces.js';
|
import type { IStoredDocument, IOpLogEntry, Document } from '../types/interfaces.js';
|
||||||
|
import { calculateDocumentChecksum, verifyChecksum } from '../utils/checksum.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* File storage adapter options
|
||||||
|
*/
|
||||||
|
export interface IFileStorageAdapterOptions {
|
||||||
|
/** Enable checksum verification for data integrity */
|
||||||
|
enableChecksums?: boolean;
|
||||||
|
/** Throw error on checksum mismatch (default: false, just log warning) */
|
||||||
|
strictChecksums?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* File-based storage adapter for TsmDB
|
* File-based storage adapter for TsmDB
|
||||||
@@ -11,9 +22,13 @@ export class FileStorageAdapter implements IStorageAdapter {
|
|||||||
private opLogCounter = 0;
|
private opLogCounter = 0;
|
||||||
private initialized = false;
|
private initialized = false;
|
||||||
private fs = new plugins.smartfs.SmartFs(new plugins.smartfs.SmartFsProviderNode());
|
private fs = new plugins.smartfs.SmartFs(new plugins.smartfs.SmartFsProviderNode());
|
||||||
|
private enableChecksums: boolean;
|
||||||
|
private strictChecksums: boolean;
|
||||||
|
|
||||||
constructor(basePath: string) {
|
constructor(basePath: string, options?: IFileStorageAdapterOptions) {
|
||||||
this.basePath = basePath;
|
this.basePath = basePath;
|
||||||
|
this.enableChecksums = options?.enableChecksums ?? false;
|
||||||
|
this.strictChecksums = options?.strictChecksums ?? false;
|
||||||
}
|
}
|
||||||
|
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
@@ -68,6 +83,45 @@ export class FileStorageAdapter implements IStorageAdapter {
|
|||||||
return doc;
|
return doc;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Verify document checksum and handle errors
|
||||||
|
*/
|
||||||
|
private verifyDocumentChecksum(doc: any): boolean {
|
||||||
|
if (!this.enableChecksums || !doc._checksum) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
const isValid = verifyChecksum(doc);
|
||||||
|
if (!isValid) {
|
||||||
|
const errorMsg = `Checksum mismatch for document ${doc._id}`;
|
||||||
|
if (this.strictChecksums) {
|
||||||
|
throw new Error(errorMsg);
|
||||||
|
} else {
|
||||||
|
console.warn(`WARNING: ${errorMsg}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return isValid;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add checksum to document before storing
|
||||||
|
*/
|
||||||
|
private prepareDocumentForStorage(doc: any): any {
|
||||||
|
if (!this.enableChecksums) {
|
||||||
|
return doc;
|
||||||
|
}
|
||||||
|
const checksum = calculateDocumentChecksum(doc);
|
||||||
|
return { ...doc, _checksum: checksum };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remove internal checksum field before returning to user
|
||||||
|
*/
|
||||||
|
private cleanDocumentForReturn(doc: any): IStoredDocument {
|
||||||
|
const { _checksum, ...cleanDoc } = doc;
|
||||||
|
return this.restoreObjectIds(cleanDoc);
|
||||||
|
}
|
||||||
|
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
// Initialization
|
// Initialization
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
@@ -233,7 +287,9 @@ export class FileStorageAdapter implements IStorageAdapter {
|
|||||||
throw new Error(`Duplicate key error: _id ${idStr}`);
|
throw new Error(`Duplicate key error: _id ${idStr}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
docs.push(storedDoc);
|
// Add checksum if enabled
|
||||||
|
const docToStore = this.prepareDocumentForStorage(storedDoc);
|
||||||
|
docs.push(docToStore);
|
||||||
await this.writeJsonFile(collPath, docs);
|
await this.writeJsonFile(collPath, docs);
|
||||||
return storedDoc;
|
return storedDoc;
|
||||||
}
|
}
|
||||||
@@ -258,7 +314,9 @@ export class FileStorageAdapter implements IStorageAdapter {
|
|||||||
}
|
}
|
||||||
|
|
||||||
existingIds.add(idStr);
|
existingIds.add(idStr);
|
||||||
docs.push(storedDoc);
|
// Add checksum if enabled
|
||||||
|
const docToStore = this.prepareDocumentForStorage(storedDoc);
|
||||||
|
docs.push(docToStore);
|
||||||
results.push(storedDoc);
|
results.push(storedDoc);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -270,10 +328,33 @@ export class FileStorageAdapter implements IStorageAdapter {
|
|||||||
await this.createCollection(dbName, collName);
|
await this.createCollection(dbName, collName);
|
||||||
const collPath = this.getCollectionPath(dbName, collName);
|
const collPath = this.getCollectionPath(dbName, collName);
|
||||||
const docs = await this.readJsonFile<any[]>(collPath, []);
|
const docs = await this.readJsonFile<any[]>(collPath, []);
|
||||||
return docs.map(doc => this.restoreObjectIds(doc));
|
return docs.map(doc => {
|
||||||
|
// Verify checksum if enabled
|
||||||
|
this.verifyDocumentChecksum(doc);
|
||||||
|
// Clean and return document without internal checksum field
|
||||||
|
return this.cleanDocumentForReturn(doc);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async findByIds(dbName: string, collName: string, ids: Set<string>): Promise<IStoredDocument[]> {
|
||||||
|
await this.createCollection(dbName, collName);
|
||||||
|
const collPath = this.getCollectionPath(dbName, collName);
|
||||||
|
const docs = await this.readJsonFile<any[]>(collPath, []);
|
||||||
|
const results: IStoredDocument[] = [];
|
||||||
|
for (const doc of docs) {
|
||||||
|
// Verify checksum if enabled
|
||||||
|
this.verifyDocumentChecksum(doc);
|
||||||
|
// Clean and restore document
|
||||||
|
const cleaned = this.cleanDocumentForReturn(doc);
|
||||||
|
if (ids.has(cleaned._id.toHexString())) {
|
||||||
|
results.push(cleaned);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return results;
|
||||||
}
|
}
|
||||||
|
|
||||||
async findById(dbName: string, collName: string, id: plugins.bson.ObjectId): Promise<IStoredDocument | null> {
|
async findById(dbName: string, collName: string, id: plugins.bson.ObjectId): Promise<IStoredDocument | null> {
|
||||||
|
// Use findAll which already handles checksum verification
|
||||||
const docs = await this.findAll(dbName, collName);
|
const docs = await this.findAll(dbName, collName);
|
||||||
const idStr = id.toHexString();
|
const idStr = id.toHexString();
|
||||||
return docs.find(d => d._id.toHexString() === idStr) || null;
|
return docs.find(d => d._id.toHexString() === idStr) || null;
|
||||||
@@ -291,7 +372,9 @@ export class FileStorageAdapter implements IStorageAdapter {
|
|||||||
|
|
||||||
if (idx === -1) return false;
|
if (idx === -1) return false;
|
||||||
|
|
||||||
docs[idx] = doc;
|
// Add checksum if enabled
|
||||||
|
const docToStore = this.prepareDocumentForStorage(doc);
|
||||||
|
docs[idx] = docToStore;
|
||||||
await this.writeJsonFile(collPath, docs);
|
await this.writeJsonFile(collPath, docs);
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
import type * as plugins from '../tsmdb.plugins.js';
|
import type * as plugins from '../plugins.js';
|
||||||
import type { IStoredDocument, IOpLogEntry, Document } from '../types/interfaces.js';
|
import type { IStoredDocument, IOpLogEntry, Document } from '../types/interfaces.js';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -90,6 +90,12 @@ export interface IStorageAdapter {
|
|||||||
*/
|
*/
|
||||||
findAll(dbName: string, collName: string): Promise<IStoredDocument[]>;
|
findAll(dbName: string, collName: string): Promise<IStoredDocument[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find documents by a set of _id strings (hex format)
|
||||||
|
* Used for index-accelerated queries
|
||||||
|
*/
|
||||||
|
findByIds(dbName: string, collName: string, ids: Set<string>): Promise<IStoredDocument[]>;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Find a document by _id
|
* Find a document by _id
|
||||||
*/
|
*/
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
import * as plugins from '../tsmdb.plugins.js';
|
import * as plugins from '../plugins.js';
|
||||||
import type { IStorageAdapter } from './IStorageAdapter.js';
|
import type { IStorageAdapter } from './IStorageAdapter.js';
|
||||||
import type { IStoredDocument, IOpLogEntry, Document } from '../types/interfaces.js';
|
import type { IStoredDocument, IOpLogEntry, Document } from '../types/interfaces.js';
|
||||||
|
|
||||||
@@ -196,6 +196,18 @@ export class MemoryStorageAdapter implements IStorageAdapter {
|
|||||||
return Array.from(collection.values());
|
return Array.from(collection.values());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async findByIds(dbName: string, collName: string, ids: Set<string>): Promise<IStoredDocument[]> {
|
||||||
|
const collection = this.ensureCollection(dbName, collName);
|
||||||
|
const results: IStoredDocument[] = [];
|
||||||
|
for (const id of ids) {
|
||||||
|
const doc = collection.get(id);
|
||||||
|
if (doc) {
|
||||||
|
results.push(doc);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return results;
|
||||||
|
}
|
||||||
|
|
||||||
async findById(dbName: string, collName: string, id: plugins.bson.ObjectId): Promise<IStoredDocument | null> {
|
async findById(dbName: string, collName: string, id: plugins.bson.ObjectId): Promise<IStoredDocument | null> {
|
||||||
const collection = this.ensureCollection(dbName, collName);
|
const collection = this.ensureCollection(dbName, collName);
|
||||||
return collection.get(id.toHexString()) || null;
|
return collection.get(id.toHexString()) || null;
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
import * as plugins from '../tsmdb.plugins.js';
|
import * as plugins from '../plugins.js';
|
||||||
import type { IStorageAdapter } from './IStorageAdapter.js';
|
import type { IStorageAdapter } from './IStorageAdapter.js';
|
||||||
import type { IOpLogEntry, Document, IResumeToken, ChangeStreamOperationType } from '../types/interfaces.js';
|
import type { IOpLogEntry, Document, IResumeToken, ChangeStreamOperationType } from '../types/interfaces.js';
|
||||||
|
|
||||||
375
ts/ts_tsmdb/storage/WAL.ts
Normal file
375
ts/ts_tsmdb/storage/WAL.ts
Normal file
@@ -0,0 +1,375 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import type { Document, IStoredDocument } from '../types/interfaces.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* WAL entry operation types
|
||||||
|
*/
|
||||||
|
export type TWalOperation = 'insert' | 'update' | 'delete' | 'checkpoint' | 'begin' | 'commit' | 'abort';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* WAL entry structure
|
||||||
|
*/
|
||||||
|
export interface IWalEntry {
|
||||||
|
/** Log Sequence Number - monotonically increasing */
|
||||||
|
lsn: number;
|
||||||
|
/** Timestamp of the operation */
|
||||||
|
timestamp: number;
|
||||||
|
/** Operation type */
|
||||||
|
operation: TWalOperation;
|
||||||
|
/** Database name */
|
||||||
|
dbName: string;
|
||||||
|
/** Collection name */
|
||||||
|
collName: string;
|
||||||
|
/** Document ID (hex string) */
|
||||||
|
documentId: string;
|
||||||
|
/** Document data (BSON serialized, base64 encoded) */
|
||||||
|
data?: string;
|
||||||
|
/** Previous document data for updates (for rollback) */
|
||||||
|
previousData?: string;
|
||||||
|
/** Transaction ID if part of a transaction */
|
||||||
|
txnId?: string;
|
||||||
|
/** CRC32 checksum of the entry (excluding this field) */
|
||||||
|
checksum: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Checkpoint record
|
||||||
|
*/
|
||||||
|
interface ICheckpointRecord {
|
||||||
|
lsn: number;
|
||||||
|
timestamp: number;
|
||||||
|
lastCommittedLsn: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Write-Ahead Log (WAL) for durability and crash recovery
|
||||||
|
*
|
||||||
|
* The WAL ensures durability by writing operations to a log file before
|
||||||
|
* they are applied to the main storage. On crash recovery, uncommitted
|
||||||
|
* operations can be replayed to restore the database to a consistent state.
|
||||||
|
*/
|
||||||
|
export class WAL {
|
||||||
|
private walPath: string;
|
||||||
|
private currentLsn: number = 0;
|
||||||
|
private lastCheckpointLsn: number = 0;
|
||||||
|
private entries: IWalEntry[] = [];
|
||||||
|
private isInitialized: boolean = false;
|
||||||
|
private fs = new plugins.smartfs.SmartFs(new plugins.smartfs.SmartFsProviderNode());
|
||||||
|
|
||||||
|
// In-memory uncommitted entries per transaction
|
||||||
|
private uncommittedTxns: Map<string, IWalEntry[]> = new Map();
|
||||||
|
|
||||||
|
// Checkpoint interval (number of entries between checkpoints)
|
||||||
|
private checkpointInterval: number = 1000;
|
||||||
|
|
||||||
|
constructor(walPath: string, options?: { checkpointInterval?: number }) {
|
||||||
|
this.walPath = walPath;
|
||||||
|
if (options?.checkpointInterval) {
|
||||||
|
this.checkpointInterval = options.checkpointInterval;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initialize the WAL, loading existing entries and recovering if needed
|
||||||
|
*/
|
||||||
|
async initialize(): Promise<{ recoveredEntries: IWalEntry[] }> {
|
||||||
|
if (this.isInitialized) {
|
||||||
|
return { recoveredEntries: [] };
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ensure WAL directory exists
|
||||||
|
const walDir = this.walPath.substring(0, this.walPath.lastIndexOf('/'));
|
||||||
|
if (walDir) {
|
||||||
|
await this.fs.directory(walDir).recursive().create();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Try to load existing WAL
|
||||||
|
const exists = await this.fs.file(this.walPath).exists();
|
||||||
|
if (exists) {
|
||||||
|
const content = await this.fs.file(this.walPath).encoding('utf8').read();
|
||||||
|
const lines = (content as string).split('\n').filter(line => line.trim());
|
||||||
|
|
||||||
|
for (const line of lines) {
|
||||||
|
try {
|
||||||
|
const entry = JSON.parse(line) as IWalEntry;
|
||||||
|
// Verify checksum
|
||||||
|
if (this.verifyChecksum(entry)) {
|
||||||
|
this.entries.push(entry);
|
||||||
|
if (entry.lsn > this.currentLsn) {
|
||||||
|
this.currentLsn = entry.lsn;
|
||||||
|
}
|
||||||
|
if (entry.operation === 'checkpoint') {
|
||||||
|
this.lastCheckpointLsn = entry.lsn;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// Skip corrupted entries
|
||||||
|
console.warn('Skipping corrupted WAL entry');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.isInitialized = true;
|
||||||
|
|
||||||
|
// Return entries after last checkpoint that need recovery
|
||||||
|
const recoveredEntries = this.entries.filter(
|
||||||
|
e => e.lsn > this.lastCheckpointLsn &&
|
||||||
|
(e.operation === 'insert' || e.operation === 'update' || e.operation === 'delete')
|
||||||
|
);
|
||||||
|
|
||||||
|
return { recoveredEntries };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Log an insert operation
|
||||||
|
*/
|
||||||
|
async logInsert(dbName: string, collName: string, doc: IStoredDocument, txnId?: string): Promise<number> {
|
||||||
|
return this.appendEntry({
|
||||||
|
operation: 'insert',
|
||||||
|
dbName,
|
||||||
|
collName,
|
||||||
|
documentId: doc._id.toHexString(),
|
||||||
|
data: this.serializeDocument(doc),
|
||||||
|
txnId,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Log an update operation
|
||||||
|
*/
|
||||||
|
async logUpdate(
|
||||||
|
dbName: string,
|
||||||
|
collName: string,
|
||||||
|
oldDoc: IStoredDocument,
|
||||||
|
newDoc: IStoredDocument,
|
||||||
|
txnId?: string
|
||||||
|
): Promise<number> {
|
||||||
|
return this.appendEntry({
|
||||||
|
operation: 'update',
|
||||||
|
dbName,
|
||||||
|
collName,
|
||||||
|
documentId: oldDoc._id.toHexString(),
|
||||||
|
data: this.serializeDocument(newDoc),
|
||||||
|
previousData: this.serializeDocument(oldDoc),
|
||||||
|
txnId,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Log a delete operation
|
||||||
|
*/
|
||||||
|
async logDelete(dbName: string, collName: string, doc: IStoredDocument, txnId?: string): Promise<number> {
|
||||||
|
return this.appendEntry({
|
||||||
|
operation: 'delete',
|
||||||
|
dbName,
|
||||||
|
collName,
|
||||||
|
documentId: doc._id.toHexString(),
|
||||||
|
previousData: this.serializeDocument(doc),
|
||||||
|
txnId,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Log transaction begin
|
||||||
|
*/
|
||||||
|
async logBeginTransaction(txnId: string): Promise<number> {
|
||||||
|
this.uncommittedTxns.set(txnId, []);
|
||||||
|
return this.appendEntry({
|
||||||
|
operation: 'begin',
|
||||||
|
dbName: '',
|
||||||
|
collName: '',
|
||||||
|
documentId: '',
|
||||||
|
txnId,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Log transaction commit
|
||||||
|
*/
|
||||||
|
async logCommitTransaction(txnId: string): Promise<number> {
|
||||||
|
this.uncommittedTxns.delete(txnId);
|
||||||
|
return this.appendEntry({
|
||||||
|
operation: 'commit',
|
||||||
|
dbName: '',
|
||||||
|
collName: '',
|
||||||
|
documentId: '',
|
||||||
|
txnId,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Log transaction abort
|
||||||
|
*/
|
||||||
|
async logAbortTransaction(txnId: string): Promise<number> {
|
||||||
|
this.uncommittedTxns.delete(txnId);
|
||||||
|
return this.appendEntry({
|
||||||
|
operation: 'abort',
|
||||||
|
dbName: '',
|
||||||
|
collName: '',
|
||||||
|
documentId: '',
|
||||||
|
txnId,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get entries to roll back for an aborted transaction
|
||||||
|
*/
|
||||||
|
getTransactionEntries(txnId: string): IWalEntry[] {
|
||||||
|
return this.entries.filter(e => e.txnId === txnId);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a checkpoint - marks a consistent point in the log
|
||||||
|
*/
|
||||||
|
async checkpoint(): Promise<number> {
|
||||||
|
const lsn = await this.appendEntry({
|
||||||
|
operation: 'checkpoint',
|
||||||
|
dbName: '',
|
||||||
|
collName: '',
|
||||||
|
documentId: '',
|
||||||
|
});
|
||||||
|
this.lastCheckpointLsn = lsn;
|
||||||
|
|
||||||
|
// Truncate old entries (keep only entries after checkpoint)
|
||||||
|
await this.truncate();
|
||||||
|
|
||||||
|
return lsn;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Truncate the WAL file, removing entries before the last checkpoint
|
||||||
|
*/
|
||||||
|
private async truncate(): Promise<void> {
|
||||||
|
// Keep entries after last checkpoint
|
||||||
|
const newEntries = this.entries.filter(e => e.lsn >= this.lastCheckpointLsn);
|
||||||
|
this.entries = newEntries;
|
||||||
|
|
||||||
|
// Rewrite the WAL file
|
||||||
|
const lines = this.entries.map(e => JSON.stringify(e)).join('\n');
|
||||||
|
await this.fs.file(this.walPath).encoding('utf8').write(lines);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get current LSN
|
||||||
|
*/
|
||||||
|
getCurrentLsn(): number {
|
||||||
|
return this.currentLsn;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get entries after a specific LSN (for recovery)
|
||||||
|
*/
|
||||||
|
getEntriesAfter(lsn: number): IWalEntry[] {
|
||||||
|
return this.entries.filter(e => e.lsn > lsn);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Close the WAL
|
||||||
|
*/
|
||||||
|
async close(): Promise<void> {
|
||||||
|
if (this.isInitialized) {
|
||||||
|
// Final checkpoint before close
|
||||||
|
await this.checkpoint();
|
||||||
|
}
|
||||||
|
this.isInitialized = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Private Methods
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
private async appendEntry(
|
||||||
|
partial: Omit<IWalEntry, 'lsn' | 'timestamp' | 'checksum'>
|
||||||
|
): Promise<number> {
|
||||||
|
await this.initialize();
|
||||||
|
|
||||||
|
this.currentLsn++;
|
||||||
|
const entry: IWalEntry = {
|
||||||
|
...partial,
|
||||||
|
lsn: this.currentLsn,
|
||||||
|
timestamp: Date.now(),
|
||||||
|
checksum: 0, // Will be calculated
|
||||||
|
};
|
||||||
|
|
||||||
|
// Calculate checksum
|
||||||
|
entry.checksum = this.calculateChecksum(entry);
|
||||||
|
|
||||||
|
// Track in transaction if applicable
|
||||||
|
if (partial.txnId && this.uncommittedTxns.has(partial.txnId)) {
|
||||||
|
this.uncommittedTxns.get(partial.txnId)!.push(entry);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add to in-memory log
|
||||||
|
this.entries.push(entry);
|
||||||
|
|
||||||
|
// Append to file (append mode for durability)
|
||||||
|
await this.fs.file(this.walPath).encoding('utf8').append(JSON.stringify(entry) + '\n');
|
||||||
|
|
||||||
|
// Check if we need a checkpoint
|
||||||
|
if (this.entries.length - this.lastCheckpointLsn >= this.checkpointInterval) {
|
||||||
|
await this.checkpoint();
|
||||||
|
}
|
||||||
|
|
||||||
|
return entry.lsn;
|
||||||
|
}
|
||||||
|
|
||||||
|
private serializeDocument(doc: Document): string {
|
||||||
|
// Serialize document to BSON and encode as base64
|
||||||
|
const bsonData = plugins.bson.serialize(doc);
|
||||||
|
return Buffer.from(bsonData).toString('base64');
|
||||||
|
}
|
||||||
|
|
||||||
|
private deserializeDocument(data: string): Document {
|
||||||
|
// Decode base64 and deserialize from BSON
|
||||||
|
const buffer = Buffer.from(data, 'base64');
|
||||||
|
return plugins.bson.deserialize(buffer);
|
||||||
|
}
|
||||||
|
|
||||||
|
private calculateChecksum(entry: IWalEntry): number {
|
||||||
|
// Simple CRC32-like checksum
|
||||||
|
const str = JSON.stringify({
|
||||||
|
lsn: entry.lsn,
|
||||||
|
timestamp: entry.timestamp,
|
||||||
|
operation: entry.operation,
|
||||||
|
dbName: entry.dbName,
|
||||||
|
collName: entry.collName,
|
||||||
|
documentId: entry.documentId,
|
||||||
|
data: entry.data,
|
||||||
|
previousData: entry.previousData,
|
||||||
|
txnId: entry.txnId,
|
||||||
|
});
|
||||||
|
|
||||||
|
let crc = 0xFFFFFFFF;
|
||||||
|
for (let i = 0; i < str.length; i++) {
|
||||||
|
crc ^= str.charCodeAt(i);
|
||||||
|
for (let j = 0; j < 8; j++) {
|
||||||
|
crc = (crc >>> 1) ^ (crc & 1 ? 0xEDB88320 : 0);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return (~crc) >>> 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
private verifyChecksum(entry: IWalEntry): boolean {
|
||||||
|
const savedChecksum = entry.checksum;
|
||||||
|
entry.checksum = 0;
|
||||||
|
const calculatedChecksum = this.calculateChecksum(entry);
|
||||||
|
entry.checksum = savedChecksum;
|
||||||
|
return calculatedChecksum === savedChecksum;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Recover document from WAL entry
|
||||||
|
*/
|
||||||
|
recoverDocument(entry: IWalEntry): IStoredDocument | null {
|
||||||
|
if (!entry.data) return null;
|
||||||
|
return this.deserializeDocument(entry.data) as IStoredDocument;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Recover previous document state from WAL entry (for rollback)
|
||||||
|
*/
|
||||||
|
recoverPreviousDocument(entry: IWalEntry): IStoredDocument | null {
|
||||||
|
if (!entry.previousData) return null;
|
||||||
|
return this.deserializeDocument(entry.previousData) as IStoredDocument;
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
import type * as plugins from '../tsmdb.plugins.js';
|
import type * as plugins from '../plugins.js';
|
||||||
|
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
// Document Types
|
// Document Types
|
||||||
88
ts/ts_tsmdb/utils/checksum.ts
Normal file
88
ts/ts_tsmdb/utils/checksum.ts
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
/**
|
||||||
|
* CRC32 checksum utilities for data integrity
|
||||||
|
*/
|
||||||
|
|
||||||
|
// CRC32 lookup table
|
||||||
|
const CRC32_TABLE: number[] = [];
|
||||||
|
|
||||||
|
// Initialize the CRC32 table
|
||||||
|
function initCRC32Table(): void {
|
||||||
|
if (CRC32_TABLE.length > 0) return;
|
||||||
|
|
||||||
|
for (let i = 0; i < 256; i++) {
|
||||||
|
let crc = i;
|
||||||
|
for (let j = 0; j < 8; j++) {
|
||||||
|
crc = (crc & 1) ? (0xEDB88320 ^ (crc >>> 1)) : (crc >>> 1);
|
||||||
|
}
|
||||||
|
CRC32_TABLE[i] = crc >>> 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calculate CRC32 checksum for a string
|
||||||
|
*/
|
||||||
|
export function calculateCRC32(data: string): number {
|
||||||
|
initCRC32Table();
|
||||||
|
|
||||||
|
let crc = 0xFFFFFFFF;
|
||||||
|
for (let i = 0; i < data.length; i++) {
|
||||||
|
const byte = data.charCodeAt(i) & 0xFF;
|
||||||
|
crc = CRC32_TABLE[(crc ^ byte) & 0xFF] ^ (crc >>> 8);
|
||||||
|
}
|
||||||
|
return (~crc) >>> 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calculate CRC32 checksum for a Buffer
|
||||||
|
*/
|
||||||
|
export function calculateCRC32Buffer(data: Buffer): number {
|
||||||
|
initCRC32Table();
|
||||||
|
|
||||||
|
let crc = 0xFFFFFFFF;
|
||||||
|
for (let i = 0; i < data.length; i++) {
|
||||||
|
crc = CRC32_TABLE[(crc ^ data[i]) & 0xFF] ^ (crc >>> 8);
|
||||||
|
}
|
||||||
|
return (~crc) >>> 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calculate checksum for a document (serialized as JSON)
|
||||||
|
*/
|
||||||
|
export function calculateDocumentChecksum(doc: Record<string, any>): number {
|
||||||
|
// Exclude _checksum field from calculation
|
||||||
|
const { _checksum, ...docWithoutChecksum } = doc;
|
||||||
|
const json = JSON.stringify(docWithoutChecksum);
|
||||||
|
return calculateCRC32(json);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add checksum to a document
|
||||||
|
*/
|
||||||
|
export function addChecksum<T extends Record<string, any>>(doc: T): T & { _checksum: number } {
|
||||||
|
const checksum = calculateDocumentChecksum(doc);
|
||||||
|
return { ...doc, _checksum: checksum };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Verify checksum of a document
|
||||||
|
* Returns true if checksum is valid or if document has no checksum
|
||||||
|
*/
|
||||||
|
export function verifyChecksum(doc: Record<string, any>): boolean {
|
||||||
|
if (!('_checksum' in doc)) {
|
||||||
|
// No checksum to verify
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
const storedChecksum = doc._checksum;
|
||||||
|
const calculatedChecksum = calculateDocumentChecksum(doc);
|
||||||
|
|
||||||
|
return storedChecksum === calculatedChecksum;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remove checksum from a document
|
||||||
|
*/
|
||||||
|
export function removeChecksum<T extends Record<string, any>>(doc: T): Omit<T, '_checksum'> {
|
||||||
|
const { _checksum, ...docWithoutChecksum } = doc;
|
||||||
|
return docWithoutChecksum as Omit<T, '_checksum'>;
|
||||||
|
}
|
||||||
1
ts/ts_tsmdb/utils/index.ts
Normal file
1
ts/ts_tsmdb/utils/index.ts
Normal file
@@ -0,0 +1 @@
|
|||||||
|
export * from './checksum.js';
|
||||||
@@ -1,479 +0,0 @@
|
|||||||
import * as plugins from '../tsmdb.plugins.js';
|
|
||||||
import type { IStorageAdapter } from '../storage/IStorageAdapter.js';
|
|
||||||
import type {
|
|
||||||
Document,
|
|
||||||
IStoredDocument,
|
|
||||||
IIndexSpecification,
|
|
||||||
IIndexInfo,
|
|
||||||
ICreateIndexOptions,
|
|
||||||
} from '../types/interfaces.js';
|
|
||||||
import { TsmdbDuplicateKeyError, TsmdbIndexError } from '../errors/TsmdbErrors.js';
|
|
||||||
import { QueryEngine } from './QueryEngine.js';
|
|
||||||
|
|
||||||
/**
 * Index data structure for fast lookups
 *
 * In-memory representation of one index: its specification plus an
 * inverted map from JSON-encoded key value to the set of document ids.
 */
interface IIndexData {
  /** Index name, e.g. "email_1" */
  name: string;
  /** Indexed fields mapped to direction (1 | -1) or a special type string */
  key: Record<string, 1 | -1 | string>;
  /** Reject duplicate key values when true */
  unique: boolean;
  /** Skip documents missing the indexed field(s) when true */
  sparse: boolean;
  // TTL seconds — stored and reported by listIndexes, but no expiry
  // enforcement is visible in this file; TODO confirm where TTL runs
  expireAfterSeconds?: number;
  // Map from index key value to document _id(s)
  entries: Map<string, Set<string>>;
}
|
|
||||||
|
|
||||||
/**
 * Index engine for managing indexes and query optimization
 *
 * Maintains in-memory inverted indexes for one collection, keeps them in
 * sync on insert/update/delete, persists index specs via the storage
 * adapter, and selects/uses an index to narrow query candidates.
 */
export class IndexEngine {
  private dbName: string;
  private collName: string;
  private storage: IStorageAdapter;
  // All indexes for this collection, keyed by index name
  private indexes: Map<string, IIndexData> = new Map();
  private initialized = false;

  constructor(dbName: string, collName: string, storage: IStorageAdapter) {
    this.dbName = dbName;
    this.collName = collName;
    this.storage = storage;
  }

  /**
   * Initialize indexes from storage
   *
   * Loads persisted index specs and rebuilds each index's entry map by
   * scanning every document in the collection. Idempotent.
   * NOTE(review): unique constraints are not re-validated during this
   * rebuild (unlike createIndex) — duplicates already on disk are accepted.
   */
  async initialize(): Promise<void> {
    if (this.initialized) return;

    const storedIndexes = await this.storage.getIndexes(this.dbName, this.collName);
    const documents = await this.storage.findAll(this.dbName, this.collName);

    for (const indexSpec of storedIndexes) {
      const indexData: IIndexData = {
        name: indexSpec.name,
        key: indexSpec.key,
        unique: indexSpec.unique || false,
        sparse: indexSpec.sparse || false,
        expireAfterSeconds: indexSpec.expireAfterSeconds,
        entries: new Map(),
      };

      // Build index entries
      for (const doc of documents) {
        const keyValue = this.extractKeyValue(doc, indexSpec.key);
        // Sparse indexes skip documents whose key value is null/missing
        if (keyValue !== null || !indexData.sparse) {
          const keyStr = JSON.stringify(keyValue);
          if (!indexData.entries.has(keyStr)) {
            indexData.entries.set(keyStr, new Set());
          }
          indexData.entries.get(keyStr)!.add(doc._id.toHexString());
        }
      }

      this.indexes.set(indexSpec.name, indexData);
    }

    this.initialized = true;
  }

  /**
   * Create a new index
   *
   * Builds the entry map from all existing documents, enforcing uniqueness
   * if requested, then persists the spec. Returns the existing name without
   * rebuilding if an index of the same name already exists.
   *
   * @returns the index name (generated as "field_dir[_field_dir...]" when
   *          options.name is not given)
   * @throws TsmdbDuplicateKeyError when unique and a duplicate key exists
   */
  async createIndex(
    key: Record<string, 1 | -1 | 'text' | '2dsphere'>,
    options?: ICreateIndexOptions
  ): Promise<string> {
    await this.initialize();

    // Generate index name if not provided
    const name = options?.name || this.generateIndexName(key);

    // Check if index already exists
    if (this.indexes.has(name)) {
      return name;
    }

    // Create index data structure
    const indexData: IIndexData = {
      name,
      key: key as Record<string, 1 | -1 | string>,
      unique: options?.unique || false,
      sparse: options?.sparse || false,
      expireAfterSeconds: options?.expireAfterSeconds,
      entries: new Map(),
    };

    // Build index from existing documents
    const documents = await this.storage.findAll(this.dbName, this.collName);

    for (const doc of documents) {
      const keyValue = this.extractKeyValue(doc, key);

      if (keyValue === null && indexData.sparse) {
        continue;
      }

      const keyStr = JSON.stringify(keyValue);

      if (indexData.unique && indexData.entries.has(keyStr)) {
        throw new TsmdbDuplicateKeyError(
          `E11000 duplicate key error index: ${this.dbName}.${this.collName}.$${name}`,
          key as Record<string, 1>,
          keyValue
        );
      }

      if (!indexData.entries.has(keyStr)) {
        indexData.entries.set(keyStr, new Set());
      }
      indexData.entries.get(keyStr)!.add(doc._id.toHexString());
    }

    // Store index
    this.indexes.set(name, indexData);
    await this.storage.saveIndex(this.dbName, this.collName, name, {
      key,
      unique: options?.unique,
      sparse: options?.sparse,
      expireAfterSeconds: options?.expireAfterSeconds,
    });

    return name;
  }

  /**
   * Drop an index
   *
   * @throws TsmdbIndexError for the protected _id index or an unknown name
   */
  async dropIndex(name: string): Promise<void> {
    await this.initialize();

    if (name === '_id_') {
      throw new TsmdbIndexError('cannot drop _id index');
    }

    if (!this.indexes.has(name)) {
      throw new TsmdbIndexError(`index not found: ${name}`);
    }

    this.indexes.delete(name);
    await this.storage.dropIndex(this.dbName, this.collName, name);
  }

  /**
   * Drop all indexes except _id
   */
  async dropAllIndexes(): Promise<void> {
    await this.initialize();

    const names = Array.from(this.indexes.keys()).filter(n => n !== '_id_');
    for (const name of names) {
      this.indexes.delete(name);
      await this.storage.dropIndex(this.dbName, this.collName, name);
    }
  }

  /**
   * List all indexes
   *
   * Returned shape mirrors MongoDB's listIndexes output (spec version v: 2).
   */
  async listIndexes(): Promise<IIndexInfo[]> {
    await this.initialize();

    return Array.from(this.indexes.values()).map(idx => ({
      v: 2,
      key: idx.key,
      name: idx.name,
      // false is reported as absent (undefined), matching Mongo conventions
      unique: idx.unique || undefined,
      sparse: idx.sparse || undefined,
      expireAfterSeconds: idx.expireAfterSeconds,
    }));
  }

  /**
   * Check if an index exists
   */
  async indexExists(name: string): Promise<boolean> {
    await this.initialize();
    return this.indexes.has(name);
  }

  /**
   * Update index entries after document insert
   *
   * @throws TsmdbDuplicateKeyError when a unique index already holds the key
   */
  async onInsert(doc: IStoredDocument): Promise<void> {
    await this.initialize();

    for (const [name, indexData] of this.indexes) {
      const keyValue = this.extractKeyValue(doc, indexData.key);

      if (keyValue === null && indexData.sparse) {
        continue;
      }

      const keyStr = JSON.stringify(keyValue);

      // Check unique constraint
      if (indexData.unique) {
        const existing = indexData.entries.get(keyStr);
        if (existing && existing.size > 0) {
          throw new TsmdbDuplicateKeyError(
            `E11000 duplicate key error collection: ${this.dbName}.${this.collName} index: ${name}`,
            indexData.key as Record<string, 1>,
            keyValue
          );
        }
      }

      if (!indexData.entries.has(keyStr)) {
        indexData.entries.set(keyStr, new Set());
      }
      indexData.entries.get(keyStr)!.add(doc._id.toHexString());
    }
  }

  /**
   * Update index entries after document update
   *
   * Only acts when the indexed key value actually changed: removes the old
   * mapping, re-checks the unique constraint for the new key, adds the new
   * mapping. Unchanged keys are left alone entirely.
   */
  async onUpdate(oldDoc: IStoredDocument, newDoc: IStoredDocument): Promise<void> {
    await this.initialize();

    for (const [name, indexData] of this.indexes) {
      const oldKeyValue = this.extractKeyValue(oldDoc, indexData.key);
      const newKeyValue = this.extractKeyValue(newDoc, indexData.key);
      const oldKeyStr = JSON.stringify(oldKeyValue);
      const newKeyStr = JSON.stringify(newKeyValue);

      // Remove old entry if key changed
      if (oldKeyStr !== newKeyStr) {
        if (oldKeyValue !== null || !indexData.sparse) {
          const oldSet = indexData.entries.get(oldKeyStr);
          if (oldSet) {
            oldSet.delete(oldDoc._id.toHexString());
            if (oldSet.size === 0) {
              indexData.entries.delete(oldKeyStr);
            }
          }
        }

        // Add new entry
        if (newKeyValue !== null || !indexData.sparse) {
          // Check unique constraint (old key already removed above, so a
          // doc keeping its own key never trips this — that case was
          // filtered out by oldKeyStr !== newKeyStr)
          if (indexData.unique) {
            const existing = indexData.entries.get(newKeyStr);
            if (existing && existing.size > 0) {
              throw new TsmdbDuplicateKeyError(
                `E11000 duplicate key error collection: ${this.dbName}.${this.collName} index: ${name}`,
                indexData.key as Record<string, 1>,
                newKeyValue
              );
            }
          }

          if (!indexData.entries.has(newKeyStr)) {
            indexData.entries.set(newKeyStr, new Set());
          }
          indexData.entries.get(newKeyStr)!.add(newDoc._id.toHexString());
        }
      }
    }
  }

  /**
   * Update index entries after document delete
   */
  async onDelete(doc: IStoredDocument): Promise<void> {
    await this.initialize();

    for (const indexData of this.indexes.values()) {
      const keyValue = this.extractKeyValue(doc, indexData.key);

      if (keyValue === null && indexData.sparse) {
        continue;
      }

      const keyStr = JSON.stringify(keyValue);
      const set = indexData.entries.get(keyStr);
      if (set) {
        set.delete(doc._id.toHexString());
        // Drop empty buckets so the entries map doesn't accumulate garbage
        if (set.size === 0) {
          indexData.entries.delete(keyStr);
        }
      }
    }
  }

  /**
   * Find the best index for a query
   *
   * Scores each index by how many of its leading (prefix) fields appear in
   * the filter, with a small bonus for unique indexes. Returns null when no
   * index matches any filter field.
   */
  selectIndex(filter: Document): { name: string; data: IIndexData } | null {
    if (!filter || Object.keys(filter).length === 0) {
      return null;
    }

    // Get filter fields
    const filterFields = new Set(this.getFilterFields(filter));

    // Score each index
    let bestIndex: { name: string; data: IIndexData } | null = null;
    let bestScore = 0;

    for (const [name, indexData] of this.indexes) {
      const indexFields = Object.keys(indexData.key);
      let score = 0;

      // Count how many index fields are in the filter
      for (const field of indexFields) {
        if (filterFields.has(field)) {
          score++;
        } else {
          break; // Index fields must be contiguous
        }
      }

      // Prefer unique indexes
      if (indexData.unique && score > 0) {
        score += 0.5;
      }

      if (score > bestScore) {
        bestScore = score;
        bestIndex = { name, data: indexData };
      }
    }

    return bestIndex;
  }

  /**
   * Use index to find candidate document IDs
   *
   * Returns null when no index applies (caller should fall back to a full
   * scan); otherwise the (possibly empty) set of matching document ids.
   * NOTE(review): for a compound index with only a prefix of fields in the
   * filter, buildKeyValue pads missing fields with null before lookup —
   * that only hits entries whose trailing fields are actually null/missing,
   * so partial-prefix lookups may return an empty set rather than null.
   * TODO confirm callers treat an empty set as "no matches", not "no index".
   */
  async findCandidateIds(filter: Document): Promise<Set<string> | null> {
    await this.initialize();

    const index = this.selectIndex(filter);
    if (!index) return null;

    // Try to use the index for equality matches
    const indexFields = Object.keys(index.data.key);
    const equalityValues: Record<string, any> = {};

    for (const field of indexFields) {
      const filterValue = this.getFilterValue(filter, field);
      if (filterValue === undefined) break;

      // Only use equality matches for index lookup
      if (typeof filterValue === 'object' && filterValue !== null) {
        if (filterValue.$eq !== undefined) {
          equalityValues[field] = filterValue.$eq;
        } else if (filterValue.$in !== undefined) {
          // Handle $in with multiple lookups (union of per-value results)
          const results = new Set<string>();
          for (const val of filterValue.$in) {
            equalityValues[field] = val;
            const keyStr = JSON.stringify(this.buildKeyValue(equalityValues, index.data.key));
            const ids = index.data.entries.get(keyStr);
            if (ids) {
              for (const id of ids) {
                results.add(id);
              }
            }
          }
          return results;
        } else {
          break; // Non-equality operator, stop here
        }
      } else {
        equalityValues[field] = filterValue;
      }
    }

    if (Object.keys(equalityValues).length === 0) {
      return null;
    }

    const keyStr = JSON.stringify(this.buildKeyValue(equalityValues, index.data.key));
    return index.data.entries.get(keyStr) || new Set();
  }

  // ============================================================================
  // Helper Methods
  // ============================================================================

  // Derive a Mongo-style default index name: "field_dir" segments joined
  // with underscores, e.g. {a: 1, b: -1} -> "a_1_b_-1".
  private generateIndexName(key: Record<string, any>): string {
    return Object.entries(key)
      .map(([field, dir]) => `${field}_${dir}`)
      .join('_');
  }

  // Extract the indexed value(s) from a document: a scalar for a
  // single-field index, an array of values for a compound index.
  // Missing fields become null (dot-paths resolved via QueryEngine).
  private extractKeyValue(doc: Document, key: Record<string, any>): any {
    const values: any[] = [];

    for (const field of Object.keys(key)) {
      const value = QueryEngine.getNestedValue(doc, field);
      values.push(value === undefined ? null : value);
    }

    // For single-field index, return the value directly
    if (values.length === 1) {
      return values[0];
    }

    return values;
  }

  // Assemble a lookup key in index-field order from a field->value map,
  // padding absent fields with null. Mirrors extractKeyValue's shape.
  private buildKeyValue(values: Record<string, any>, key: Record<string, any>): any {
    const result: any[] = [];

    for (const field of Object.keys(key)) {
      result.push(values[field] !== undefined ? values[field] : null);
    }

    if (result.length === 1) {
      return result[0];
    }

    return result;
  }

  // Collect every field path mentioned in a filter, recursing into
  // $and/$or/$nor and into nested plain-object values (dot-joined).
  private getFilterFields(filter: Document, prefix = ''): string[] {
    const fields: string[] = [];

    for (const [key, value] of Object.entries(filter)) {
      if (key.startsWith('$')) {
        // Logical operator
        if (key === '$and' || key === '$or' || key === '$nor') {
          for (const subFilter of value as Document[]) {
            fields.push(...this.getFilterFields(subFilter, prefix));
          }
        }
      } else {
        const fullKey = prefix ? `${prefix}.${key}` : key;
        fields.push(fullKey);

        // Check for nested filters
        if (typeof value === 'object' && value !== null && !Array.isArray(value)) {
          const subKeys = Object.keys(value);
          if (subKeys.length > 0 && !subKeys[0].startsWith('$')) {
            fields.push(...this.getFilterFields(value, fullKey));
          }
        }
      }
    }

    return fields;
  }

  // Resolve a possibly dotted field path against the raw filter object
  // (NOT against a document) — returns undefined if any segment is missing.
  private getFilterValue(filter: Document, field: string): any {
    // Handle dot notation
    const parts = field.split('.');
    let current: any = filter;

    for (const part of parts) {
      if (current === null || current === undefined) {
        return undefined;
      }
      current = current[part];
    }

    return current;
  }
}
|
|
||||||
Reference in New Issue
Block a user