Compare commits
18 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| e6a36ecb5f | |||
| 6a37a773ea | |||
| 1fff277698 | |||
| 0ad7f316c4 | |||
| 0d450e7d4e | |||
| fff77fbd8e | |||
| 678bf15eb4 | |||
| aa45e9579b | |||
| e3dc19aa7c | |||
| 316af45b5e | |||
| 6932059965 | |||
| bd1764159e | |||
| 12102255c4 | |||
| a0df731bc0 | |||
| 28e166ee35 | |||
| 06ada11b79 | |||
| 17195cfe1b | |||
| fcc5a0e557 |
90
changelog.md
90
changelog.md
@@ -1,5 +1,95 @@
|
|||||||
# Changelog
|
# Changelog
|
||||||
|
|
||||||
|
## 2026-02-03 - 4.3.0 - feat(docs)
|
||||||
|
add LocalTsmDb documentation and examples; update README code samples and imports; correct examples and variable names; update package author
|
||||||
|
|
||||||
|
- Introduce LocalTsmDb: zero-config local database with automatic persistence, auto port discovery, and pre-connected client (added Quick Start, API, Features, and testing examples).
|
||||||
|
- Expand comparison table to include LocalTsmDb alongside SmartMongo and TsmDB.
|
||||||
|
- Update README examples: new LocalTsmDb usage, reorder options (LocalTsmDb, TsmDB, SmartMongo), rename test DB variable (db -> testDb), and adjust test snippets for Jest/Mocha and tap.
|
||||||
|
- Adjust code snippets and API notes: switch some example imports to use tsmdb, replace FileStorageAdapter references, change planner.createPlan to await planner.plan, and use wal.getEntriesAfter(...) without awaiting.
|
||||||
|
- Update package.json author from 'Lossless GmbH' to 'Task Venture Capital GmbH'.
|
||||||
|
|
||||||
|
## 2026-02-03 - 4.2.1 - fix(package.json)
|
||||||
|
replace main and typings with exports field pointing to ./dist_ts/index.js
|
||||||
|
|
||||||
|
- Added package.json exports field mapping "." to ./dist_ts/index.js to declare the package entrypoint.
|
||||||
|
- Removed main (dist_ts/index.js) and typings (dist_ts/index.d.ts) entries.
|
||||||
|
- Note: switching to exports improves Node resolution but removing the typings entry may affect TypeScript consumers expecting index.d.ts.
|
||||||
|
|
||||||
|
## 2026-02-01 - 4.2.0 - feat(tsmdb)
|
||||||
|
implement TsmDB Mongo-wire-compatible server, add storage/engine modules and reorganize exports
|
||||||
|
|
||||||
|
- Add full TsmDB implementation under ts/ts_tsmdb: wire protocol, server, command router, handlers, engines (Query, Update, Aggregation, Index, Transaction, Session), storage adapters (Memory, File), OpLog, WAL, utils and types.
|
||||||
|
- Remove legacy ts/tsmdb implementation and replace with new ts_tsmdb module exports.
|
||||||
|
- Introduce ts/ts_mongotools module and move SmartMongo class there; update top-level exports in ts/index.ts to export SmartMongo, tsmdb (from ts_tsmdb) and LocalTsmDb.
|
||||||
|
- Add LocalTsmDb convenience class (ts/ts_local) to start a file-backed TsmDB and return a connected MongoClient.
|
||||||
|
- Refactor plugin imports into per-module plugins files and add utilities (checksum, persistence, query planner, index engine).
|
||||||
|
|
||||||
|
## 2026-02-01 - 4.1.1 - fix(tsmdb)
|
||||||
|
add comprehensive unit tests for tsmdb components: checksum, query planner, index engine, session, and WAL
|
||||||
|
|
||||||
|
- Add new tests: test.tsmdb.checksum.ts — CRC32 and document checksum utilities (add/verify/remove)
|
||||||
|
- Add new tests: test.tsmdb.queryplanner.ts — QueryPlanner plans, index usage, selectivity, explain output, and edge cases
|
||||||
|
- Add new tests: test.tsmdb.indexengine.ts — Index creation, unique/sparse options, candidate selection, and constraints
|
||||||
|
- Add new tests: test.tsmdb.session.ts — Session lifecycle, touch/refresh/close, extractSessionId handling
|
||||||
|
- Add new tests: test.tsmdb.wal.ts — WAL initialization, LSN increments, logging/recovery for inserts/updates/deletes, binary and nested data handling
|
||||||
|
- Tests only — no production API changes; increases test coverage
|
||||||
|
- Recommend patch bump from 4.1.0 to 4.1.1
|
||||||
|
|
||||||
|
## 2026-02-01 - 4.1.0 - feat(readme)
|
||||||
|
expand README with storage integrity, WAL, query planner, session & transaction docs; update test script to enable verbose logging and increase timeout
|
||||||
|
|
||||||
|
- Updated npm test script to run tstest with --verbose, --logfile and --timeout 60 to improve test output and avoid timeouts.
|
||||||
|
- Extensive README additions: file storage adapter examples with checksum options, write-ahead logging (WAL) usage and recovery, query planner examples, index and query execution details, session and transaction examples and features.
|
||||||
|
- Wire protocol / features table updated to include Transactions and Sessions and added admin commands (dbStats, collStats).
|
||||||
|
- Architecture diagram and component list updated to include QueryPlanner, SessionEngine, TransactionEngine and WAL; storage layer annotated with checksums and WAL.
|
||||||
|
- Minor example import tweak: MongoClient import now includes Db type in test examples.
|
||||||
|
|
||||||
|
## 2026-02-01 - 4.0.0 - BREAKING CHANGE(storage,engine,server)
|
||||||
|
add session & transaction management, index/query planner, WAL and checksum support; integrate index-accelerated queries and update storage API (findByIds) to enable index optimizations
|
||||||
|
|
||||||
|
- Add SessionEngine with session lifecycle, auto-abort of transactions on expiry and session tracking in CommandRouter and AdminHandler.
|
||||||
|
- Introduce TransactionEngine integrations in CommandRouter and AdminHandler; handlers now support start/commit/abort transaction workflows.
|
||||||
|
- Add IndexEngine enhancements including a simple B-tree and hash map optimizations; integrate index usage into Find/Count/Insert/Update/Delete handlers for index-accelerated queries and index maintenance on mutations.
|
||||||
|
- Add QueryPlanner to choose IXSCAN vs COLLSCAN and provide explain plans.
|
||||||
|
- Add WAL (write-ahead log) for durability, with LSNs, checkpoints and recovery APIs.
|
||||||
|
- Add checksum utilities and FileStorageAdapter support for checksums (enableChecksums/strictChecksums), with verification on read and optional strict failure behavior.
|
||||||
|
- IStorageAdapter interface changed to include findByIds; MemoryStorageAdapter and FileStorageAdapter implement findByIds to support index lookups.
|
||||||
|
- Exported API additions: WAL, QueryPlanner, SessionEngine, checksum utilities; CommandRouter now caches IndexEngines and exposes transaction/session engines.
|
||||||
|
- Breaking change: the IStorageAdapter interface change requires third-party storage adapters to implement the new findByIds method.
|
||||||
|
|
||||||
|
## 2026-02-01 - 3.0.0 - BREAKING CHANGE(tsmdb)
|
||||||
|
rename CongoDB to TsmDB and relocate/rename wire-protocol server implementation and public exports
|
||||||
|
|
||||||
|
- Project refactor renames the in-memory wire-protocol server from CongoDB -> TsmDB (identifiers, files and namespaces changed).
|
||||||
|
- ts/index.ts now exports tsmdb instead of congodb (public API change; consumers must update imports).
|
||||||
|
- All congodb sources under ts/congodb were removed and equivalent implementations added under ts/tsmdb (errors, engines, storage adapters, server, handlers, WireProtocol, types).
|
||||||
|
- Readme and usage examples updated to reference TsmDB/tsmdb and example code updated accordingly.
|
||||||
|
- Tests renamed/updated from test.congodb.ts -> test.tsmdb.ts to exercise the new tsmdb export and server.
|
||||||
|
|
||||||
|
## 2026-01-31 - 2.2.0 - feat(readme)
|
||||||
|
update README with expanded documentation covering CongoDB and SmartMongo, installation, quick start examples, architecture, usage examples, and legal/company information
|
||||||
|
|
||||||
|
- Completely expanded README: added detailed overview for SmartMongo and new CongoDB (wire-protocol server)
|
||||||
|
- Added Quick Start examples for both SmartMongo and CongoDB (TypeScript/ESM snippets)
|
||||||
|
- Included installation instructions for npm and pnpm and issue reporting/security guidance
|
||||||
|
- Added architecture diagram, example tests, and storage/engine descriptions
|
||||||
|
- Clarified license, trademark, and company contact information
|
||||||
|
- Large non-functional documentation-only change (+398 -44)
|
||||||
|
|
||||||
|
## 2026-01-31 - 2.1.0 - feat(congodb)
|
||||||
|
implement CongoDB MongoDB wire-protocol compatible in-memory server and APIs
|
||||||
|
|
||||||
|
- Add full congodb module: CongoServer, WireProtocol, CommandRouter and handlers (Hello, Insert, Find, Update, Delete, Aggregate, Index, Admin).
|
||||||
|
- Implement query/update/aggregation/index/transaction engines (QueryEngine, UpdateEngine, AggregationEngine, IndexEngine, TransactionEngine) and OpLog for change stream support.
|
||||||
|
- Add storage adapters: in-memory (MemoryStorageAdapter) and file-backed (FileStorageAdapter) with persistence and oplog support.
|
||||||
|
- Introduce types/interfaces and rich error classes (CongoErrors) plus congodb.plugins re-exports (bson, mingo, smartfs, smartpath, smartrx).
|
||||||
|
- Add many server-side utilities: IndexEngine, Aggregation helpers ($lookup, $graphLookup, $merge, $facet, $unionWith), cursor management and command routing.
|
||||||
|
- Add integration tests for CongoDB using official mongodb MongoClient (test/test.congodb.ts) and update unit test entry (test/test.ts) to use tstest tapbundle.
|
||||||
|
- Export congodb from ts/index.ts and update package.json: bump devDependencies, add runtime deps (mongodb, bson, mingo), add new @push.rocks/* deps and dev tool versions.
|
||||||
|
- Update readme.hints.md with CongoDB architecture, usage examples and supported commands.
|
||||||
|
- Update npmextra.json metadata and release/registry config and reorganize tsdoc mappings.
|
||||||
|
|
||||||
## 2025-11-17 - 2.0.14 - fix(smartmongo.plugins)
|
## 2025-11-17 - 2.0.14 - fix(smartmongo.plugins)
|
||||||
Use default import for mongodb-memory-server (Deno compatibility), update hints and bump package version to 2.0.13
|
Use default import for mongodb-memory-server (Deno compatibility), update hints and bump package version to 2.0.13
|
||||||
|
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
{
|
{
|
||||||
"gitzone": {
|
"@git.zone/cli": {
|
||||||
"projectType": "npm",
|
"projectType": "npm",
|
||||||
"module": {
|
"module": {
|
||||||
"githost": "code.foss.global",
|
"githost": "code.foss.global",
|
||||||
@@ -18,13 +18,23 @@
|
|||||||
"database management",
|
"database management",
|
||||||
"typescript"
|
"typescript"
|
||||||
]
|
]
|
||||||
}
|
|
||||||
},
|
},
|
||||||
"npmci": {
|
"release": {
|
||||||
"npmGlobalTools": [],
|
"registries": [
|
||||||
"npmAccessLevel": "public"
|
"https://verdaccio.lossless.digital",
|
||||||
|
"https://registry.npmjs.org"
|
||||||
|
],
|
||||||
|
"accessLevel": "public"
|
||||||
},
|
},
|
||||||
"tsdoc": {
|
"services": [
|
||||||
|
"mongodb",
|
||||||
|
"minio"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"@git.zone/tsdoc": {
|
||||||
"legal": "\n## License and Legal Information\n\nThis repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. \n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or if you require further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n"
|
"legal": "\n## License and Legal Information\n\nThis repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. \n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or if you require further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n"
|
||||||
|
},
|
||||||
|
"@ship.zone/szci": {
|
||||||
|
"npmGlobalTools": []
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
29
package.json
29
package.json
@@ -1,31 +1,36 @@
|
|||||||
{
|
{
|
||||||
"name": "@push.rocks/smartmongo",
|
"name": "@push.rocks/smartmongo",
|
||||||
"version": "2.0.14",
|
"version": "4.3.0",
|
||||||
"private": false,
|
"private": false,
|
||||||
"description": "A module for creating and managing a local MongoDB instance for testing purposes.",
|
"description": "A module for creating and managing a local MongoDB instance for testing purposes.",
|
||||||
"main": "dist_ts/index.js",
|
"exports": {
|
||||||
"typings": "dist_ts/index.d.ts",
|
".": "./dist_ts/index.js"
|
||||||
|
},
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"author": "Lossless GmbH",
|
"author": "Task Venture Capital GmbH",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"test": "(tstest test/ --web)",
|
"test": "(tstest test/. --verbose --logfile --timeout 60)",
|
||||||
"build": "(tsbuild --web --allowimplicitany)",
|
"build": "(tsbuild --web)",
|
||||||
"buildDocs": "tsdoc"
|
"buildDocs": "tsdoc"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@git.zone/tsbuild": "^2.1.66",
|
"@git.zone/tsbuild": "^4.1.2",
|
||||||
"@git.zone/tsbundle": "^2.0.8",
|
"@git.zone/tsbundle": "^2.8.3",
|
||||||
"@git.zone/tsrun": "^1.2.44",
|
"@git.zone/tsrun": "^2.0.1",
|
||||||
"@git.zone/tstest": "^1.0.77",
|
"@git.zone/tstest": "^3.1.8",
|
||||||
"@push.rocks/tapbundle": "^5.0.12",
|
"@types/node": "^25.1.0",
|
||||||
"@types/node": "^22.14.0"
|
"mongodb": "^7.0.0"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@push.rocks/mongodump": "^1.0.7",
|
"@push.rocks/mongodump": "^1.0.7",
|
||||||
"@push.rocks/smartdata": "^5.0.23",
|
"@push.rocks/smartdata": "^5.0.23",
|
||||||
|
"@push.rocks/smartfs": "^1.3.1",
|
||||||
"@push.rocks/smartpath": "^5.0.11",
|
"@push.rocks/smartpath": "^5.0.11",
|
||||||
"@push.rocks/smartpromise": "^4.0.3",
|
"@push.rocks/smartpromise": "^4.0.3",
|
||||||
|
"@push.rocks/smartrx": "^3.0.0",
|
||||||
|
"bson": "^6.10.0",
|
||||||
|
"mingo": "^7.2.0",
|
||||||
"mongodb-memory-server": "^10.1.4"
|
"mongodb-memory-server": "^10.1.4"
|
||||||
},
|
},
|
||||||
"browserslist": [
|
"browserslist": [
|
||||||
|
|||||||
6028
pnpm-lock.yaml
generated
6028
pnpm-lock.yaml
generated
File diff suppressed because it is too large
Load Diff
@@ -1,4 +0,0 @@
|
|||||||
onlyBuiltDependencies:
|
|
||||||
- esbuild
|
|
||||||
- mongodb-memory-server
|
|
||||||
- puppeteer
|
|
||||||
@@ -8,3 +8,77 @@
|
|||||||
- This works in both Node.js and Deno environments
|
- This works in both Node.js and Deno environments
|
||||||
- **Why:** Deno wraps CommonJS exports in a `default` property, so default imports are required
|
- **Why:** Deno wraps CommonJS exports in a `default` property, so default imports are required
|
||||||
- Fixed in version 2.0.13 (changed from `import * as mongoPlugin`)
|
- Fixed in version 2.0.13 (changed from `import * as mongoPlugin`)
|
||||||
|
|
||||||
|
## TsmDB - MongoDB Wire Protocol Server
|
||||||
|
|
||||||
|
### Architecture
|
||||||
|
TsmDB implements the MongoDB binary wire protocol (OP_MSG, OP_QUERY) allowing official MongoDB drivers to connect directly.
|
||||||
|
|
||||||
|
```
|
||||||
|
Official MongoClient → TCP (wire protocol) → TsmdbServer → Engines → Storage
|
||||||
|
(mongodb npm) OP_MSG/BSON (port)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Module Structure
|
||||||
|
```
|
||||||
|
ts/tsmdb/
|
||||||
|
├── server/ # Wire protocol server
|
||||||
|
│ ├── TsmdbServer.ts # TCP server, connection handling
|
||||||
|
│ ├── WireProtocol.ts # OP_MSG/OP_QUERY parsing & encoding
|
||||||
|
│ ├── CommandRouter.ts # Route commands to handlers
|
||||||
|
│ └── handlers/ # Command implementations
|
||||||
|
│ ├── HelloHandler.ts # hello/isMaster handshake
|
||||||
|
│ ├── FindHandler.ts # find, getMore, killCursors, count, distinct
|
||||||
|
│ ├── InsertHandler.ts # insert
|
||||||
|
│ ├── UpdateHandler.ts # update, findAndModify
|
||||||
|
│ ├── DeleteHandler.ts # delete
|
||||||
|
│ ├── AggregateHandler.ts # aggregate
|
||||||
|
│ ├── IndexHandler.ts # createIndexes, dropIndexes, listIndexes
|
||||||
|
│ └── AdminHandler.ts # ping, listDatabases, listCollections, etc.
|
||||||
|
│
|
||||||
|
├── engine/ # Core logic (reused)
|
||||||
|
│ ├── QueryEngine.ts # Query filtering with mingo
|
||||||
|
│ ├── UpdateEngine.ts # Update operations
|
||||||
|
│ ├── AggregationEngine.ts # Aggregation pipelines
|
||||||
|
│ ├── IndexEngine.ts # Index management
|
||||||
|
│ └── TransactionEngine.ts # Transaction support
|
||||||
|
│
|
||||||
|
├── storage/ # Storage layer
|
||||||
|
│ ├── IStorageAdapter.ts # Interface
|
||||||
|
│ ├── MemoryStorageAdapter.ts
|
||||||
|
│ └── FileStorageAdapter.ts
|
||||||
|
│
|
||||||
|
└── types/interfaces.ts # Type definitions
|
||||||
|
```
|
||||||
|
|
||||||
|
### Usage Example
|
||||||
|
```typescript
|
||||||
|
import { TsmdbServer } from '@push.rocks/smartmongo/tsmdb';
|
||||||
|
import { MongoClient } from 'mongodb';
|
||||||
|
|
||||||
|
// Start server
|
||||||
|
const server = new TsmdbServer({ port: 27117 });
|
||||||
|
await server.start();
|
||||||
|
|
||||||
|
// Connect with official MongoDB driver
|
||||||
|
const client = new MongoClient('mongodb://127.0.0.1:27117', {
|
||||||
|
directConnection: true
|
||||||
|
});
|
||||||
|
await client.connect();
|
||||||
|
|
||||||
|
// Use like any MongoDB instance
|
||||||
|
const db = client.db('mydb');
|
||||||
|
await db.collection('users').insertOne({ name: 'John' });
|
||||||
|
const user = await db.collection('users').findOne({ name: 'John' });
|
||||||
|
|
||||||
|
// Cleanup
|
||||||
|
await client.close();
|
||||||
|
await server.stop();
|
||||||
|
```
|
||||||
|
|
||||||
|
### Supported Commands
|
||||||
|
- **Handshake**: hello, isMaster
|
||||||
|
- **CRUD**: find, insert, update, delete, findAndModify, getMore, killCursors
|
||||||
|
- **Aggregation**: aggregate, count, distinct
|
||||||
|
- **Indexes**: createIndexes, dropIndexes, listIndexes
|
||||||
|
- **Admin**: ping, listDatabases, listCollections, drop, dropDatabase, create, serverStatus, buildInfo
|
||||||
|
|||||||
659
readme.md
659
readme.md
@@ -1,104 +1,677 @@
|
|||||||
# @push.rocks/smartmongo
|
# @push.rocks/smartmongo
|
||||||
|
|
||||||
create a local mongodb for testing
|
A powerful MongoDB toolkit for testing and development — featuring a real MongoDB memory server (**SmartMongo**), an ultra-fast wire-protocol-compatible in-memory database server (**TsmDB**), and a zero-config local database (**LocalTsmDb**). 🚀
|
||||||
|
|
||||||
## Install
|
## Install
|
||||||
|
|
||||||
To start using @push.rocks/smartmongo in your project, you first need to install it via npm. You can do this by running the following command in your terminal:
|
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
npm install @push.rocks/smartmongo --save-dev
|
npm install @push.rocks/smartmongo --save-dev
|
||||||
|
# or
|
||||||
|
pnpm add -D @push.rocks/smartmongo
|
||||||
```
|
```
|
||||||
|
|
||||||
This will add `@push.rocks/smartmongo` as a development dependency to your project because it's typically used for testing purposes.
|
## Issue Reporting and Security
|
||||||
|
|
||||||
## Usage
|
For reporting bugs, issues, or security vulnerabilities, please visit [community.foss.global/](https://community.foss.global/). This is the central community hub for all issue reporting. Developers who sign and comply with our contribution agreement and go through identification can also get a [code.foss.global/](https://code.foss.global/) account to submit Pull Requests directly.
|
||||||
|
|
||||||
The `@push.rocks/smartmongo` package provides a convenient way to spin up a local MongoDB instance, primarily for testing purposes. It's designed to simplify the process of configuring and managing a MongoDB replica set during development or in CI/CD pipelines. Below, we present a comprehensive guide on how to utilize the full feature set of this module, employing ESM syntax and TypeScript.
|
## Overview
|
||||||
|
|
||||||
### Setting Up
|
`@push.rocks/smartmongo` provides three powerful approaches for MongoDB in testing and development:
|
||||||
|
|
||||||
To get started, you must first import the `SmartMongo` class from the package. This class is responsible for handling the MongoDB instances.
|
| Feature | SmartMongo | TsmDB | LocalTsmDb |
|
||||||
|
|---------|------------|-------|------------|
|
||||||
|
| **Type** | Real MongoDB (memory server) | Wire protocol server | Zero-config local DB |
|
||||||
|
| **Speed** | ~2-5s startup | ⚡ Instant (~5ms) | ⚡ Instant + auto-connect |
|
||||||
|
| **Compatibility** | 100% MongoDB | MongoDB driver compatible | MongoDB driver compatible |
|
||||||
|
| **Dependencies** | Downloads MongoDB binary | Zero external deps | Zero external deps |
|
||||||
|
| **Replication** | ✅ Full replica set | Single node | Single node |
|
||||||
|
| **Persistence** | Dump to directory | Memory or file | File-based (automatic) |
|
||||||
|
| **Use Case** | Integration testing | Unit testing, CI/CD | Quick prototyping, local dev |
|
||||||
|
|
||||||
|
## 🚀 Quick Start
|
||||||
|
|
||||||
|
### Option 1: LocalTsmDb (Zero-Config Local Database) ⭐ NEW
|
||||||
|
|
||||||
|
The easiest way to get started — just point it at a folder and you have a persistent MongoDB-compatible database with automatic port discovery!
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { LocalTsmDb } from '@push.rocks/smartmongo';
|
||||||
|
|
||||||
|
// Create a local database backed by files
|
||||||
|
const db = new LocalTsmDb({ folderPath: './my-data' });
|
||||||
|
|
||||||
|
// Start and get a connected MongoDB client
|
||||||
|
const client = await db.start();
|
||||||
|
|
||||||
|
// Use exactly like MongoDB
|
||||||
|
const users = client.db('myapp').collection('users');
|
||||||
|
await users.insertOne({ name: 'Alice', email: 'alice@example.com' });
|
||||||
|
|
||||||
|
const user = await users.findOne({ name: 'Alice' });
|
||||||
|
console.log(user); // { _id: ObjectId(...), name: 'Alice', email: 'alice@example.com' }
|
||||||
|
|
||||||
|
// Data persists to disk automatically!
|
||||||
|
await db.stop();
|
||||||
|
|
||||||
|
// Later... data is still there
|
||||||
|
const db2 = new LocalTsmDb({ folderPath: './my-data' });
|
||||||
|
const client2 = await db2.start();
|
||||||
|
const savedUser = await client2.db('myapp').collection('users').findOne({ name: 'Alice' });
|
||||||
|
// savedUser exists!
|
||||||
|
```
|
||||||
|
|
||||||
|
### Option 2: TsmDB (Wire Protocol Server)
|
||||||
|
|
||||||
|
A lightweight, pure TypeScript MongoDB-compatible server — use the official `mongodb` driver directly!
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { tsmdb } from '@push.rocks/smartmongo';
|
||||||
|
import { MongoClient } from 'mongodb';
|
||||||
|
|
||||||
|
// Start TsmDB server
|
||||||
|
const server = new tsmdb.TsmdbServer({ port: 27017 });
|
||||||
|
await server.start();
|
||||||
|
|
||||||
|
// Connect with the official MongoDB driver
|
||||||
|
const client = new MongoClient('mongodb://127.0.0.1:27017');
|
||||||
|
await client.connect();
|
||||||
|
|
||||||
|
// Use exactly like real MongoDB
|
||||||
|
const db = client.db('myapp');
|
||||||
|
await db.collection('users').insertOne({ name: 'Alice', age: 30 });
|
||||||
|
|
||||||
|
const user = await db.collection('users').findOne({ name: 'Alice' });
|
||||||
|
console.log(user); // { _id: ObjectId(...), name: 'Alice', age: 30 }
|
||||||
|
|
||||||
|
// Clean up
|
||||||
|
await client.close();
|
||||||
|
await server.stop();
|
||||||
|
```
|
||||||
|
|
||||||
|
### Option 3: SmartMongo (Real MongoDB)
|
||||||
|
|
||||||
|
Spin up a real MongoDB replica set in memory — perfect for integration tests that need full MongoDB compatibility.
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
import { SmartMongo } from '@push.rocks/smartmongo';
|
import { SmartMongo } from '@push.rocks/smartmongo';
|
||||||
|
|
||||||
|
// Start a MongoDB replica set
|
||||||
|
const mongo = await SmartMongo.createAndStart();
|
||||||
|
|
||||||
|
// Get connection details
|
||||||
|
const descriptor = await mongo.getMongoDescriptor();
|
||||||
|
console.log(descriptor.mongoDbUrl); // mongodb://127.0.0.1:xxxxx/...
|
||||||
|
|
||||||
|
// Use with your MongoDB client or ORM
|
||||||
|
// ... run your tests ...
|
||||||
|
|
||||||
|
// Clean up
|
||||||
|
await mongo.stop();
|
||||||
```
|
```
|
||||||
|
|
||||||
### Creating and Starting a MongoDB Instance
|
## 📖 LocalTsmDb API
|
||||||
|
|
||||||
With `SmartMongo`, you can easily create and start a MongoDB replica set. You can specify the number of replica instances; however, if not specified, it defaults to 1.
|
The simplest option for local development and prototyping — zero config, auto port discovery, and automatic persistence.
|
||||||
|
|
||||||
|
### Basic Usage
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
async function setupMongoDB() {
|
import { LocalTsmDb } from '@push.rocks/smartmongo';
|
||||||
const smartMongoInstance = await SmartMongo.createAndStart(1); // Number of replicas is optional
|
|
||||||
return smartMongoInstance;
|
|
||||||
}
|
|
||||||
|
|
||||||
const myDbInstance = await setupMongoDB();
|
const db = new LocalTsmDb({
|
||||||
|
folderPath: './data', // Required: where to store data
|
||||||
|
port: 27017, // Optional: defaults to auto-discovery
|
||||||
|
host: '127.0.0.1', // Optional: bind address
|
||||||
|
});
|
||||||
|
|
||||||
|
// Start and get connected client
|
||||||
|
const client = await db.start();
|
||||||
|
|
||||||
|
// Access the underlying server if needed
|
||||||
|
const server = db.getServer();
|
||||||
|
const uri = db.getConnectionUri();
|
||||||
|
|
||||||
|
// Check status
|
||||||
|
console.log(db.running); // true
|
||||||
|
|
||||||
|
// Stop when done
|
||||||
|
await db.stop();
|
||||||
```
|
```
|
||||||
|
|
||||||
After invoking `createAndStart`, an instance of MongoDB is spun up and is ready for use. The `createAndStart` function returns a `SmartMongo` instance which can be interacted with for further operations.
|
### Features
|
||||||
|
|
||||||
### Accessing MongoDB Connection Information
|
- 🔍 **Auto Port Discovery** — Automatically finds an available port if 27017 is in use
|
||||||
|
- 💾 **Automatic Persistence** — Data saved to files, survives restarts
|
||||||
|
- 🔌 **Pre-connected Client** — `start()` returns a ready-to-use MongoDB client
|
||||||
|
- 🎯 **Zero Config** — Just specify a folder path and you're good to go
|
||||||
|
|
||||||
After instantiation, you might want to connect your application or test suite to the MongoDB instance. The `getMongoDescriptor` method facilitates this by providing essential connection details.
|
## 📖 SmartMongo API
|
||||||
|
|
||||||
|
### Creating an Instance
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
const mongoDescriptor = await myDbInstance.getMongoDescriptor();
|
import { SmartMongo } from '@push.rocks/smartmongo';
|
||||||
console.log(mongoDescriptor.mongoDbUrl); // Use this URL to connect with Mongoose or MongoDB clients.
|
|
||||||
|
// Default: single replica
|
||||||
|
const mongo = await SmartMongo.createAndStart();
|
||||||
|
|
||||||
|
// Multiple replicas for testing replication
|
||||||
|
const mongo = await SmartMongo.createAndStart(3);
|
||||||
```
|
```
|
||||||
|
|
||||||
### Stopping and Cleaning Up
|
### Getting Connection Details
|
||||||
|
|
||||||
Once your tests have completed or you're done using the MongoDB instance, it’s crucial to properly stop and clean up the resources. `@push.rocks/smartmongo` provides two methods for this purpose:
|
|
||||||
|
|
||||||
1. **stop()**: Stops the MongoDB instance without persisting any data.
|
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
await myDbInstance.stop();
|
const descriptor = await mongo.getMongoDescriptor();
|
||||||
|
// {
|
||||||
|
// mongoDbName: 'smartmongo_testdatabase',
|
||||||
|
// mongoDbUrl: 'mongodb://127.0.0.1:xxxxx/?replicaSet=testset'
|
||||||
|
// }
|
||||||
```
|
```
|
||||||
|
|
||||||
2. **stopAndDumpToDir(dirPath)**: Stops the MongoDB instance and persists the data to the specified directory. This is useful if you need to examine the data post-test or reuse it in subsequent runs.
|
### Stopping & Cleanup
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
await myDbInstance.stopAndDumpToDir('./path/to/dump');
|
// Simple stop (data discarded)
|
||||||
|
await mongo.stop();
|
||||||
|
|
||||||
|
// Stop and dump data to disk for inspection
|
||||||
|
await mongo.stopAndDumpToDir('./test-data');
|
||||||
|
|
||||||
|
// With custom file naming
|
||||||
|
await mongo.stopAndDumpToDir('./test-data', (doc) => `${doc.collection}-${doc._id}.bson`);
|
||||||
```
|
```
|
||||||
|
|
||||||
### Advanced Usage
|
## 🔧 TsmDB API
|
||||||
|
|
||||||
`@push.rocks/smartmongo` also provides advanced features for dumping the database and configuring MongoDB replica sets. These features can be particularly useful for complex testing scenarios or when specific MongoDB behaviors need to be emulated.
|
### Server Configuration
|
||||||
|
|
||||||
#### Dumping Data
|
|
||||||
|
|
||||||
To dump the MongoDB data for inspection or backup purposes, use the `stopAndDumpToDir` method. This method optionally takes a function to customize the naming scheme of the dumped files based on the document content.
|
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
await myDbInstance.stopAndDumpToDir('./path/to/dump', (doc) => {
|
import { tsmdb } from '@push.rocks/smartmongo';
|
||||||
return `customNameBasedOnDoc-${doc._id}.bson`;
|
|
||||||
|
const server = new tsmdb.TsmdbServer({
|
||||||
|
port: 27017, // Default MongoDB port
|
||||||
|
host: '127.0.0.1', // Bind address
|
||||||
|
storage: 'memory', // 'memory' or 'file'
|
||||||
|
storagePath: './data', // For file-based storage
|
||||||
|
});
|
||||||
|
|
||||||
|
await server.start();
|
||||||
|
console.log(server.getConnectionUri()); // mongodb://127.0.0.1:27017
|
||||||
|
|
||||||
|
// Server properties
|
||||||
|
console.log(server.running); // true
|
||||||
|
console.log(server.getUptime()); // seconds
|
||||||
|
console.log(server.getConnectionCount()); // active connections
|
||||||
|
|
||||||
|
await server.stop();
|
||||||
|
```
|
||||||
|
|
||||||
|
### Supported MongoDB Operations
|
||||||
|
|
||||||
|
TsmDB supports the core MongoDB operations via the wire protocol:
|
||||||
|
|
||||||
|
#### 🔹 CRUD Operations
|
||||||
|
```typescript
|
||||||
|
// Insert
|
||||||
|
await collection.insertOne({ name: 'Bob' });
|
||||||
|
await collection.insertMany([{ a: 1 }, { a: 2 }]);
|
||||||
|
|
||||||
|
// Find
|
||||||
|
const doc = await collection.findOne({ name: 'Bob' });
|
||||||
|
const docs = await collection.find({ age: { $gte: 18 } }).toArray();
|
||||||
|
|
||||||
|
// Update
|
||||||
|
await collection.updateOne({ name: 'Bob' }, { $set: { age: 25 } });
|
||||||
|
await collection.updateMany({ active: false }, { $set: { archived: true } });
|
||||||
|
|
||||||
|
// Delete
|
||||||
|
await collection.deleteOne({ name: 'Bob' });
|
||||||
|
await collection.deleteMany({ archived: true });
|
||||||
|
|
||||||
|
// Replace
|
||||||
|
await collection.replaceOne({ _id: id }, { name: 'New Bob', age: 30 });
|
||||||
|
|
||||||
|
// Find and Modify
|
||||||
|
const result = await collection.findOneAndUpdate(
|
||||||
|
{ name: 'Bob' },
|
||||||
|
{ $inc: { visits: 1 } },
|
||||||
|
{ returnDocument: 'after' }
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
#### 🔹 Query Operators
|
||||||
|
```typescript
|
||||||
|
// Comparison
|
||||||
|
{ age: { $eq: 25 } }
|
||||||
|
{ age: { $ne: 25 } }
|
||||||
|
{ age: { $gt: 18, $lt: 65 } }
|
||||||
|
{ age: { $gte: 18, $lte: 65 } }
|
||||||
|
{ status: { $in: ['active', 'pending'] } }
|
||||||
|
{ status: { $nin: ['deleted'] } }
|
||||||
|
|
||||||
|
// Logical
|
||||||
|
{ $and: [{ age: { $gte: 18 } }, { active: true }] }
|
||||||
|
{ $or: [{ status: 'active' }, { admin: true }] }
|
||||||
|
{ $not: { status: 'deleted' } }
|
||||||
|
|
||||||
|
// Element
|
||||||
|
{ email: { $exists: true } }
|
||||||
|
{ type: { $type: 'string' } }
|
||||||
|
|
||||||
|
// Array
|
||||||
|
{ tags: { $all: ['mongodb', 'database'] } }
|
||||||
|
{ scores: { $elemMatch: { $gte: 80, $lt: 90 } } }
|
||||||
|
{ tags: { $size: 3 } }
|
||||||
|
```
|
||||||
|
|
||||||
|
#### 🔹 Update Operators
|
||||||
|
```typescript
|
||||||
|
{ $set: { name: 'New Name' } }
|
||||||
|
{ $unset: { tempField: '' } }
|
||||||
|
{ $inc: { count: 1 } }
|
||||||
|
{ $mul: { price: 1.1 } }
|
||||||
|
{ $min: { lowScore: 50 } }
|
||||||
|
{ $max: { highScore: 100 } }
|
||||||
|
{ $push: { tags: 'new-tag' } }
|
||||||
|
{ $pull: { tags: 'old-tag' } }
|
||||||
|
{ $addToSet: { tags: 'unique-tag' } }
|
||||||
|
{ $pop: { queue: 1 } } // Remove last
|
||||||
|
{ $pop: { queue: -1 } } // Remove first
|
||||||
|
```
|
||||||
|
|
||||||
|
#### 🔹 Aggregation Pipeline
|
||||||
|
```typescript
|
||||||
|
const results = await collection.aggregate([
|
||||||
|
{ $match: { status: 'active' } },
|
||||||
|
{ $group: { _id: '$category', total: { $sum: '$amount' } } },
|
||||||
|
{ $sort: { total: -1 } },
|
||||||
|
{ $limit: 10 },
|
||||||
|
{ $project: { category: '$_id', total: 1, _id: 0 } }
|
||||||
|
]).toArray();
|
||||||
|
```
|
||||||
|
|
||||||
|
Supported stages: `$match`, `$project`, `$group`, `$sort`, `$limit`, `$skip`, `$unwind`, `$lookup`, `$addFields`, `$count`, `$facet`, and more.
|
||||||
|
|
||||||
|
#### 🔹 Index Operations
|
||||||
|
```typescript
|
||||||
|
await collection.createIndex({ email: 1 }, { unique: true });
|
||||||
|
await collection.createIndex({ name: 1, age: -1 });
|
||||||
|
const indexes = await collection.listIndexes().toArray();
|
||||||
|
await collection.dropIndex('email_1');
|
||||||
|
```
|
||||||
|
|
||||||
|
#### 🔹 Database Operations
|
||||||
|
```typescript
|
||||||
|
// List databases
|
||||||
|
const dbs = await client.db().admin().listDatabases();
|
||||||
|
|
||||||
|
// List collections
|
||||||
|
const collections = await db.listCollections().toArray();
|
||||||
|
|
||||||
|
// Create/drop collections
|
||||||
|
await db.createCollection('newcollection');
|
||||||
|
await db.dropCollection('oldcollection');
|
||||||
|
|
||||||
|
// Drop database
|
||||||
|
await db.dropDatabase();
|
||||||
|
```
|
||||||
|
|
||||||
|
#### 🔹 Count & Distinct
|
||||||
|
```typescript
|
||||||
|
// Count documents
|
||||||
|
const total = await collection.countDocuments({});
|
||||||
|
const active = await collection.countDocuments({ status: 'active' });
|
||||||
|
const estimated = await collection.estimatedDocumentCount();
|
||||||
|
|
||||||
|
// Distinct values
|
||||||
|
const departments = await collection.distinct('department');
|
||||||
|
const activeDepts = await collection.distinct('department', { status: 'active' });
|
||||||
|
```
|
||||||
|
|
||||||
|
#### 🔹 Bulk Operations
|
||||||
|
```typescript
|
||||||
|
const result = await collection.bulkWrite([
|
||||||
|
{ insertOne: { document: { name: 'Bulk1' } } },
|
||||||
|
{ updateOne: { filter: { name: 'John' }, update: { $set: { bulk: true } } } },
|
||||||
|
{ deleteOne: { filter: { name: 'Expired' } } },
|
||||||
|
{ replaceOne: { filter: { _id: id }, replacement: { name: 'Replaced' } } }
|
||||||
|
]);
|
||||||
|
|
||||||
|
console.log(result.insertedCount); // 1
|
||||||
|
console.log(result.modifiedCount); // 1
|
||||||
|
console.log(result.deletedCount); // 1
|
||||||
|
```
|
||||||
|
|
||||||
|
### Storage Adapters
|
||||||
|
|
||||||
|
TsmDB supports pluggable storage with data integrity features:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// In-memory (default) - fast, data lost on stop
|
||||||
|
const server = new tsmdb.TsmdbServer({ storage: 'memory' });
|
||||||
|
|
||||||
|
// In-memory with persistence - periodic snapshots to disk
|
||||||
|
const server = new tsmdb.TsmdbServer({
|
||||||
|
storage: 'memory',
|
||||||
|
persistPath: './data/snapshot.json',
|
||||||
|
persistIntervalMs: 30000 // Save every 30 seconds
|
||||||
|
});
|
||||||
|
|
||||||
|
// File-based - persistent storage with optional checksums
|
||||||
|
import { tsmdb } from '@push.rocks/smartmongo';
|
||||||
|
|
||||||
|
const server = new tsmdb.TsmdbServer({
|
||||||
|
storage: 'file',
|
||||||
|
storagePath: './data/tsmdb'
|
||||||
});
|
});
|
||||||
```
|
```
|
||||||
|
|
||||||
Using `@push.rocks/smartmongo` significantly simplifies the process of managing MongoDB instances for local testing environments. It abstracts away the complexity of starting, operating, and tearing down MongoDB replica sets, allowing developers to focus on building and testing their applications.
|
## ⚡ Performance & Reliability Features
|
||||||
|
|
||||||
### Conclusion
|
TsmDB includes enterprise-grade features for robustness:
|
||||||
|
|
||||||
`@push.rocks/smartmongo` serves as a powerful tool in a developer's arsenal for efficiently configuring, running, and managing MongoDB instances in testing scenarios. By following the above guide, developers can leverage MongoDB in their projects with minimal setup and gain valuable insights into their applications' data interactions in a controlled and reproducible environment.
|
### 🔍 Index-Accelerated Queries
|
||||||
|
|
||||||
|
Indexes are automatically used to accelerate queries. Instead of scanning all documents, TsmDB uses:
|
||||||
|
|
||||||
|
- **Hash indexes** for equality queries (`$eq`, `$in`)
|
||||||
|
- **B-tree indexes** for range queries (`$gt`, `$gte`, `$lt`, `$lte`)
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Create an index
|
||||||
|
await collection.createIndex({ email: 1 });
|
||||||
|
await collection.createIndex({ age: 1 });
|
||||||
|
|
||||||
|
// These queries will use the index (fast!)
|
||||||
|
await collection.findOne({ email: 'alice@example.com' }); // Uses hash lookup
|
||||||
|
await collection.find({ age: { $gte: 18, $lt: 65 } }); // Uses B-tree range scan
|
||||||
|
```
|
||||||
|
|
||||||
|
### 📊 Query Planner
|
||||||
|
|
||||||
|
TsmDB includes a query planner that analyzes queries and selects optimal execution strategies:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { tsmdb } from '@push.rocks/smartmongo';
|
||||||
|
|
||||||
|
// For debugging, you can access the query planner
|
||||||
|
const planner = new tsmdb.QueryPlanner(indexEngine);
|
||||||
|
const plan = await planner.plan(filter);
|
||||||
|
|
||||||
|
console.log(plan);
|
||||||
|
// {
|
||||||
|
// type: 'IXSCAN', // or 'IXSCAN_RANGE', 'COLLSCAN'
|
||||||
|
// indexName: 'email_1',
|
||||||
|
// selectivity: 0.01,
|
||||||
|
// indexCovering: true
|
||||||
|
// }
|
||||||
|
```
|
||||||
|
|
||||||
|
### 📝 Write-Ahead Logging (WAL)
|
||||||
|
|
||||||
|
For durability, TsmDB supports write-ahead logging:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { tsmdb } from '@push.rocks/smartmongo';
|
||||||
|
|
||||||
|
const wal = new tsmdb.WAL('./data/wal.log');
|
||||||
|
await wal.initialize();
|
||||||
|
|
||||||
|
// WAL entries include:
|
||||||
|
// - LSN (Log Sequence Number)
|
||||||
|
// - Timestamp
|
||||||
|
// - Operation type (insert, update, delete, checkpoint)
|
||||||
|
// - Document data (BSON serialized)
|
||||||
|
// - CRC32 checksum for integrity
|
||||||
|
|
||||||
|
// Recovery support
|
||||||
|
const entries = wal.getEntriesAfter(lastCheckpointLsn);
|
||||||
|
```
|
||||||
|
|
||||||
|
### 🔐 Session Management
|
||||||
|
|
||||||
|
TsmDB tracks client sessions with automatic timeout and transaction linking:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Sessions are automatically managed when using the MongoDB driver
|
||||||
|
const session = client.startSession();
|
||||||
|
|
||||||
|
try {
|
||||||
|
session.startTransaction();
|
||||||
|
await collection.insertOne({ name: 'Alice' }, { session });
|
||||||
|
await collection.updateOne({ name: 'Bob' }, { $inc: { balance: 100 } }, { session });
|
||||||
|
await session.commitTransaction();
|
||||||
|
} catch (error) {
|
||||||
|
await session.abortTransaction();
|
||||||
|
} finally {
|
||||||
|
session.endSession();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Session features:
|
||||||
|
// - Automatic session timeout (30 minutes default)
|
||||||
|
// - Transaction auto-abort on session expiry
|
||||||
|
// - Session activity tracking
|
||||||
|
```
|
||||||
|
|
||||||
|
### ✅ Data Integrity Checksums
|
||||||
|
|
||||||
|
File-based storage supports CRC32 checksums to detect corruption:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { tsmdb } from '@push.rocks/smartmongo';
|
||||||
|
|
||||||
|
// Checksums are used internally for WAL and data integrity
|
||||||
|
// Documents are checksummed on write, verified on read
|
||||||
|
```
|
||||||
|
|
||||||
|
### 📋 Supported Wire Protocol Commands
|
||||||
|
|
||||||
|
| Category | Commands |
|
||||||
|
|----------|----------|
|
||||||
|
| **Handshake** | `hello`, `isMaster` |
|
||||||
|
| **CRUD** | `find`, `insert`, `update`, `delete`, `findAndModify`, `getMore`, `killCursors` |
|
||||||
|
| **Aggregation** | `aggregate`, `count`, `distinct` |
|
||||||
|
| **Indexes** | `createIndexes`, `dropIndexes`, `listIndexes` |
|
||||||
|
| **Transactions** | `startTransaction`, `commitTransaction`, `abortTransaction` |
|
||||||
|
| **Sessions** | `startSession`, `endSessions` |
|
||||||
|
| **Admin** | `ping`, `listDatabases`, `listCollections`, `drop`, `dropDatabase`, `create`, `serverStatus`, `buildInfo`, `dbStats`, `collStats` |
|
||||||
|
|
||||||
|
TsmDB supports MongoDB wire protocol versions 0-21, compatible with MongoDB 3.6 through 7.0 drivers.
|
||||||
|
|
||||||
|
## 🧪 Testing Examples
|
||||||
|
|
||||||
|
### Jest/Mocha with LocalTsmDb
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { LocalTsmDb } from '@push.rocks/smartmongo';
|
||||||
|
import { MongoClient, Db } from 'mongodb';
|
||||||
|
|
||||||
|
let db: LocalTsmDb;
|
||||||
|
let client: MongoClient;
|
||||||
|
|
||||||
|
beforeAll(async () => {
|
||||||
|
db = new LocalTsmDb({ folderPath: './test-data' });
|
||||||
|
client = await db.start();
|
||||||
|
});
|
||||||
|
|
||||||
|
afterAll(async () => {
|
||||||
|
await db.stop();
|
||||||
|
});
|
||||||
|
|
||||||
|
beforeEach(async () => {
|
||||||
|
// Clean slate for each test
|
||||||
|
await client.db('test').dropDatabase();
|
||||||
|
});
|
||||||
|
|
||||||
|
test('should insert and find user', async () => {
|
||||||
|
const users = client.db('test').collection('users');
|
||||||
|
await users.insertOne({ name: 'Alice', email: 'alice@example.com' });
|
||||||
|
|
||||||
|
const user = await users.findOne({ name: 'Alice' });
|
||||||
|
expect(user?.email).toBe('alice@example.com');
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
### Jest/Mocha with TsmDB
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { tsmdb } from '@push.rocks/smartmongo';
|
||||||
|
import { MongoClient, Db } from 'mongodb';
|
||||||
|
|
||||||
|
let server: tsmdb.TsmdbServer;
|
||||||
|
let client: MongoClient;
|
||||||
|
let testDb: Db;
|
||||||
|
|
||||||
|
beforeAll(async () => {
|
||||||
|
server = new tsmdb.TsmdbServer({ port: 27117 });
|
||||||
|
await server.start();
|
||||||
|
|
||||||
|
client = new MongoClient('mongodb://127.0.0.1:27117');
|
||||||
|
await client.connect();
|
||||||
|
testDb = client.db('test');
|
||||||
|
});
|
||||||
|
|
||||||
|
afterAll(async () => {
|
||||||
|
await client.close();
|
||||||
|
await server.stop();
|
||||||
|
});
|
||||||
|
|
||||||
|
beforeEach(async () => {
|
||||||
|
await testDb.dropDatabase();
|
||||||
|
});
|
||||||
|
|
||||||
|
test('should insert and find user', async () => {
|
||||||
|
const users = testDb.collection('users');
|
||||||
|
await users.insertOne({ name: 'Alice', email: 'alice@example.com' });
|
||||||
|
|
||||||
|
const user = await users.findOne({ name: 'Alice' });
|
||||||
|
expect(user?.email).toBe('alice@example.com');
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
### With @push.rocks/tapbundle
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { expect, tap } from '@git.zone/tstest/tapbundle';
|
||||||
|
import { LocalTsmDb } from '@push.rocks/smartmongo';
|
||||||
|
|
||||||
|
let db: LocalTsmDb;
|
||||||
|
|
||||||
|
tap.test('setup', async () => {
|
||||||
|
db = new LocalTsmDb({ folderPath: './test-data' });
|
||||||
|
await db.start();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should perform CRUD operations', async () => {
|
||||||
|
const client = db.getClient();
|
||||||
|
const col = client.db('test').collection('items');
|
||||||
|
|
||||||
|
// Create
|
||||||
|
const result = await col.insertOne({ name: 'Widget', price: 9.99 });
|
||||||
|
expect(result.insertedId).toBeTruthy();
|
||||||
|
|
||||||
|
// Read
|
||||||
|
const item = await col.findOne({ name: 'Widget' });
|
||||||
|
expect(item?.price).toEqual(9.99);
|
||||||
|
|
||||||
|
// Update
|
||||||
|
await col.updateOne({ name: 'Widget' }, { $set: { price: 12.99 } });
|
||||||
|
const updated = await col.findOne({ name: 'Widget' });
|
||||||
|
expect(updated?.price).toEqual(12.99);
|
||||||
|
|
||||||
|
// Delete
|
||||||
|
await col.deleteOne({ name: 'Widget' });
|
||||||
|
const deleted = await col.findOne({ name: 'Widget' });
|
||||||
|
expect(deleted).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('teardown', async () => {
|
||||||
|
await db.stop();
|
||||||
|
});
|
||||||
|
|
||||||
|
export default tap.start();
|
||||||
|
```
|
||||||
|
|
||||||
|
## 🏗️ Architecture
|
||||||
|
|
||||||
|
### Module Structure
|
||||||
|
|
||||||
|
```
|
||||||
|
@push.rocks/smartmongo
|
||||||
|
├── SmartMongo → Real MongoDB memory server (mongodb-memory-server wrapper)
|
||||||
|
├── tsmdb → Wire protocol server with full engine stack
|
||||||
|
└── LocalTsmDb → Zero-config local database (convenience wrapper)
|
||||||
|
```
|
||||||
|
|
||||||
|
### TsmDB Wire Protocol Stack
|
||||||
|
|
||||||
|
```
|
||||||
|
┌─────────────────────────────────────────────────────────────┐
|
||||||
|
│ Official MongoDB Driver │
|
||||||
|
│ (mongodb npm) │
|
||||||
|
└─────────────────────────┬───────────────────────────────────┘
|
||||||
|
│ TCP + OP_MSG/BSON
|
||||||
|
▼
|
||||||
|
┌─────────────────────────────────────────────────────────────┐
|
||||||
|
│ TsmdbServer │
|
||||||
|
│ ┌──────────────┐ ┌──────────────┐ ┌──────────────────┐ │
|
||||||
|
│ │ WireProtocol │→ │CommandRouter │→ │ Handlers │ │
|
||||||
|
│ │ (OP_MSG) │ │ │ │ (Find, Insert..) │ │
|
||||||
|
│ └──────────────┘ └──────────────┘ └──────────────────┘ │
|
||||||
|
└─────────────────────────┬───────────────────────────────────┘
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
┌─────────────────────────────────────────────────────────────┐
|
||||||
|
│ Engines │
|
||||||
|
│ ┌─────────┐ ┌────────┐ ┌───────────┐ ┌───────┐ ┌───────┐ │
|
||||||
|
│ │ Query │ │ Update │ │Aggregation│ │ Index │ │Session│ │
|
||||||
|
│ │ Planner │ │ Engine │ │ Engine │ │Engine │ │Engine │ │
|
||||||
|
│ └─────────┘ └────────┘ └───────────┘ └───────┘ └───────┘ │
|
||||||
|
│ ┌──────────────────────┐ │
|
||||||
|
│ │ Transaction Engine │ │
|
||||||
|
│ └──────────────────────┘ │
|
||||||
|
└─────────────────────────┬───────────────────────────────────┘
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
┌─────────────────────────────────────────────────────────────┐
|
||||||
|
│ Storage Layer │
|
||||||
|
│ ┌──────────────────┐ ┌──────────────────┐ ┌──────────┐ │
|
||||||
|
│ │ MemoryStorage │ │ FileStorage │ │ WAL │ │
|
||||||
|
│ │ │ │ (+ Checksums) │ │ │ │
|
||||||
|
│ └──────────────────┘ └──────────────────┘ └──────────┘ │
|
||||||
|
└─────────────────────────────────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
### Key Components
|
||||||
|
|
||||||
|
| Component | Description |
|
||||||
|
|-----------|-------------|
|
||||||
|
| **WireProtocol** | Parses MongoDB OP_MSG binary protocol |
|
||||||
|
| **CommandRouter** | Routes commands to appropriate handlers |
|
||||||
|
| **QueryPlanner** | Analyzes queries and selects execution strategy |
|
||||||
|
| **IndexEngine** | Manages B-tree and hash indexes |
|
||||||
|
| **SessionEngine** | Tracks client sessions and timeouts |
|
||||||
|
| **TransactionEngine** | Handles ACID transaction semantics |
|
||||||
|
| **WAL** | Write-ahead logging for durability |
|
||||||
|
|
||||||
## License and Legal Information
|
## License and Legal Information
|
||||||
|
|
||||||
This repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository.
|
This repository contains open-source code licensed under the MIT License. A copy of the license can be found in the [LICENSE](./LICENSE) file.
|
||||||
|
|
||||||
**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.
|
**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.
|
||||||
|
|
||||||
### Trademarks
|
### Trademarks
|
||||||
|
|
||||||
This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.
|
This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH or third parties, and are not included within the scope of the MIT license granted herein.
|
||||||
|
|
||||||
|
Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines or the guidelines of the respective third-party owners, and any usage must be approved in writing. Third-party trademarks used herein are the property of their respective owners and used only in a descriptive manner, e.g. for an implementation of an API or similar.
|
||||||
|
|
||||||
### Company Information
|
### Company Information
|
||||||
|
|
||||||
Task Venture Capital GmbH
|
Task Venture Capital GmbH
|
||||||
Registered at District court Bremen HRB 35230 HB, Germany
|
Registered at District Court Bremen HRB 35230 HB, Germany
|
||||||
|
|
||||||
For any legal inquiries or if you require further information, please contact us via email at hello@task.vc.
|
For any legal inquiries or further information, please contact us via email at hello@task.vc.
|
||||||
|
|
||||||
By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.
|
By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
import { expect, tap } from '@push.rocks/tapbundle';
|
import { tap } from '@git.zone/tstest/tapbundle';
|
||||||
import * as smartmongo from '../ts/index.js';
|
import * as smartmongo from '../ts/index.js';
|
||||||
|
|
||||||
let smartmongoInstance: smartmongo.SmartMongo;
|
let smartmongoInstance: smartmongo.SmartMongo;
|
||||||
@@ -11,4 +11,4 @@ tap.test('should stop the instance', async () => {
|
|||||||
await smartmongoInstance.stopAndDumpToDir('.nogit/');
|
await smartmongoInstance.stopAndDumpToDir('.nogit/');
|
||||||
});
|
});
|
||||||
|
|
||||||
tap.start();
|
export default tap.start();
|
||||||
|
|||||||
232
test/test.tsmdb.checksum.ts
Normal file
232
test/test.tsmdb.checksum.ts
Normal file
@@ -0,0 +1,232 @@
|
|||||||
|
import { expect, tap } from '@git.zone/tstest/tapbundle';
|
||||||
|
import * as smartmongo from '../ts/index.js';
|
||||||
|
|
||||||
|
const {
|
||||||
|
calculateCRC32,
|
||||||
|
calculateCRC32Buffer,
|
||||||
|
calculateDocumentChecksum,
|
||||||
|
addChecksum,
|
||||||
|
verifyChecksum,
|
||||||
|
removeChecksum,
|
||||||
|
} = smartmongo.tsmdb;
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// CRC32 String Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('checksum: calculateCRC32 should return consistent value for same input', async () => {
|
||||||
|
const result1 = calculateCRC32('hello world');
|
||||||
|
const result2 = calculateCRC32('hello world');
|
||||||
|
expect(result1).toEqual(result2);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: calculateCRC32 should return different values for different inputs', async () => {
|
||||||
|
const result1 = calculateCRC32('hello');
|
||||||
|
const result2 = calculateCRC32('world');
|
||||||
|
expect(result1).not.toEqual(result2);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: calculateCRC32 should return a 32-bit unsigned integer', async () => {
|
||||||
|
const result = calculateCRC32('test string');
|
||||||
|
expect(result).toBeGreaterThanOrEqual(0);
|
||||||
|
expect(result).toBeLessThanOrEqual(0xFFFFFFFF);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: calculateCRC32 should handle empty string', async () => {
|
||||||
|
const result = calculateCRC32('');
|
||||||
|
expect(typeof result).toEqual('number');
|
||||||
|
expect(result).toBeGreaterThanOrEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: calculateCRC32 should handle special characters', async () => {
|
||||||
|
const result = calculateCRC32('hello\nworld\t!"#$%&\'()');
|
||||||
|
expect(typeof result).toEqual('number');
|
||||||
|
expect(result).toBeGreaterThanOrEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: calculateCRC32 should handle unicode characters', async () => {
|
||||||
|
const result = calculateCRC32('hello 世界 🌍');
|
||||||
|
expect(typeof result).toEqual('number');
|
||||||
|
expect(result).toBeGreaterThanOrEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// CRC32 Buffer Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('checksum: calculateCRC32Buffer should return consistent value for same input', async () => {
|
||||||
|
const buffer = Buffer.from('hello world');
|
||||||
|
const result1 = calculateCRC32Buffer(buffer);
|
||||||
|
const result2 = calculateCRC32Buffer(buffer);
|
||||||
|
expect(result1).toEqual(result2);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: calculateCRC32Buffer should return different values for different inputs', async () => {
|
||||||
|
const buffer1 = Buffer.from('hello');
|
||||||
|
const buffer2 = Buffer.from('world');
|
||||||
|
const result1 = calculateCRC32Buffer(buffer1);
|
||||||
|
const result2 = calculateCRC32Buffer(buffer2);
|
||||||
|
expect(result1).not.toEqual(result2);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: calculateCRC32Buffer should handle empty buffer', async () => {
|
||||||
|
const result = calculateCRC32Buffer(Buffer.from(''));
|
||||||
|
expect(typeof result).toEqual('number');
|
||||||
|
expect(result).toBeGreaterThanOrEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: calculateCRC32Buffer should handle binary data', async () => {
|
||||||
|
const buffer = Buffer.from([0x00, 0xFF, 0x7F, 0x80, 0x01]);
|
||||||
|
const result = calculateCRC32Buffer(buffer);
|
||||||
|
expect(typeof result).toEqual('number');
|
||||||
|
expect(result).toBeGreaterThanOrEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Document Checksum Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('checksum: calculateDocumentChecksum should return consistent value', async () => {
|
||||||
|
const doc = { name: 'John', age: 30 };
|
||||||
|
const result1 = calculateDocumentChecksum(doc);
|
||||||
|
const result2 = calculateDocumentChecksum(doc);
|
||||||
|
expect(result1).toEqual(result2);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: calculateDocumentChecksum should exclude _checksum field', async () => {
|
||||||
|
const doc1 = { name: 'John', age: 30 };
|
||||||
|
const doc2 = { name: 'John', age: 30, _checksum: 12345 };
|
||||||
|
const result1 = calculateDocumentChecksum(doc1);
|
||||||
|
const result2 = calculateDocumentChecksum(doc2);
|
||||||
|
expect(result1).toEqual(result2);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: calculateDocumentChecksum should handle empty document', async () => {
|
||||||
|
const result = calculateDocumentChecksum({});
|
||||||
|
expect(typeof result).toEqual('number');
|
||||||
|
expect(result).toBeGreaterThanOrEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: calculateDocumentChecksum should handle nested objects', async () => {
|
||||||
|
const doc = {
|
||||||
|
name: 'John',
|
||||||
|
address: {
|
||||||
|
street: '123 Main St',
|
||||||
|
city: 'Springfield',
|
||||||
|
zip: {
|
||||||
|
code: '12345',
|
||||||
|
plus4: '6789',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
const result = calculateDocumentChecksum(doc);
|
||||||
|
expect(typeof result).toEqual('number');
|
||||||
|
expect(result).toBeGreaterThanOrEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: calculateDocumentChecksum should handle arrays', async () => {
|
||||||
|
const doc = {
|
||||||
|
name: 'John',
|
||||||
|
tags: ['developer', 'tester', 'admin'],
|
||||||
|
scores: [95, 87, 92],
|
||||||
|
};
|
||||||
|
const result = calculateDocumentChecksum(doc);
|
||||||
|
expect(typeof result).toEqual('number');
|
||||||
|
expect(result).toBeGreaterThanOrEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Add/Verify/Remove Checksum Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('checksum: addChecksum should add _checksum field to document', async () => {
|
||||||
|
const doc = { name: 'John', age: 30 };
|
||||||
|
const docWithChecksum = addChecksum(doc);
|
||||||
|
|
||||||
|
expect('_checksum' in docWithChecksum).toBeTrue();
|
||||||
|
expect(typeof docWithChecksum._checksum).toEqual('number');
|
||||||
|
expect(docWithChecksum.name).toEqual('John');
|
||||||
|
expect(docWithChecksum.age).toEqual(30);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: addChecksum should not modify the original document', async () => {
|
||||||
|
const doc = { name: 'John', age: 30 };
|
||||||
|
addChecksum(doc);
|
||||||
|
expect('_checksum' in doc).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: verifyChecksum should return true for valid checksum', async () => {
|
||||||
|
const doc = { name: 'John', age: 30 };
|
||||||
|
const docWithChecksum = addChecksum(doc);
|
||||||
|
const isValid = verifyChecksum(docWithChecksum);
|
||||||
|
expect(isValid).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: verifyChecksum should return false for tampered document', async () => {
|
||||||
|
const doc = { name: 'John', age: 30 };
|
||||||
|
const docWithChecksum = addChecksum(doc);
|
||||||
|
|
||||||
|
// Tamper with the document
|
||||||
|
docWithChecksum.age = 31;
|
||||||
|
|
||||||
|
const isValid = verifyChecksum(docWithChecksum);
|
||||||
|
expect(isValid).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: verifyChecksum should return false for wrong checksum', async () => {
|
||||||
|
const doc = { name: 'John', age: 30, _checksum: 12345 };
|
||||||
|
const isValid = verifyChecksum(doc);
|
||||||
|
expect(isValid).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: verifyChecksum should return true for document without checksum', async () => {
|
||||||
|
const doc = { name: 'John', age: 30 };
|
||||||
|
const isValid = verifyChecksum(doc);
|
||||||
|
expect(isValid).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: removeChecksum should remove _checksum field', async () => {
|
||||||
|
const doc = { name: 'John', age: 30 };
|
||||||
|
const docWithChecksum = addChecksum(doc);
|
||||||
|
const docWithoutChecksum = removeChecksum(docWithChecksum);
|
||||||
|
|
||||||
|
expect('_checksum' in docWithoutChecksum).toBeFalse();
|
||||||
|
expect(docWithoutChecksum.name).toEqual('John');
|
||||||
|
expect(docWithoutChecksum.age).toEqual(30);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('checksum: removeChecksum should handle document without checksum', async () => {
|
||||||
|
const doc = { name: 'John', age: 30 };
|
||||||
|
const result = removeChecksum(doc);
|
||||||
|
|
||||||
|
expect('_checksum' in result).toBeFalse();
|
||||||
|
expect(result.name).toEqual('John');
|
||||||
|
expect(result.age).toEqual(30);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Round-trip Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('checksum: full round-trip - add, verify, remove', async () => {
|
||||||
|
const original = { name: 'Test', value: 42, nested: { a: 1, b: 2 } };
|
||||||
|
|
||||||
|
// Add checksum
|
||||||
|
const withChecksum = addChecksum(original);
|
||||||
|
expect('_checksum' in withChecksum).toBeTrue();
|
||||||
|
|
||||||
|
// Verify checksum
|
||||||
|
expect(verifyChecksum(withChecksum)).toBeTrue();
|
||||||
|
|
||||||
|
// Remove checksum
|
||||||
|
const restored = removeChecksum(withChecksum);
|
||||||
|
expect('_checksum' in restored).toBeFalse();
|
||||||
|
|
||||||
|
// Original data should be intact
|
||||||
|
expect(restored.name).toEqual('Test');
|
||||||
|
expect(restored.value).toEqual(42);
|
||||||
|
expect(restored.nested.a).toEqual(1);
|
||||||
|
expect(restored.nested.b).toEqual(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
export default tap.start();
|
||||||
417
test/test.tsmdb.indexengine.ts
Normal file
417
test/test.tsmdb.indexengine.ts
Normal file
@@ -0,0 +1,417 @@
|
|||||||
|
import { expect, tap } from '@git.zone/tstest/tapbundle';
|
||||||
|
import * as smartmongo from '../ts/index.js';
|
||||||
|
|
||||||
|
const { IndexEngine, MemoryStorageAdapter, ObjectId } = smartmongo.tsmdb;
|
||||||
|
|
||||||
|
let storage: InstanceType<typeof MemoryStorageAdapter>;
|
||||||
|
let indexEngine: InstanceType<typeof IndexEngine>;
|
||||||
|
|
||||||
|
const TEST_DB = 'testdb';
|
||||||
|
const TEST_COLL = 'indextest';
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Setup
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('indexengine: should create IndexEngine instance', async () => {
|
||||||
|
storage = new MemoryStorageAdapter();
|
||||||
|
await storage.initialize();
|
||||||
|
await storage.createCollection(TEST_DB, TEST_COLL);
|
||||||
|
|
||||||
|
indexEngine = new IndexEngine(TEST_DB, TEST_COLL, storage);
|
||||||
|
expect(indexEngine).toBeTruthy();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Index Creation Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('indexengine: createIndex should create single-field index', async () => {
|
||||||
|
const indexName = await indexEngine.createIndex({ name: 1 });
|
||||||
|
|
||||||
|
expect(indexName).toEqual('name_1');
|
||||||
|
|
||||||
|
const exists = await indexEngine.indexExists('name_1');
|
||||||
|
expect(exists).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: createIndex should create compound index', async () => {
|
||||||
|
const indexName = await indexEngine.createIndex({ city: 1, state: -1 });
|
||||||
|
|
||||||
|
expect(indexName).toEqual('city_1_state_-1');
|
||||||
|
|
||||||
|
const exists = await indexEngine.indexExists('city_1_state_-1');
|
||||||
|
expect(exists).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: createIndex should use custom name if provided', async () => {
|
||||||
|
const indexName = await indexEngine.createIndex({ email: 1 }, { name: 'custom_email_index' });
|
||||||
|
|
||||||
|
expect(indexName).toEqual('custom_email_index');
|
||||||
|
|
||||||
|
const exists = await indexEngine.indexExists('custom_email_index');
|
||||||
|
expect(exists).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: createIndex should handle unique option', async () => {
|
||||||
|
const indexName = await indexEngine.createIndex({ uniqueField: 1 }, { unique: true });
|
||||||
|
|
||||||
|
expect(indexName).toEqual('uniqueField_1');
|
||||||
|
|
||||||
|
const indexes = await indexEngine.listIndexes();
|
||||||
|
const uniqueIndex = indexes.find(i => i.name === 'uniqueField_1');
|
||||||
|
expect(uniqueIndex!.unique).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: createIndex should handle sparse option', async () => {
|
||||||
|
const indexName = await indexEngine.createIndex({ sparseField: 1 }, { sparse: true });
|
||||||
|
|
||||||
|
expect(indexName).toEqual('sparseField_1');
|
||||||
|
|
||||||
|
const indexes = await indexEngine.listIndexes();
|
||||||
|
const sparseIndex = indexes.find(i => i.name === 'sparseField_1');
|
||||||
|
expect(sparseIndex!.sparse).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: createIndex should return existing index name if already exists', async () => {
|
||||||
|
const indexName1 = await indexEngine.createIndex({ existingField: 1 }, { name: 'existing_idx' });
|
||||||
|
const indexName2 = await indexEngine.createIndex({ existingField: 1 }, { name: 'existing_idx' });
|
||||||
|
|
||||||
|
expect(indexName1).toEqual(indexName2);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Index Listing and Existence Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('indexengine: listIndexes should return all indexes', async () => {
|
||||||
|
const indexes = await indexEngine.listIndexes();
|
||||||
|
|
||||||
|
expect(indexes.length).toBeGreaterThanOrEqual(5); // _id_ + created indexes
|
||||||
|
expect(indexes.some(i => i.name === '_id_')).toBeTrue();
|
||||||
|
expect(indexes.some(i => i.name === 'name_1')).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: indexExists should return true for existing index', async () => {
|
||||||
|
const exists = await indexEngine.indexExists('name_1');
|
||||||
|
expect(exists).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: indexExists should return false for non-existent index', async () => {
|
||||||
|
const exists = await indexEngine.indexExists('nonexistent_index');
|
||||||
|
expect(exists).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Document Operations and Index Updates
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('indexengine: should insert documents for index testing', async () => {
|
||||||
|
// Create a fresh index engine for document operations
|
||||||
|
await storage.dropCollection(TEST_DB, TEST_COLL);
|
||||||
|
await storage.createCollection(TEST_DB, TEST_COLL);
|
||||||
|
|
||||||
|
indexEngine = new IndexEngine(TEST_DB, TEST_COLL, storage);
|
||||||
|
|
||||||
|
// Create indexes first
|
||||||
|
await indexEngine.createIndex({ age: 1 });
|
||||||
|
await indexEngine.createIndex({ category: 1 });
|
||||||
|
|
||||||
|
// Insert test documents
|
||||||
|
const docs = [
|
||||||
|
{ _id: new ObjectId(), name: 'Alice', age: 25, category: 'A' },
|
||||||
|
{ _id: new ObjectId(), name: 'Bob', age: 30, category: 'B' },
|
||||||
|
{ _id: new ObjectId(), name: 'Charlie', age: 35, category: 'A' },
|
||||||
|
{ _id: new ObjectId(), name: 'Diana', age: 28, category: 'C' },
|
||||||
|
{ _id: new ObjectId(), name: 'Eve', age: 30, category: 'B' },
|
||||||
|
];
|
||||||
|
|
||||||
|
for (const doc of docs) {
|
||||||
|
const stored = await storage.insertOne(TEST_DB, TEST_COLL, doc);
|
||||||
|
await indexEngine.onInsert(stored);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: onInsert should update indexes', async () => {
|
||||||
|
const newDoc = {
|
||||||
|
_id: new ObjectId(),
|
||||||
|
name: 'Frank',
|
||||||
|
age: 40,
|
||||||
|
category: 'D',
|
||||||
|
};
|
||||||
|
|
||||||
|
const stored = await storage.insertOne(TEST_DB, TEST_COLL, newDoc);
|
||||||
|
await indexEngine.onInsert(stored);
|
||||||
|
|
||||||
|
// Find by the indexed field
|
||||||
|
const candidates = await indexEngine.findCandidateIds({ age: 40 });
|
||||||
|
expect(candidates).toBeTruthy();
|
||||||
|
expect(candidates!.size).toEqual(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: onUpdate should update indexes correctly', async () => {
|
||||||
|
// Get an existing document
|
||||||
|
const docs = await storage.findAll(TEST_DB, TEST_COLL);
|
||||||
|
const oldDoc = docs.find(d => d.name === 'Alice')!;
|
||||||
|
|
||||||
|
// Update the document
|
||||||
|
const newDoc = { ...oldDoc, age: 26 };
|
||||||
|
await storage.updateById(TEST_DB, TEST_COLL, oldDoc._id, newDoc);
|
||||||
|
await indexEngine.onUpdate(oldDoc, newDoc);
|
||||||
|
|
||||||
|
// Old value should not be in index
|
||||||
|
const oldCandidates = await indexEngine.findCandidateIds({ age: 25 });
|
||||||
|
expect(oldCandidates).toBeTruthy();
|
||||||
|
expect(oldCandidates!.has(oldDoc._id.toHexString())).toBeFalse();
|
||||||
|
|
||||||
|
// New value should be in index
|
||||||
|
const newCandidates = await indexEngine.findCandidateIds({ age: 26 });
|
||||||
|
expect(newCandidates).toBeTruthy();
|
||||||
|
expect(newCandidates!.has(oldDoc._id.toHexString())).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: onDelete should remove from indexes', async () => {
|
||||||
|
const docs = await storage.findAll(TEST_DB, TEST_COLL);
|
||||||
|
const docToDelete = docs.find(d => d.name === 'Frank')!;
|
||||||
|
|
||||||
|
await storage.deleteById(TEST_DB, TEST_COLL, docToDelete._id);
|
||||||
|
await indexEngine.onDelete(docToDelete);
|
||||||
|
|
||||||
|
const candidates = await indexEngine.findCandidateIds({ age: 40 });
|
||||||
|
expect(candidates).toBeTruthy();
|
||||||
|
expect(candidates!.has(docToDelete._id.toHexString())).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// findCandidateIds Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('indexengine: findCandidateIds with equality filter', async () => {
|
||||||
|
const candidates = await indexEngine.findCandidateIds({ age: 30 });
|
||||||
|
|
||||||
|
expect(candidates).toBeTruthy();
|
||||||
|
expect(candidates!.size).toEqual(2); // Bob and Eve both have age 30
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: findCandidateIds with $in filter', async () => {
|
||||||
|
const candidates = await indexEngine.findCandidateIds({ age: { $in: [28, 30] } });
|
||||||
|
|
||||||
|
expect(candidates).toBeTruthy();
|
||||||
|
expect(candidates!.size).toEqual(3); // Diana (28), Bob (30), Eve (30)
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: findCandidateIds with no matching index', async () => {
|
||||||
|
const candidates = await indexEngine.findCandidateIds({ nonIndexedField: 'value' });
|
||||||
|
|
||||||
|
// Should return null when no index can be used
|
||||||
|
expect(candidates).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: findCandidateIds with empty filter', async () => {
|
||||||
|
const candidates = await indexEngine.findCandidateIds({});
|
||||||
|
|
||||||
|
// Empty filter = no index can be used
|
||||||
|
expect(candidates).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Range Query Tests (B-Tree)
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('indexengine: findCandidateIds with $gt', async () => {
|
||||||
|
const candidates = await indexEngine.findCandidateIds({ age: { $gt: 30 } });
|
||||||
|
|
||||||
|
expect(candidates).toBeTruthy();
|
||||||
|
// Charlie (35) is > 30
|
||||||
|
expect(candidates!.size).toBeGreaterThanOrEqual(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: findCandidateIds with $lt', async () => {
|
||||||
|
const candidates = await indexEngine.findCandidateIds({ age: { $lt: 28 } });
|
||||||
|
|
||||||
|
expect(candidates).toBeTruthy();
|
||||||
|
// Alice (26) is < 28
|
||||||
|
expect(candidates!.size).toBeGreaterThanOrEqual(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: findCandidateIds with $gte', async () => {
|
||||||
|
const candidates = await indexEngine.findCandidateIds({ age: { $gte: 30 } });
|
||||||
|
|
||||||
|
expect(candidates).toBeTruthy();
|
||||||
|
// Bob (30), Eve (30), Charlie (35)
|
||||||
|
expect(candidates!.size).toBeGreaterThanOrEqual(3);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: findCandidateIds with $lte', async () => {
|
||||||
|
const candidates = await indexEngine.findCandidateIds({ age: { $lte: 28 } });
|
||||||
|
|
||||||
|
expect(candidates).toBeTruthy();
|
||||||
|
// Alice (26), Diana (28)
|
||||||
|
expect(candidates!.size).toBeGreaterThanOrEqual(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: findCandidateIds with range $gt and $lt', async () => {
|
||||||
|
const candidates = await indexEngine.findCandidateIds({ age: { $gt: 26, $lt: 35 } });
|
||||||
|
|
||||||
|
expect(candidates).toBeTruthy();
|
||||||
|
// Diana (28), Bob (30), Eve (30) are between 26 and 35 exclusive
|
||||||
|
expect(candidates!.size).toBeGreaterThanOrEqual(3);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Index Selection Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('indexengine: selectIndex should return best index for equality', async () => {
|
||||||
|
const result = indexEngine.selectIndex({ age: 30 });
|
||||||
|
|
||||||
|
expect(result).toBeTruthy();
|
||||||
|
expect(result!.name).toEqual('age_1');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: selectIndex should return best index for range query', async () => {
|
||||||
|
const result = indexEngine.selectIndex({ age: { $gt: 25 } });
|
||||||
|
|
||||||
|
expect(result).toBeTruthy();
|
||||||
|
expect(result!.name).toEqual('age_1');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: selectIndex should return null for no matching filter', async () => {
|
||||||
|
const result = indexEngine.selectIndex({ nonIndexedField: 'value' });
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: selectIndex should return null for empty filter', async () => {
|
||||||
|
const result = indexEngine.selectIndex({});
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: selectIndex should prefer more specific indexes', async () => {
|
||||||
|
// Create a compound index
|
||||||
|
await indexEngine.createIndex({ age: 1, category: 1 }, { name: 'age_category_compound' });
|
||||||
|
|
||||||
|
// Query that matches compound index
|
||||||
|
const result = indexEngine.selectIndex({ age: 30, category: 'B' });
|
||||||
|
|
||||||
|
expect(result).toBeTruthy();
|
||||||
|
// Should prefer the compound index since it covers more fields
|
||||||
|
expect(result!.name).toEqual('age_category_compound');
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Drop Index Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('indexengine: dropIndex should remove the index', async () => {
|
||||||
|
await indexEngine.createIndex({ dropTest: 1 }, { name: 'drop_test_idx' });
|
||||||
|
expect(await indexEngine.indexExists('drop_test_idx')).toBeTrue();
|
||||||
|
|
||||||
|
await indexEngine.dropIndex('drop_test_idx');
|
||||||
|
expect(await indexEngine.indexExists('drop_test_idx')).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: dropIndex should throw for _id index', async () => {
|
||||||
|
let threw = false;
|
||||||
|
try {
|
||||||
|
await indexEngine.dropIndex('_id_');
|
||||||
|
} catch (e) {
|
||||||
|
threw = true;
|
||||||
|
}
|
||||||
|
expect(threw).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: dropIndex should throw for non-existent index', async () => {
|
||||||
|
let threw = false;
|
||||||
|
try {
|
||||||
|
await indexEngine.dropIndex('nonexistent_index');
|
||||||
|
} catch (e) {
|
||||||
|
threw = true;
|
||||||
|
}
|
||||||
|
expect(threw).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('indexengine: dropAllIndexes should remove all indexes except _id', async () => {
|
||||||
|
// Create some indexes to drop
|
||||||
|
await indexEngine.createIndex({ toDrop1: 1 });
|
||||||
|
await indexEngine.createIndex({ toDrop2: 1 });
|
||||||
|
|
||||||
|
await indexEngine.dropAllIndexes();
|
||||||
|
|
||||||
|
const indexes = await indexEngine.listIndexes();
|
||||||
|
expect(indexes.length).toEqual(1);
|
||||||
|
expect(indexes[0].name).toEqual('_id_');
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Unique Index Constraint Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('indexengine: unique index should prevent duplicate inserts', async () => {
|
||||||
|
// Create fresh collection
|
||||||
|
await storage.dropCollection(TEST_DB, 'uniquetest');
|
||||||
|
await storage.createCollection(TEST_DB, 'uniquetest');
|
||||||
|
|
||||||
|
const uniqueIndexEngine = new IndexEngine(TEST_DB, 'uniquetest', storage);
|
||||||
|
await uniqueIndexEngine.createIndex({ email: 1 }, { unique: true });
|
||||||
|
|
||||||
|
// Insert first document
|
||||||
|
const doc1 = { _id: new ObjectId(), email: 'test@example.com', name: 'Test' };
|
||||||
|
const stored1 = await storage.insertOne(TEST_DB, 'uniquetest', doc1);
|
||||||
|
await uniqueIndexEngine.onInsert(stored1);
|
||||||
|
|
||||||
|
// Try to insert duplicate
|
||||||
|
const doc2 = { _id: new ObjectId(), email: 'test@example.com', name: 'Test2' };
|
||||||
|
const stored2 = await storage.insertOne(TEST_DB, 'uniquetest', doc2);
|
||||||
|
|
||||||
|
let threw = false;
|
||||||
|
try {
|
||||||
|
await uniqueIndexEngine.onInsert(stored2);
|
||||||
|
} catch (e: any) {
|
||||||
|
threw = true;
|
||||||
|
expect(e.message).toContain('duplicate key');
|
||||||
|
}
|
||||||
|
|
||||||
|
expect(threw).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Sparse Index Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('indexengine: sparse index should not include documents without the field', async () => {
|
||||||
|
// Create fresh collection
|
||||||
|
await storage.dropCollection(TEST_DB, 'sparsetest');
|
||||||
|
await storage.createCollection(TEST_DB, 'sparsetest');
|
||||||
|
|
||||||
|
const sparseIndexEngine = new IndexEngine(TEST_DB, 'sparsetest', storage);
|
||||||
|
await sparseIndexEngine.createIndex({ optionalField: 1 }, { sparse: true });
|
||||||
|
|
||||||
|
// Insert doc with the field
|
||||||
|
const doc1 = { _id: new ObjectId(), optionalField: 'hasValue', name: 'HasField' };
|
||||||
|
const stored1 = await storage.insertOne(TEST_DB, 'sparsetest', doc1);
|
||||||
|
await sparseIndexEngine.onInsert(stored1);
|
||||||
|
|
||||||
|
// Insert doc without the field
|
||||||
|
const doc2 = { _id: new ObjectId(), name: 'NoField' };
|
||||||
|
const stored2 = await storage.insertOne(TEST_DB, 'sparsetest', doc2);
|
||||||
|
await sparseIndexEngine.onInsert(stored2);
|
||||||
|
|
||||||
|
// Search for documents with the field
|
||||||
|
const candidates = await sparseIndexEngine.findCandidateIds({ optionalField: 'hasValue' });
|
||||||
|
expect(candidates).toBeTruthy();
|
||||||
|
expect(candidates!.size).toEqual(1);
|
||||||
|
expect(candidates!.has(stored1._id.toHexString())).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Cleanup
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('indexengine: cleanup', async () => {
|
||||||
|
await storage.close();
|
||||||
|
expect(true).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
export default tap.start();
|
||||||
273
test/test.tsmdb.queryplanner.ts
Normal file
273
test/test.tsmdb.queryplanner.ts
Normal file
@@ -0,0 +1,273 @@
|
|||||||
|
import { expect, tap } from '@git.zone/tstest/tapbundle';
|
||||||
|
import * as smartmongo from '../ts/index.js';
|
||||||
|
|
||||||
|
const { QueryPlanner, IndexEngine, MemoryStorageAdapter, ObjectId } = smartmongo.tsmdb;
|
||||||
|
|
||||||
|
let storage: InstanceType<typeof MemoryStorageAdapter>;
|
||||||
|
let indexEngine: InstanceType<typeof IndexEngine>;
|
||||||
|
let queryPlanner: InstanceType<typeof QueryPlanner>;
|
||||||
|
|
||||||
|
const TEST_DB = 'testdb';
|
||||||
|
const TEST_COLL = 'testcoll';
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Setup
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('queryplanner: should create QueryPlanner instance', async () => {
|
||||||
|
storage = new MemoryStorageAdapter();
|
||||||
|
await storage.initialize();
|
||||||
|
await storage.createCollection(TEST_DB, TEST_COLL);
|
||||||
|
|
||||||
|
indexEngine = new IndexEngine(TEST_DB, TEST_COLL, storage);
|
||||||
|
queryPlanner = new QueryPlanner(indexEngine);
|
||||||
|
|
||||||
|
expect(queryPlanner).toBeTruthy();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('queryplanner: should insert test documents', async () => {
|
||||||
|
// Insert test documents
|
||||||
|
const docs = [
|
||||||
|
{ _id: new ObjectId(), name: 'Alice', age: 25, city: 'NYC', category: 'A' },
|
||||||
|
{ _id: new ObjectId(), name: 'Bob', age: 30, city: 'LA', category: 'B' },
|
||||||
|
{ _id: new ObjectId(), name: 'Charlie', age: 35, city: 'NYC', category: 'A' },
|
||||||
|
{ _id: new ObjectId(), name: 'Diana', age: 28, city: 'Chicago', category: 'C' },
|
||||||
|
{ _id: new ObjectId(), name: 'Eve', age: 32, city: 'LA', category: 'B' },
|
||||||
|
];
|
||||||
|
|
||||||
|
for (const doc of docs) {
|
||||||
|
await storage.insertOne(TEST_DB, TEST_COLL, doc);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Basic Plan Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('queryplanner: empty filter should result in COLLSCAN', async () => {
|
||||||
|
const plan = await queryPlanner.plan({});
|
||||||
|
|
||||||
|
expect(plan.type).toEqual('COLLSCAN');
|
||||||
|
expect(plan.indexCovering).toBeFalse();
|
||||||
|
expect(plan.selectivity).toEqual(1.0);
|
||||||
|
expect(plan.explanation).toContain('No filter');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('queryplanner: null filter should result in COLLSCAN', async () => {
|
||||||
|
const plan = await queryPlanner.plan(null as any);
|
||||||
|
|
||||||
|
expect(plan.type).toEqual('COLLSCAN');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('queryplanner: filter with no matching index should result in COLLSCAN', async () => {
|
||||||
|
const plan = await queryPlanner.plan({ nonExistentField: 'value' });
|
||||||
|
|
||||||
|
expect(plan.type).toEqual('COLLSCAN');
|
||||||
|
expect(plan.explanation).toContain('No suitable index');
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Index Scan Tests (with indexes)
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('queryplanner: should create test indexes', async () => {
|
||||||
|
await indexEngine.createIndex({ age: 1 }, { name: 'age_1' });
|
||||||
|
await indexEngine.createIndex({ name: 1 }, { name: 'name_1' });
|
||||||
|
await indexEngine.createIndex({ city: 1, category: 1 }, { name: 'city_category_1' });
|
||||||
|
|
||||||
|
const indexes = await indexEngine.listIndexes();
|
||||||
|
expect(indexes.length).toBeGreaterThanOrEqual(4); // _id_ + 3 created
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('queryplanner: simple equality filter should use IXSCAN', async () => {
|
||||||
|
const plan = await queryPlanner.plan({ age: 30 });
|
||||||
|
|
||||||
|
expect(plan.type).toEqual('IXSCAN');
|
||||||
|
expect(plan.indexName).toEqual('age_1');
|
||||||
|
expect(plan.indexFieldsUsed).toContain('age');
|
||||||
|
expect(plan.usesRange).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('queryplanner: $eq operator should use IXSCAN', async () => {
|
||||||
|
const plan = await queryPlanner.plan({ name: { $eq: 'Alice' } });
|
||||||
|
|
||||||
|
expect(plan.type).toEqual('IXSCAN');
|
||||||
|
expect(plan.indexName).toEqual('name_1');
|
||||||
|
expect(plan.indexFieldsUsed).toContain('name');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('queryplanner: range filter ($gt) should use IXSCAN_RANGE', async () => {
|
||||||
|
const plan = await queryPlanner.plan({ age: { $gt: 25 } });
|
||||||
|
|
||||||
|
expect(plan.type).toEqual('IXSCAN_RANGE');
|
||||||
|
expect(plan.indexName).toEqual('age_1');
|
||||||
|
expect(plan.usesRange).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('queryplanner: range filter ($lt) should use IXSCAN_RANGE', async () => {
|
||||||
|
const plan = await queryPlanner.plan({ age: { $lt: 35 } });
|
||||||
|
|
||||||
|
expect(plan.type).toEqual('IXSCAN_RANGE');
|
||||||
|
expect(plan.usesRange).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('queryplanner: range filter ($gte, $lte) should use IXSCAN_RANGE', async () => {
|
||||||
|
const plan = await queryPlanner.plan({ age: { $gte: 25, $lte: 35 } });
|
||||||
|
|
||||||
|
expect(plan.type).toEqual('IXSCAN_RANGE');
|
||||||
|
expect(plan.usesRange).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('queryplanner: $in operator should use IXSCAN', async () => {
|
||||||
|
const plan = await queryPlanner.plan({ age: { $in: [25, 30, 35] } });
|
||||||
|
|
||||||
|
expect(plan.type).toEqual('IXSCAN');
|
||||||
|
expect(plan.indexName).toEqual('age_1');
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Compound Index Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('queryplanner: compound index - first field equality should use index', async () => {
|
||||||
|
const plan = await queryPlanner.plan({ city: 'NYC' });
|
||||||
|
|
||||||
|
expect(plan.type).toEqual('IXSCAN');
|
||||||
|
expect(plan.indexName).toEqual('city_category_1');
|
||||||
|
expect(plan.indexFieldsUsed).toContain('city');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('queryplanner: compound index - both fields should use full index', async () => {
|
||||||
|
const plan = await queryPlanner.plan({ city: 'NYC', category: 'A' });
|
||||||
|
|
||||||
|
expect(plan.type).toEqual('IXSCAN');
|
||||||
|
expect(plan.indexName).toEqual('city_category_1');
|
||||||
|
expect(plan.indexFieldsUsed).toContain('city');
|
||||||
|
expect(plan.indexFieldsUsed).toContain('category');
|
||||||
|
expect(plan.indexFieldsUsed.length).toEqual(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Selectivity Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('queryplanner: equality query should have low selectivity', async () => {
|
||||||
|
const plan = await queryPlanner.plan({ age: 30 });
|
||||||
|
|
||||||
|
expect(plan.selectivity).toBeLessThan(0.1);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('queryplanner: range query should have moderate selectivity', async () => {
|
||||||
|
const plan = await queryPlanner.plan({ age: { $gt: 25 } });
|
||||||
|
|
||||||
|
expect(plan.selectivity).toBeGreaterThan(0);
|
||||||
|
expect(plan.selectivity).toBeLessThan(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('queryplanner: $in query selectivity depends on array size', async () => {
|
||||||
|
const smallInPlan = await queryPlanner.plan({ age: { $in: [25] } });
|
||||||
|
const largeInPlan = await queryPlanner.plan({ age: { $in: [25, 26, 27, 28, 29, 30] } });
|
||||||
|
|
||||||
|
// Larger $in should have higher selectivity (less selective = more documents)
|
||||||
|
expect(largeInPlan.selectivity).toBeGreaterThanOrEqual(smallInPlan.selectivity);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Index Covering Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('queryplanner: query covering all filter fields should be index covering', async () => {
|
||||||
|
const plan = await queryPlanner.plan({ age: 30 });
|
||||||
|
|
||||||
|
// All filter fields are covered by the index
|
||||||
|
expect(plan.indexCovering).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('queryplanner: query with residual filter should not be index covering', async () => {
|
||||||
|
const plan = await queryPlanner.plan({ city: 'NYC', name: 'Alice' });
|
||||||
|
|
||||||
|
// 'name' is not in the compound index city_category, so it's residual
|
||||||
|
expect(plan.indexCovering).toBeFalse();
|
||||||
|
expect(plan.residualFilter).toBeTruthy();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Explain Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('queryplanner: explain should return detailed plan info', async () => {
|
||||||
|
const explanation = await queryPlanner.explain({ age: 30 });
|
||||||
|
|
||||||
|
expect(explanation.queryPlanner).toBeTruthy();
|
||||||
|
expect(explanation.queryPlanner.plannerVersion).toEqual(1);
|
||||||
|
expect(explanation.queryPlanner.winningPlan).toBeTruthy();
|
||||||
|
expect(explanation.queryPlanner.rejectedPlans).toBeArray();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('queryplanner: explain should include winning and rejected plans', async () => {
|
||||||
|
const explanation = await queryPlanner.explain({ age: 30 });
|
||||||
|
|
||||||
|
expect(explanation.queryPlanner.winningPlan.type).toBeTruthy();
|
||||||
|
expect(explanation.queryPlanner.rejectedPlans.length).toBeGreaterThan(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('queryplanner: explain winning plan should be the best plan', async () => {
|
||||||
|
const explanation = await queryPlanner.explain({ age: 30 });
|
||||||
|
|
||||||
|
// Winning plan should use an index, not collection scan (if index exists)
|
||||||
|
expect(explanation.queryPlanner.winningPlan.type).toEqual('IXSCAN');
|
||||||
|
|
||||||
|
// There should be a COLLSCAN in rejected plans
|
||||||
|
const hasCOLLSCAN = explanation.queryPlanner.rejectedPlans.some(p => p.type === 'COLLSCAN');
|
||||||
|
expect(hasCOLLSCAN).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// $and Operator Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('queryplanner: $and conditions should be analyzed', async () => {
|
||||||
|
const plan = await queryPlanner.plan({
|
||||||
|
$and: [
|
||||||
|
{ age: { $gte: 25 } },
|
||||||
|
{ age: { $lte: 35 } },
|
||||||
|
],
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(plan.type).toEqual('IXSCAN_RANGE');
|
||||||
|
expect(plan.indexName).toEqual('age_1');
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Edge Cases
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('queryplanner: should handle complex nested operators', async () => {
|
||||||
|
const plan = await queryPlanner.plan({
|
||||||
|
age: { $gte: 20, $lte: 40 },
|
||||||
|
city: 'NYC',
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(plan).toBeTruthy();
|
||||||
|
expect(plan.type).not.toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('queryplanner: should handle $exists operator', async () => {
|
||||||
|
await indexEngine.createIndex({ email: 1 }, { name: 'email_1', sparse: true });
|
||||||
|
|
||||||
|
const plan = await queryPlanner.plan({ email: { $exists: true } });
|
||||||
|
|
||||||
|
// $exists can use sparse indexes
|
||||||
|
expect(plan).toBeTruthy();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Cleanup
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('queryplanner: cleanup', async () => {
|
||||||
|
await storage.close();
|
||||||
|
expect(true).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
export default tap.start();
|
||||||
361
test/test.tsmdb.session.ts
Normal file
361
test/test.tsmdb.session.ts
Normal file
@@ -0,0 +1,361 @@
|
|||||||
|
import { expect, tap } from '@git.zone/tstest/tapbundle';
|
||||||
|
import * as smartmongo from '../ts/index.js';
|
||||||
|
|
||||||
|
const { SessionEngine } = smartmongo.tsmdb;
|
||||||
|
|
||||||
|
let sessionEngine: InstanceType<typeof SessionEngine>;
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Setup
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('session: should create SessionEngine instance', async () => {
|
||||||
|
sessionEngine = new SessionEngine({
|
||||||
|
sessionTimeoutMs: 1000, // 1 second for testing
|
||||||
|
cleanupIntervalMs: 10000, // 10 seconds to avoid cleanup during tests
|
||||||
|
});
|
||||||
|
expect(sessionEngine).toBeTruthy();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Session Lifecycle Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('session: startSession should create session with auto-generated ID', async () => {
|
||||||
|
const session = sessionEngine.startSession();
|
||||||
|
|
||||||
|
expect(session).toBeTruthy();
|
||||||
|
expect(session.id).toBeTruthy();
|
||||||
|
expect(session.id.length).toBeGreaterThanOrEqual(32); // UUID hex string (32 or 36 with hyphens)
|
||||||
|
expect(session.createdAt).toBeGreaterThan(0);
|
||||||
|
expect(session.lastActivityAt).toBeGreaterThan(0);
|
||||||
|
expect(session.inTransaction).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: startSession should create session with specified ID', async () => {
|
||||||
|
const customId = 'custom-session-id-12345';
|
||||||
|
const session = sessionEngine.startSession(customId);
|
||||||
|
|
||||||
|
expect(session.id).toEqual(customId);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: startSession should create session with metadata', async () => {
|
||||||
|
const metadata = { client: 'test-client', version: '1.0' };
|
||||||
|
const session = sessionEngine.startSession(undefined, metadata);
|
||||||
|
|
||||||
|
expect(session.metadata).toBeTruthy();
|
||||||
|
expect(session.metadata!.client).toEqual('test-client');
|
||||||
|
expect(session.metadata!.version).toEqual('1.0');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: getSession should return session by ID', async () => {
|
||||||
|
const created = sessionEngine.startSession('get-session-test');
|
||||||
|
const retrieved = sessionEngine.getSession('get-session-test');
|
||||||
|
|
||||||
|
expect(retrieved).toBeTruthy();
|
||||||
|
expect(retrieved!.id).toEqual('get-session-test');
|
||||||
|
expect(retrieved!.id).toEqual(created.id);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: getSession should return undefined for non-existent session', async () => {
|
||||||
|
const session = sessionEngine.getSession('non-existent-session-id');
|
||||||
|
expect(session).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: touchSession should update lastActivityAt', async () => {
|
||||||
|
const session = sessionEngine.startSession('touch-test-session');
|
||||||
|
const originalLastActivity = session.lastActivityAt;
|
||||||
|
|
||||||
|
// Wait a bit to ensure time difference
|
||||||
|
await new Promise(resolve => setTimeout(resolve, 10));
|
||||||
|
|
||||||
|
const touched = sessionEngine.touchSession('touch-test-session');
|
||||||
|
expect(touched).toBeTrue();
|
||||||
|
|
||||||
|
const updated = sessionEngine.getSession('touch-test-session');
|
||||||
|
expect(updated!.lastActivityAt).toBeGreaterThanOrEqual(originalLastActivity);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: touchSession should return false for non-existent session', async () => {
|
||||||
|
const touched = sessionEngine.touchSession('non-existent-touch-session');
|
||||||
|
expect(touched).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: endSession should remove the session', async () => {
|
||||||
|
sessionEngine.startSession('end-session-test');
|
||||||
|
expect(sessionEngine.getSession('end-session-test')).toBeTruthy();
|
||||||
|
|
||||||
|
const ended = await sessionEngine.endSession('end-session-test');
|
||||||
|
expect(ended).toBeTrue();
|
||||||
|
|
||||||
|
expect(sessionEngine.getSession('end-session-test')).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: endSession should return false for non-existent session', async () => {
|
||||||
|
const ended = await sessionEngine.endSession('non-existent-end-session');
|
||||||
|
expect(ended).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Session Expiry Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('session: isSessionExpired should return false for fresh session', async () => {
|
||||||
|
const session = sessionEngine.startSession('fresh-session');
|
||||||
|
const isExpired = sessionEngine.isSessionExpired(session);
|
||||||
|
expect(isExpired).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: isSessionExpired should return true for old session', async () => {
|
||||||
|
// Create a session with old lastActivityAt
|
||||||
|
const session = sessionEngine.startSession('old-session');
|
||||||
|
// Manually set lastActivityAt to old value (sessionTimeoutMs is 1000ms)
|
||||||
|
(session as any).lastActivityAt = Date.now() - 2000;
|
||||||
|
|
||||||
|
const isExpired = sessionEngine.isSessionExpired(session);
|
||||||
|
expect(isExpired).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: getSession should return undefined for expired session', async () => {
|
||||||
|
const session = sessionEngine.startSession('expiring-session');
|
||||||
|
// Manually expire the session
|
||||||
|
(session as any).lastActivityAt = Date.now() - 2000;
|
||||||
|
|
||||||
|
const retrieved = sessionEngine.getSession('expiring-session');
|
||||||
|
expect(retrieved).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Transaction Integration Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('session: startTransaction should mark session as in transaction', async () => {
|
||||||
|
sessionEngine.startSession('txn-session-1');
|
||||||
|
const started = sessionEngine.startTransaction('txn-session-1', 'txn-id-1', 1);
|
||||||
|
|
||||||
|
expect(started).toBeTrue();
|
||||||
|
|
||||||
|
const session = sessionEngine.getSession('txn-session-1');
|
||||||
|
expect(session!.inTransaction).toBeTrue();
|
||||||
|
expect(session!.txnId).toEqual('txn-id-1');
|
||||||
|
expect(session!.txnNumber).toEqual(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: startTransaction should return false for non-existent session', async () => {
|
||||||
|
const started = sessionEngine.startTransaction('non-existent-txn-session', 'txn-id');
|
||||||
|
expect(started).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: endTransaction should clear transaction state', async () => {
|
||||||
|
sessionEngine.startSession('txn-session-2');
|
||||||
|
sessionEngine.startTransaction('txn-session-2', 'txn-id-2');
|
||||||
|
|
||||||
|
const ended = sessionEngine.endTransaction('txn-session-2');
|
||||||
|
expect(ended).toBeTrue();
|
||||||
|
|
||||||
|
const session = sessionEngine.getSession('txn-session-2');
|
||||||
|
expect(session!.inTransaction).toBeFalse();
|
||||||
|
expect(session!.txnId).toBeUndefined();
|
||||||
|
expect(session!.txnNumber).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: endTransaction should return false for non-existent session', async () => {
|
||||||
|
const ended = sessionEngine.endTransaction('non-existent-end-txn-session');
|
||||||
|
expect(ended).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: getTransactionId should return transaction ID', async () => {
|
||||||
|
sessionEngine.startSession('txn-id-session');
|
||||||
|
sessionEngine.startTransaction('txn-id-session', 'my-txn-id');
|
||||||
|
|
||||||
|
const txnId = sessionEngine.getTransactionId('txn-id-session');
|
||||||
|
expect(txnId).toEqual('my-txn-id');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: getTransactionId should return undefined for session without transaction', async () => {
|
||||||
|
sessionEngine.startSession('no-txn-session');
|
||||||
|
const txnId = sessionEngine.getTransactionId('no-txn-session');
|
||||||
|
expect(txnId).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: getTransactionId should return undefined for non-existent session', async () => {
|
||||||
|
const txnId = sessionEngine.getTransactionId('non-existent-txn-id-session');
|
||||||
|
expect(txnId).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: isInTransaction should return correct state', async () => {
|
||||||
|
sessionEngine.startSession('in-txn-check-session');
|
||||||
|
|
||||||
|
expect(sessionEngine.isInTransaction('in-txn-check-session')).toBeFalse();
|
||||||
|
|
||||||
|
sessionEngine.startTransaction('in-txn-check-session', 'txn-check');
|
||||||
|
expect(sessionEngine.isInTransaction('in-txn-check-session')).toBeTrue();
|
||||||
|
|
||||||
|
sessionEngine.endTransaction('in-txn-check-session');
|
||||||
|
expect(sessionEngine.isInTransaction('in-txn-check-session')).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: isInTransaction should return false for non-existent session', async () => {
|
||||||
|
expect(sessionEngine.isInTransaction('non-existent-in-txn-session')).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Session Listing Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('session: listSessions should return all active sessions', async () => {
|
||||||
|
// Close and recreate to have a clean slate
|
||||||
|
sessionEngine.close();
|
||||||
|
sessionEngine = new SessionEngine({
|
||||||
|
sessionTimeoutMs: 10000,
|
||||||
|
cleanupIntervalMs: 60000,
|
||||||
|
});
|
||||||
|
|
||||||
|
sessionEngine.startSession('list-session-1');
|
||||||
|
sessionEngine.startSession('list-session-2');
|
||||||
|
sessionEngine.startSession('list-session-3');
|
||||||
|
|
||||||
|
const sessions = sessionEngine.listSessions();
|
||||||
|
expect(sessions.length).toEqual(3);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: listSessions should not include expired sessions', async () => {
|
||||||
|
const session = sessionEngine.startSession('expired-list-session');
|
||||||
|
// Expire the session
|
||||||
|
(session as any).lastActivityAt = Date.now() - 20000;
|
||||||
|
|
||||||
|
const sessions = sessionEngine.listSessions();
|
||||||
|
const found = sessions.find(s => s.id === 'expired-list-session');
|
||||||
|
expect(found).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: getSessionCount should return correct count', async () => {
|
||||||
|
const count = sessionEngine.getSessionCount();
|
||||||
|
expect(count).toBeGreaterThanOrEqual(3); // We created 3 sessions above
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: getSessionsWithTransactions should filter correctly', async () => {
|
||||||
|
// Clean slate
|
||||||
|
sessionEngine.close();
|
||||||
|
sessionEngine = new SessionEngine({
|
||||||
|
sessionTimeoutMs: 10000,
|
||||||
|
cleanupIntervalMs: 60000,
|
||||||
|
});
|
||||||
|
|
||||||
|
sessionEngine.startSession('no-txn-1');
|
||||||
|
sessionEngine.startSession('no-txn-2');
|
||||||
|
sessionEngine.startSession('with-txn-1');
|
||||||
|
sessionEngine.startSession('with-txn-2');
|
||||||
|
|
||||||
|
sessionEngine.startTransaction('with-txn-1', 'txn-a');
|
||||||
|
sessionEngine.startTransaction('with-txn-2', 'txn-b');
|
||||||
|
|
||||||
|
const txnSessions = sessionEngine.getSessionsWithTransactions();
|
||||||
|
expect(txnSessions.length).toEqual(2);
|
||||||
|
expect(txnSessions.every(s => s.inTransaction)).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// getOrCreateSession Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('session: getOrCreateSession should create if missing', async () => {
|
||||||
|
const session = sessionEngine.getOrCreateSession('get-or-create-new');
|
||||||
|
expect(session).toBeTruthy();
|
||||||
|
expect(session.id).toEqual('get-or-create-new');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: getOrCreateSession should return existing session', async () => {
|
||||||
|
const created = sessionEngine.startSession('get-or-create-existing');
|
||||||
|
const retrieved = sessionEngine.getOrCreateSession('get-or-create-existing');
|
||||||
|
|
||||||
|
expect(retrieved.id).toEqual(created.id);
|
||||||
|
expect(retrieved.createdAt).toEqual(created.createdAt);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: getOrCreateSession should touch existing session', async () => {
|
||||||
|
const session = sessionEngine.startSession('get-or-create-touch');
|
||||||
|
const originalLastActivity = session.lastActivityAt;
|
||||||
|
|
||||||
|
await new Promise(resolve => setTimeout(resolve, 10));
|
||||||
|
|
||||||
|
sessionEngine.getOrCreateSession('get-or-create-touch');
|
||||||
|
const updated = sessionEngine.getSession('get-or-create-touch');
|
||||||
|
|
||||||
|
expect(updated!.lastActivityAt).toBeGreaterThanOrEqual(originalLastActivity);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// extractSessionId Static Method Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('session: extractSessionId should handle UUID object', async () => {
|
||||||
|
const { ObjectId } = smartmongo.tsmdb;
|
||||||
|
const uuid = new smartmongo.tsmdb.plugins.bson.UUID();
|
||||||
|
const lsid = { id: uuid };
|
||||||
|
|
||||||
|
const extracted = SessionEngine.extractSessionId(lsid);
|
||||||
|
expect(extracted).toEqual(uuid.toHexString());
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: extractSessionId should handle string ID', async () => {
|
||||||
|
const lsid = { id: 'string-session-id' };
|
||||||
|
|
||||||
|
const extracted = SessionEngine.extractSessionId(lsid);
|
||||||
|
expect(extracted).toEqual('string-session-id');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: extractSessionId should handle binary format', async () => {
|
||||||
|
const binaryData = Buffer.from('test-binary-uuid', 'utf8').toString('base64');
|
||||||
|
const lsid = { id: { $binary: { base64: binaryData } } };
|
||||||
|
|
||||||
|
const extracted = SessionEngine.extractSessionId(lsid);
|
||||||
|
expect(extracted).toBeTruthy();
|
||||||
|
expect(typeof extracted).toEqual('string');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: extractSessionId should return undefined for null/undefined', async () => {
|
||||||
|
expect(SessionEngine.extractSessionId(null)).toBeUndefined();
|
||||||
|
expect(SessionEngine.extractSessionId(undefined)).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: extractSessionId should return undefined for empty object', async () => {
|
||||||
|
expect(SessionEngine.extractSessionId({})).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// refreshSession Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('session: refreshSession should update lastActivityAt', async () => {
|
||||||
|
const session = sessionEngine.startSession('refresh-session-test');
|
||||||
|
const originalLastActivity = session.lastActivityAt;
|
||||||
|
|
||||||
|
await new Promise(resolve => setTimeout(resolve, 10));
|
||||||
|
|
||||||
|
const refreshed = sessionEngine.refreshSession('refresh-session-test');
|
||||||
|
expect(refreshed).toBeTrue();
|
||||||
|
|
||||||
|
const updated = sessionEngine.getSession('refresh-session-test');
|
||||||
|
expect(updated!.lastActivityAt).toBeGreaterThanOrEqual(originalLastActivity);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('session: refreshSession should return false for non-existent session', async () => {
|
||||||
|
const refreshed = sessionEngine.refreshSession('non-existent-refresh-session');
|
||||||
|
expect(refreshed).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Cleanup
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('session: close should clear all sessions', async () => {
|
||||||
|
sessionEngine.startSession('close-test-session');
|
||||||
|
expect(sessionEngine.getSessionCount()).toBeGreaterThan(0);
|
||||||
|
|
||||||
|
sessionEngine.close();
|
||||||
|
|
||||||
|
expect(sessionEngine.getSessionCount()).toEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
export default tap.start();
|
||||||
572
test/test.tsmdb.ts
Normal file
572
test/test.tsmdb.ts
Normal file
@@ -0,0 +1,572 @@
|
|||||||
|
import { expect, tap } from '@git.zone/tstest/tapbundle';
|
||||||
|
import * as smartmongo from '../ts/index.js';
|
||||||
|
import { MongoClient, Db, Collection } from 'mongodb';
|
||||||
|
|
||||||
|
const { tsmdb } = smartmongo;
|
||||||
|
|
||||||
|
let server: smartmongo.tsmdb.TsmdbServer;
|
||||||
|
let client: MongoClient;
|
||||||
|
let db: Db;
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Server Startup
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('tsmdb: should start the server', async () => {
|
||||||
|
server = new tsmdb.TsmdbServer({ port: 27117 }); // Use non-standard port for tests
|
||||||
|
await server.start();
|
||||||
|
expect(server.running).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: should connect with official MongoClient', async () => {
|
||||||
|
client = new MongoClient('mongodb://127.0.0.1:27117', {
|
||||||
|
directConnection: true,
|
||||||
|
serverSelectionTimeoutMS: 5000,
|
||||||
|
});
|
||||||
|
await client.connect();
|
||||||
|
expect(client).toBeTruthy();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: should get a database instance', async () => {
|
||||||
|
db = client.db('testdb');
|
||||||
|
expect(db).toBeTruthy();
|
||||||
|
expect(db.databaseName).toEqual('testdb');
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Basic CRUD Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('tsmdb: insertOne - should insert a document', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
const result = await collection.insertOne({
|
||||||
|
name: 'John Doe',
|
||||||
|
email: 'john@example.com',
|
||||||
|
age: 30,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result.acknowledged).toBeTrue();
|
||||||
|
expect(result.insertedId).toBeTruthy();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: insertMany - should insert multiple documents', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
const result = await collection.insertMany([
|
||||||
|
{ name: 'Jane Doe', email: 'jane@example.com', age: 25 },
|
||||||
|
{ name: 'Bob Smith', email: 'bob@example.com', age: 35 },
|
||||||
|
{ name: 'Alice Johnson', email: 'alice@example.com', age: 28 },
|
||||||
|
]);
|
||||||
|
|
||||||
|
expect(result.acknowledged).toBeTrue();
|
||||||
|
expect(result.insertedCount).toEqual(3);
|
||||||
|
expect(Object.keys(result.insertedIds).length).toEqual(3);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: findOne - should find a single document', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
const doc = await collection.findOne({ name: 'John Doe' });
|
||||||
|
|
||||||
|
expect(doc).toBeTruthy();
|
||||||
|
expect(doc!.name).toEqual('John Doe');
|
||||||
|
expect(doc!.email).toEqual('john@example.com');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: find - should find multiple documents', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
const docs = await collection.find({ age: { $gte: 28 } }).toArray();
|
||||||
|
|
||||||
|
expect(docs.length).toEqual(3);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: updateOne - should update a single document', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
const result = await collection.updateOne(
|
||||||
|
{ name: 'John Doe' },
|
||||||
|
{ $set: { age: 31 } }
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(result.acknowledged).toBeTrue();
|
||||||
|
expect(result.matchedCount).toEqual(1);
|
||||||
|
expect(result.modifiedCount).toEqual(1);
|
||||||
|
|
||||||
|
const updated = await collection.findOne({ name: 'John Doe' });
|
||||||
|
expect(updated!.age).toEqual(31);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: updateMany - should update multiple documents', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
const result = await collection.updateMany(
|
||||||
|
{ age: { $gte: 30 } },
|
||||||
|
{ $set: { senior: true } }
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(result.acknowledged).toBeTrue();
|
||||||
|
expect(result.matchedCount).toEqual(2);
|
||||||
|
expect(result.modifiedCount).toEqual(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: deleteOne - should delete a single document', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
const result = await collection.deleteOne({ name: 'Bob Smith' });
|
||||||
|
|
||||||
|
expect(result.acknowledged).toBeTrue();
|
||||||
|
expect(result.deletedCount).toEqual(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: deleteMany - should delete multiple documents', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
|
||||||
|
// First add some test docs to delete
|
||||||
|
await collection.insertMany([
|
||||||
|
{ name: 'Delete1', toDelete: true },
|
||||||
|
{ name: 'Delete2', toDelete: true },
|
||||||
|
]);
|
||||||
|
|
||||||
|
const result = await collection.deleteMany({ toDelete: true });
|
||||||
|
|
||||||
|
expect(result.acknowledged).toBeTrue();
|
||||||
|
expect(result.deletedCount).toEqual(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Query Operator Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('tsmdb: query - $eq operator', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
const docs = await collection.find({ name: { $eq: 'Jane Doe' } }).toArray();
|
||||||
|
expect(docs.length).toEqual(1);
|
||||||
|
expect(docs[0].name).toEqual('Jane Doe');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: query - $ne operator', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
const docs = await collection.find({ name: { $ne: 'Jane Doe' } }).toArray();
|
||||||
|
expect(docs.every(d => d.name !== 'Jane Doe')).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: query - $gt and $lt operators', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
const docs = await collection.find({ age: { $gt: 25, $lt: 35 } }).toArray();
|
||||||
|
expect(docs.every(d => d.age > 25 && d.age < 35)).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: query - $in operator', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
const docs = await collection.find({ name: { $in: ['Jane Doe', 'Alice Johnson'] } }).toArray();
|
||||||
|
expect(docs.length).toEqual(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: query - $or operator', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
const docs = await collection.find({
|
||||||
|
$or: [
|
||||||
|
{ name: 'Jane Doe' },
|
||||||
|
{ age: 31 }
|
||||||
|
]
|
||||||
|
}).toArray();
|
||||||
|
expect(docs.length).toBeGreaterThanOrEqual(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: query - $and operator', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
const docs = await collection.find({
|
||||||
|
$and: [
|
||||||
|
{ age: { $gte: 25 } },
|
||||||
|
{ age: { $lte: 30 } }
|
||||||
|
]
|
||||||
|
}).toArray();
|
||||||
|
expect(docs.every(d => d.age >= 25 && d.age <= 30)).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: query - $exists operator', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
const docs = await collection.find({ senior: { $exists: true } }).toArray();
|
||||||
|
expect(docs.every(d => 'senior' in d)).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Update Operator Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('tsmdb: update - $inc operator', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
await collection.updateOne(
|
||||||
|
{ name: 'Jane Doe' },
|
||||||
|
{ $inc: { age: 1 } }
|
||||||
|
);
|
||||||
|
|
||||||
|
const updated = await collection.findOne({ name: 'Jane Doe' });
|
||||||
|
expect(updated!.age).toEqual(26);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: update - $unset operator', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
await collection.updateOne(
|
||||||
|
{ name: 'Jane Doe' },
|
||||||
|
{ $unset: { senior: '' } }
|
||||||
|
);
|
||||||
|
|
||||||
|
const updated = await collection.findOne({ name: 'Jane Doe' });
|
||||||
|
expect('senior' in updated!).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: update - $push operator', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
await collection.updateOne(
|
||||||
|
{ name: 'Jane Doe' },
|
||||||
|
{ $set: { tags: ['developer'] } }
|
||||||
|
);
|
||||||
|
await collection.updateOne(
|
||||||
|
{ name: 'Jane Doe' },
|
||||||
|
{ $push: { tags: 'tester' } }
|
||||||
|
);
|
||||||
|
|
||||||
|
const updated = await collection.findOne({ name: 'Jane Doe' });
|
||||||
|
expect(updated!.tags).toContain('developer');
|
||||||
|
expect(updated!.tags).toContain('tester');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: update - $pull operator', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
await collection.updateOne(
|
||||||
|
{ name: 'Jane Doe' },
|
||||||
|
{ $pull: { tags: 'tester' } }
|
||||||
|
);
|
||||||
|
|
||||||
|
const updated = await collection.findOne({ name: 'Jane Doe' });
|
||||||
|
expect(updated!.tags).not.toContain('tester');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: update - upsert creates new document', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
const result = await collection.updateOne(
|
||||||
|
{ name: 'New User' },
|
||||||
|
{ $set: { email: 'new@example.com', age: 40 } },
|
||||||
|
{ upsert: true }
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(result.upsertedCount).toEqual(1);
|
||||||
|
expect(result.upsertedId).toBeTruthy();
|
||||||
|
|
||||||
|
const inserted = await collection.findOne({ name: 'New User' });
|
||||||
|
expect(inserted).toBeTruthy();
|
||||||
|
expect(inserted!.email).toEqual('new@example.com');
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Cursor Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('tsmdb: cursor - sort', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
const docs = await collection.find({}).sort({ age: -1 }).toArray();
|
||||||
|
|
||||||
|
for (let i = 1; i < docs.length; i++) {
|
||||||
|
if (docs[i-1].age !== undefined && docs[i].age !== undefined) {
|
||||||
|
expect(docs[i-1].age).toBeGreaterThanOrEqual(docs[i].age);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: cursor - limit', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
const docs = await collection.find({}).limit(2).toArray();
|
||||||
|
expect(docs.length).toBeLessThanOrEqual(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: cursor - skip', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
const allDocs = await collection.find({}).toArray();
|
||||||
|
const skippedDocs = await collection.find({}).skip(1).toArray();
|
||||||
|
|
||||||
|
expect(skippedDocs.length).toEqual(Math.max(0, allDocs.length - 1));
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: cursor - project', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
const docs = await collection.find({}).project({ name: 1, _id: 0 }).toArray();
|
||||||
|
|
||||||
|
expect(docs.length).toBeGreaterThan(0);
|
||||||
|
expect(docs[0].name).toBeTruthy();
|
||||||
|
expect(docs[0].email).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// FindOneAnd* Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('tsmdb: findOneAndUpdate - returns updated document', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
const result = await collection.findOneAndUpdate(
|
||||||
|
{ name: 'Jane Doe' },
|
||||||
|
{ $set: { status: 'active' } },
|
||||||
|
{ returnDocument: 'after' }
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(result).toBeTruthy();
|
||||||
|
expect(result!.status).toEqual('active');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: findOneAndDelete - returns deleted document', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
|
||||||
|
// Insert a temp doc to delete
|
||||||
|
await collection.insertOne({ name: 'TempUser', temp: true });
|
||||||
|
|
||||||
|
const result = await collection.findOneAndDelete({ name: 'TempUser' });
|
||||||
|
|
||||||
|
expect(result).toBeTruthy();
|
||||||
|
expect(result!.name).toEqual('TempUser');
|
||||||
|
|
||||||
|
// Verify deleted
|
||||||
|
const found = await collection.findOne({ name: 'TempUser' });
|
||||||
|
expect(found).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Count and Distinct Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('tsmdb: countDocuments - counts matching documents', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
const count = await collection.countDocuments({ age: { $gte: 25 } });
|
||||||
|
expect(count).toBeGreaterThan(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: estimatedDocumentCount - returns total count', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
const count = await collection.estimatedDocumentCount();
|
||||||
|
expect(count).toBeGreaterThan(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: distinct - returns unique values', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
const names = await collection.distinct('name');
|
||||||
|
|
||||||
|
expect(names.length).toBeGreaterThan(0);
|
||||||
|
// All names should be unique
|
||||||
|
expect(new Set(names).size).toEqual(names.length);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Index Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('tsmdb: createIndex - creates a single index', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
const indexName = await collection.createIndex({ email: 1 });
|
||||||
|
|
||||||
|
expect(indexName).toBeTruthy();
|
||||||
|
expect(indexName).toContain('email');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: createIndex - creates compound index', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
const indexName = await collection.createIndex({ name: 1, age: -1 });
|
||||||
|
|
||||||
|
expect(indexName).toBeTruthy();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: listIndexes - lists all indexes', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
const indexes = await collection.listIndexes().toArray();
|
||||||
|
|
||||||
|
expect(indexes.length).toBeGreaterThanOrEqual(1); // At least _id index
|
||||||
|
expect(indexes.some(i => i.name === '_id_')).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: dropIndex - drops an index', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
const indexName = await collection.createIndex({ toDropField: 1 });
|
||||||
|
|
||||||
|
await collection.dropIndex(indexName);
|
||||||
|
|
||||||
|
const indexes = await collection.listIndexes().toArray();
|
||||||
|
expect(indexes.some(i => i.name === indexName)).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Aggregation Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('tsmdb: aggregate - $match stage', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
const results = await collection.aggregate([
|
||||||
|
{ $match: { age: { $gte: 25 } } }
|
||||||
|
]).toArray();
|
||||||
|
|
||||||
|
expect(results.length).toBeGreaterThan(0);
|
||||||
|
expect(results.every(d => d.age >= 25)).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: aggregate - $project stage', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
const results = await collection.aggregate([
|
||||||
|
{ $project: { name: 1, _id: 0 } }
|
||||||
|
]).toArray();
|
||||||
|
|
||||||
|
expect(results.length).toBeGreaterThan(0);
|
||||||
|
expect(results[0].name).toBeTruthy();
|
||||||
|
expect(results[0].email).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: aggregate - $sort stage', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
const results = await collection.aggregate([
|
||||||
|
{ $match: { age: { $exists: true } } },
|
||||||
|
{ $sort: { age: 1 } }
|
||||||
|
]).toArray();
|
||||||
|
|
||||||
|
for (let i = 1; i < results.length; i++) {
|
||||||
|
expect(results[i].age).toBeGreaterThanOrEqual(results[i-1].age);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: aggregate - $group stage', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
|
||||||
|
// Add some categorized data
|
||||||
|
await collection.insertMany([
|
||||||
|
{ name: 'GroupTest1', category: 'A', value: 10 },
|
||||||
|
{ name: 'GroupTest2', category: 'A', value: 20 },
|
||||||
|
{ name: 'GroupTest3', category: 'B', value: 30 },
|
||||||
|
]);
|
||||||
|
|
||||||
|
const results = await collection.aggregate([
|
||||||
|
{ $match: { category: { $exists: true } } },
|
||||||
|
{ $group: { _id: '$category', total: { $sum: '$value' } } }
|
||||||
|
]).toArray();
|
||||||
|
|
||||||
|
expect(results.length).toEqual(2);
|
||||||
|
const groupA = results.find(r => r._id === 'A');
|
||||||
|
const groupB = results.find(r => r._id === 'B');
|
||||||
|
expect(groupA!.total).toEqual(30);
|
||||||
|
expect(groupB!.total).toEqual(30);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: aggregate - $limit and $skip stages', async () => {
|
||||||
|
const collection = db.collection('users');
|
||||||
|
const results = await collection.aggregate([
|
||||||
|
{ $skip: 1 },
|
||||||
|
{ $limit: 2 }
|
||||||
|
]).toArray();
|
||||||
|
|
||||||
|
expect(results.length).toBeLessThanOrEqual(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Bulk Operations Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('tsmdb: bulkWrite - executes multiple operations', async () => {
|
||||||
|
const collection = db.collection('bulktest');
|
||||||
|
|
||||||
|
const result = await collection.bulkWrite([
|
||||||
|
{ insertOne: { document: { name: 'Bulk1', value: 1 } } },
|
||||||
|
{ insertOne: { document: { name: 'Bulk2', value: 2 } } },
|
||||||
|
{ updateOne: { filter: { name: 'Bulk1' }, update: { $set: { updated: true } } } },
|
||||||
|
]);
|
||||||
|
|
||||||
|
expect(result.insertedCount).toEqual(2);
|
||||||
|
expect(result.modifiedCount).toEqual(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Database Operations Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('tsmdb: listCollections - lists all collections', async () => {
|
||||||
|
const collections = await db.listCollections().toArray();
|
||||||
|
expect(collections.length).toBeGreaterThan(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: createCollection - creates a new collection', async () => {
|
||||||
|
await db.createCollection('newcollection');
|
||||||
|
const collections = await db.listCollections().toArray();
|
||||||
|
expect(collections.some(c => c.name === 'newcollection')).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: dropCollection - drops a collection', async () => {
|
||||||
|
await db.createCollection('todrop');
|
||||||
|
await db.dropCollection('todrop');
|
||||||
|
const collections = await db.listCollections().toArray();
|
||||||
|
expect(collections.some(c => c.name === 'todrop')).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Admin Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('tsmdb: admin - listDatabases', async () => {
|
||||||
|
const admin = client.db().admin();
|
||||||
|
const result = await admin.listDatabases();
|
||||||
|
expect(result.ok).toEqual(1);
|
||||||
|
expect(result.databases).toBeArray();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: admin - serverStatus', async () => {
|
||||||
|
const admin = client.db().admin();
|
||||||
|
const status = await admin.serverStatus();
|
||||||
|
expect(status.ok).toEqual(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: admin - ping', async () => {
|
||||||
|
const admin = client.db().admin();
|
||||||
|
const result = await admin.ping();
|
||||||
|
expect(result.ok).toEqual(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Replace Operations Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('tsmdb: replaceOne - replaces entire document', async () => {
|
||||||
|
const collection = db.collection('replacetest');
|
||||||
|
await collection.insertOne({ name: 'Original', field1: 'value1', field2: 'value2' });
|
||||||
|
|
||||||
|
const result = await collection.replaceOne(
|
||||||
|
{ name: 'Original' },
|
||||||
|
{ name: 'Replaced', newField: 'newValue' }
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(result.matchedCount).toEqual(1);
|
||||||
|
expect(result.modifiedCount).toEqual(1);
|
||||||
|
|
||||||
|
const replaced = await collection.findOne({ name: 'Replaced' });
|
||||||
|
expect(replaced).toBeTruthy();
|
||||||
|
expect(replaced!.newField).toEqual('newValue');
|
||||||
|
expect(replaced!.field1).toBeUndefined();
|
||||||
|
expect(replaced!.field2).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: findOneAndReplace - returns replaced document', async () => {
|
||||||
|
const collection = db.collection('replacetest');
|
||||||
|
await collection.insertOne({ name: 'ToReplace', data: 'old' });
|
||||||
|
|
||||||
|
const result = await collection.findOneAndReplace(
|
||||||
|
{ name: 'ToReplace' },
|
||||||
|
{ name: 'Replaced', data: 'new' },
|
||||||
|
{ returnDocument: 'after' }
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(result).toBeTruthy();
|
||||||
|
expect(result!.data).toEqual('new');
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Cleanup
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('tsmdb: cleanup - drop database', async () => {
|
||||||
|
const result = await db.dropDatabase();
|
||||||
|
expect(result).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('tsmdb: cleanup - close client and server', async () => {
|
||||||
|
await client.close();
|
||||||
|
await server.stop();
|
||||||
|
expect(server.running).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
export default tap.start();
|
||||||
411
test/test.tsmdb.wal.ts
Normal file
411
test/test.tsmdb.wal.ts
Normal file
@@ -0,0 +1,411 @@
|
|||||||
|
import { expect, tap } from '@git.zone/tstest/tapbundle';
|
||||||
|
import * as smartmongo from '../ts/index.js';
|
||||||
|
import * as path from 'path';
|
||||||
|
import * as fs from 'fs/promises';
|
||||||
|
|
||||||
|
const { WAL, ObjectId } = smartmongo.tsmdb;
|
||||||
|
|
||||||
|
let wal: InstanceType<typeof WAL>;
|
||||||
|
const TEST_WAL_PATH = '/tmp/tsmdb-test-wal/test.wal';
|
||||||
|
|
||||||
|
// Helper to clean up test files
|
||||||
|
async function cleanupTestFiles() {
|
||||||
|
try {
|
||||||
|
await fs.rm('/tmp/tsmdb-test-wal', { recursive: true, force: true });
|
||||||
|
} catch {
|
||||||
|
// Ignore if doesn't exist
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Setup
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('wal: cleanup before tests', async () => {
|
||||||
|
await cleanupTestFiles();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('wal: should create WAL instance', async () => {
|
||||||
|
wal = new WAL(TEST_WAL_PATH, { checkpointInterval: 100 });
|
||||||
|
expect(wal).toBeTruthy();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('wal: should initialize WAL', async () => {
|
||||||
|
const result = await wal.initialize();
|
||||||
|
expect(result).toBeTruthy();
|
||||||
|
expect(result.recoveredEntries).toBeArray();
|
||||||
|
expect(result.recoveredEntries.length).toEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// LSN Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('wal: getCurrentLsn should return 0 initially', async () => {
|
||||||
|
const lsn = wal.getCurrentLsn();
|
||||||
|
expect(lsn).toEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('wal: LSN should increment after logging', async () => {
|
||||||
|
const doc = { _id: new ObjectId(), name: 'Test' };
|
||||||
|
const lsn = await wal.logInsert('testdb', 'testcoll', doc as any);
|
||||||
|
|
||||||
|
expect(lsn).toEqual(1);
|
||||||
|
expect(wal.getCurrentLsn()).toEqual(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Insert Logging Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('wal: logInsert should create entry with correct structure', async () => {
|
||||||
|
const doc = { _id: new ObjectId(), name: 'InsertTest', value: 42 };
|
||||||
|
const lsn = await wal.logInsert('testdb', 'insertcoll', doc as any);
|
||||||
|
|
||||||
|
expect(lsn).toBeGreaterThan(0);
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(lsn - 1);
|
||||||
|
const entry = entries.find(e => e.lsn === lsn);
|
||||||
|
|
||||||
|
expect(entry).toBeTruthy();
|
||||||
|
expect(entry!.operation).toEqual('insert');
|
||||||
|
expect(entry!.dbName).toEqual('testdb');
|
||||||
|
expect(entry!.collName).toEqual('insertcoll');
|
||||||
|
expect(entry!.documentId).toEqual(doc._id.toHexString());
|
||||||
|
expect(entry!.data).toBeTruthy();
|
||||||
|
expect(entry!.timestamp).toBeGreaterThan(0);
|
||||||
|
expect(entry!.checksum).toBeGreaterThan(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('wal: logInsert with transaction ID', async () => {
|
||||||
|
const doc = { _id: new ObjectId(), name: 'TxnInsertTest' };
|
||||||
|
const lsn = await wal.logInsert('testdb', 'insertcoll', doc as any, 'txn-123');
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(lsn - 1);
|
||||||
|
const entry = entries.find(e => e.lsn === lsn);
|
||||||
|
|
||||||
|
expect(entry!.txnId).toEqual('txn-123');
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Update Logging Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('wal: logUpdate should store old and new document', async () => {
|
||||||
|
const oldDoc = { _id: new ObjectId(), name: 'OldName', value: 1 };
|
||||||
|
const newDoc = { ...oldDoc, name: 'NewName', value: 2 };
|
||||||
|
|
||||||
|
const lsn = await wal.logUpdate('testdb', 'updatecoll', oldDoc as any, newDoc as any);
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(lsn - 1);
|
||||||
|
const entry = entries.find(e => e.lsn === lsn);
|
||||||
|
|
||||||
|
expect(entry).toBeTruthy();
|
||||||
|
expect(entry!.operation).toEqual('update');
|
||||||
|
expect(entry!.data).toBeTruthy();
|
||||||
|
expect(entry!.previousData).toBeTruthy();
|
||||||
|
expect(entry!.documentId).toEqual(oldDoc._id.toHexString());
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Delete Logging Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('wal: logDelete should record deleted document', async () => {
|
||||||
|
const doc = { _id: new ObjectId(), name: 'ToDelete' };
|
||||||
|
|
||||||
|
const lsn = await wal.logDelete('testdb', 'deletecoll', doc as any);
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(lsn - 1);
|
||||||
|
const entry = entries.find(e => e.lsn === lsn);
|
||||||
|
|
||||||
|
expect(entry).toBeTruthy();
|
||||||
|
expect(entry!.operation).toEqual('delete');
|
||||||
|
expect(entry!.previousData).toBeTruthy();
|
||||||
|
expect(entry!.data).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Transaction Logging Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('wal: logBeginTransaction should create begin entry', async () => {
|
||||||
|
const lsn = await wal.logBeginTransaction('txn-begin-test');
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(lsn - 1);
|
||||||
|
const entry = entries.find(e => e.lsn === lsn);
|
||||||
|
|
||||||
|
expect(entry).toBeTruthy();
|
||||||
|
expect(entry!.operation).toEqual('begin');
|
||||||
|
expect(entry!.txnId).toEqual('txn-begin-test');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('wal: logCommitTransaction should create commit entry', async () => {
|
||||||
|
const lsn = await wal.logCommitTransaction('txn-commit-test');
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(lsn - 1);
|
||||||
|
const entry = entries.find(e => e.lsn === lsn);
|
||||||
|
|
||||||
|
expect(entry).toBeTruthy();
|
||||||
|
expect(entry!.operation).toEqual('commit');
|
||||||
|
expect(entry!.txnId).toEqual('txn-commit-test');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('wal: logAbortTransaction should create abort entry', async () => {
|
||||||
|
const lsn = await wal.logAbortTransaction('txn-abort-test');
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(lsn - 1);
|
||||||
|
const entry = entries.find(e => e.lsn === lsn);
|
||||||
|
|
||||||
|
expect(entry).toBeTruthy();
|
||||||
|
expect(entry!.operation).toEqual('abort');
|
||||||
|
expect(entry!.txnId).toEqual('txn-abort-test');
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// getTransactionEntries Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('wal: getTransactionEntries should return entries for transaction', async () => {
|
||||||
|
// Log a complete transaction
|
||||||
|
const txnId = 'txn-entries-test';
|
||||||
|
await wal.logBeginTransaction(txnId);
|
||||||
|
|
||||||
|
const doc1 = { _id: new ObjectId(), name: 'TxnDoc1' };
|
||||||
|
await wal.logInsert('testdb', 'txncoll', doc1 as any, txnId);
|
||||||
|
|
||||||
|
const doc2 = { _id: new ObjectId(), name: 'TxnDoc2' };
|
||||||
|
await wal.logInsert('testdb', 'txncoll', doc2 as any, txnId);
|
||||||
|
|
||||||
|
await wal.logCommitTransaction(txnId);
|
||||||
|
|
||||||
|
const entries = wal.getTransactionEntries(txnId);
|
||||||
|
|
||||||
|
expect(entries.length).toEqual(4); // begin + 2 inserts + commit
|
||||||
|
expect(entries.every(e => e.txnId === txnId)).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('wal: getTransactionEntries should return empty for unknown transaction', async () => {
|
||||||
|
const entries = wal.getTransactionEntries('unknown-txn-id');
|
||||||
|
expect(entries.length).toEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// getEntriesAfter Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('wal: getEntriesAfter should filter by LSN', async () => {
|
||||||
|
const currentLsn = wal.getCurrentLsn();
|
||||||
|
|
||||||
|
// Add more entries
|
||||||
|
const doc = { _id: new ObjectId(), name: 'AfterTest' };
|
||||||
|
await wal.logInsert('testdb', 'aftercoll', doc as any);
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(currentLsn);
|
||||||
|
expect(entries.length).toEqual(1);
|
||||||
|
expect(entries[0].lsn).toBeGreaterThan(currentLsn);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('wal: getEntriesAfter with LSN 0 should return all entries', async () => {
|
||||||
|
const entries = wal.getEntriesAfter(0);
|
||||||
|
expect(entries.length).toBeGreaterThan(0);
|
||||||
|
expect(entries.length).toEqual(wal.getCurrentLsn());
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Checkpoint Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('wal: checkpoint should create checkpoint entry', async () => {
|
||||||
|
const lsn = await wal.checkpoint();
|
||||||
|
|
||||||
|
expect(lsn).toBeGreaterThan(0);
|
||||||
|
|
||||||
|
// After checkpoint, getEntriesAfter(checkpoint) should be limited
|
||||||
|
const entries = wal.getEntriesAfter(0);
|
||||||
|
expect(entries.some(e => e.operation === 'checkpoint')).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Document Recovery Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('wal: recoverDocument should deserialize document from entry', async () => {
|
||||||
|
const originalDoc = { _id: new ObjectId(), name: 'RecoverTest', nested: { a: 1, b: 2 } };
|
||||||
|
const lsn = await wal.logInsert('testdb', 'recovercoll', originalDoc as any);
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(lsn - 1);
|
||||||
|
const entry = entries.find(e => e.lsn === lsn);
|
||||||
|
|
||||||
|
const recovered = wal.recoverDocument(entry!);
|
||||||
|
|
||||||
|
expect(recovered).toBeTruthy();
|
||||||
|
expect(recovered!.name).toEqual('RecoverTest');
|
||||||
|
expect(recovered!.nested.a).toEqual(1);
|
||||||
|
expect(recovered!.nested.b).toEqual(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('wal: recoverDocument should return null for entry without data', async () => {
|
||||||
|
const lsn = await wal.logBeginTransaction('recover-no-data');
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(lsn - 1);
|
||||||
|
const entry = entries.find(e => e.lsn === lsn);
|
||||||
|
|
||||||
|
const recovered = wal.recoverDocument(entry!);
|
||||||
|
expect(recovered).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('wal: recoverPreviousDocument should deserialize previous state', async () => {
|
||||||
|
const oldDoc = { _id: new ObjectId(), name: 'Old', value: 100 };
|
||||||
|
const newDoc = { ...oldDoc, name: 'New', value: 200 };
|
||||||
|
|
||||||
|
const lsn = await wal.logUpdate('testdb', 'recovercoll', oldDoc as any, newDoc as any);
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(lsn - 1);
|
||||||
|
const entry = entries.find(e => e.lsn === lsn);
|
||||||
|
|
||||||
|
const previous = wal.recoverPreviousDocument(entry!);
|
||||||
|
|
||||||
|
expect(previous).toBeTruthy();
|
||||||
|
expect(previous!.name).toEqual('Old');
|
||||||
|
expect(previous!.value).toEqual(100);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('wal: recoverPreviousDocument should return null for insert entry', async () => {
|
||||||
|
const doc = { _id: new ObjectId(), name: 'NoPrevious' };
|
||||||
|
const lsn = await wal.logInsert('testdb', 'recovercoll', doc as any);
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(lsn - 1);
|
||||||
|
const entry = entries.find(e => e.lsn === lsn);
|
||||||
|
|
||||||
|
const previous = wal.recoverPreviousDocument(entry!);
|
||||||
|
expect(previous).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// WAL Persistence and Recovery Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('wal: should persist and recover entries', async () => {
|
||||||
|
// Close current WAL
|
||||||
|
await wal.close();
|
||||||
|
|
||||||
|
// Create new WAL instance and initialize (should recover)
|
||||||
|
const wal2 = new WAL(TEST_WAL_PATH, { checkpointInterval: 100 });
|
||||||
|
const result = await wal2.initialize();
|
||||||
|
|
||||||
|
// Should have recovered entries
|
||||||
|
expect(result.recoveredEntries).toBeArray();
|
||||||
|
// After checkpoint, there might not be many recoverable entries
|
||||||
|
// but getCurrentLsn should be preserved or reset
|
||||||
|
|
||||||
|
await wal2.close();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Entry Checksum Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('wal: entries should have valid checksums', async () => {
|
||||||
|
wal = new WAL(TEST_WAL_PATH + '.checksum', { checkpointInterval: 100 });
|
||||||
|
await wal.initialize();
|
||||||
|
|
||||||
|
const doc = { _id: new ObjectId(), name: 'ChecksumTest' };
|
||||||
|
const lsn = await wal.logInsert('testdb', 'checksumcoll', doc as any);
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(lsn - 1);
|
||||||
|
const entry = entries.find(e => e.lsn === lsn);
|
||||||
|
|
||||||
|
expect(entry!.checksum).toBeGreaterThan(0);
|
||||||
|
expect(typeof entry!.checksum).toEqual('number');
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Edge Cases
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('wal: should handle special characters in document', async () => {
|
||||||
|
const doc = {
|
||||||
|
_id: new ObjectId(),
|
||||||
|
name: 'Test\nWith\tSpecial\r\nChars',
|
||||||
|
emoji: '🎉',
|
||||||
|
unicode: '日本語',
|
||||||
|
};
|
||||||
|
|
||||||
|
const lsn = await wal.logInsert('testdb', 'specialcoll', doc as any);
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(lsn - 1);
|
||||||
|
const entry = entries.find(e => e.lsn === lsn);
|
||||||
|
|
||||||
|
const recovered = wal.recoverDocument(entry!);
|
||||||
|
expect(recovered!.name).toEqual('Test\nWith\tSpecial\r\nChars');
|
||||||
|
expect(recovered!.emoji).toEqual('🎉');
|
||||||
|
expect(recovered!.unicode).toEqual('日本語');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('wal: should handle binary data in documents', async () => {
|
||||||
|
const doc = {
|
||||||
|
_id: new ObjectId(),
|
||||||
|
binaryField: Buffer.from([0x00, 0xFF, 0x7F, 0x80]),
|
||||||
|
};
|
||||||
|
|
||||||
|
const lsn = await wal.logInsert('testdb', 'binarycoll', doc as any);
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(lsn - 1);
|
||||||
|
const entry = entries.find(e => e.lsn === lsn);
|
||||||
|
|
||||||
|
const recovered = wal.recoverDocument(entry!);
|
||||||
|
expect(recovered).toBeTruthy();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('wal: should handle nested documents', async () => {
|
||||||
|
const doc = {
|
||||||
|
_id: new ObjectId(),
|
||||||
|
level1: {
|
||||||
|
level2: {
|
||||||
|
level3: {
|
||||||
|
value: 'deep',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const lsn = await wal.logInsert('testdb', 'nestedcoll', doc as any);
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(lsn - 1);
|
||||||
|
const entry = entries.find(e => e.lsn === lsn);
|
||||||
|
|
||||||
|
const recovered = wal.recoverDocument(entry!);
|
||||||
|
expect(recovered!.level1.level2.level3.value).toEqual('deep');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('wal: should handle arrays in documents', async () => {
|
||||||
|
const doc = {
|
||||||
|
_id: new ObjectId(),
|
||||||
|
tags: ['a', 'b', 'c'],
|
||||||
|
numbers: [1, 2, 3],
|
||||||
|
mixed: [1, 'two', { three: 3 }],
|
||||||
|
};
|
||||||
|
|
||||||
|
const lsn = await wal.logInsert('testdb', 'arraycoll', doc as any);
|
||||||
|
|
||||||
|
const entries = wal.getEntriesAfter(lsn - 1);
|
||||||
|
const entry = entries.find(e => e.lsn === lsn);
|
||||||
|
|
||||||
|
const recovered = wal.recoverDocument(entry!);
|
||||||
|
expect(recovered!.tags).toEqual(['a', 'b', 'c']);
|
||||||
|
expect(recovered!.numbers).toEqual([1, 2, 3]);
|
||||||
|
expect(recovered!.mixed[2].three).toEqual(3);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Cleanup
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
tap.test('wal: cleanup', async () => {
|
||||||
|
await wal.close();
|
||||||
|
await cleanupTestFiles();
|
||||||
|
expect(true).toBeTrue();
|
||||||
|
});
|
||||||
|
|
||||||
|
export default tap.start();
|
||||||
@@ -3,6 +3,6 @@
|
|||||||
*/
|
*/
|
||||||
export const commitinfo = {
|
export const commitinfo = {
|
||||||
name: '@push.rocks/smartmongo',
|
name: '@push.rocks/smartmongo',
|
||||||
version: '2.0.14',
|
version: '4.3.0',
|
||||||
description: 'A module for creating and managing a local MongoDB instance for testing purposes.'
|
description: 'A module for creating and managing a local MongoDB instance for testing purposes.'
|
||||||
}
|
}
|
||||||
|
|||||||
75
ts/index.ts
75
ts/index.ts
@@ -1,71 +1,14 @@
|
|||||||
import { commitinfo } from './00_commitinfo_data.js';
|
import { commitinfo } from './00_commitinfo_data.js';
|
||||||
import * as plugins from './smartmongo.plugins.js';
|
|
||||||
|
|
||||||
export class SmartMongo {
|
// Export SmartMongo from ts_mongotools
|
||||||
// STATIC
|
export { SmartMongo } from './ts_mongotools/index.js';
|
||||||
public static async createAndStart(replCountArg: number = 1) {
|
|
||||||
const smartMongoInstance = new SmartMongo();
|
|
||||||
await smartMongoInstance.start(replCountArg);
|
|
||||||
return smartMongoInstance;
|
|
||||||
}
|
|
||||||
|
|
||||||
// INSTANCE
|
// Export TsmDB module
|
||||||
private _readyDeferred = plugins.smartpromise.defer();
|
export * as tsmdb from './ts_tsmdb/index.js';
|
||||||
public readyPromise = this._readyDeferred.promise;
|
|
||||||
public mongoReplicaSet: plugins.mongoPlugin.MongoMemoryReplSet;
|
|
||||||
|
|
||||||
constructor() {}
|
// Export LocalTsmDb from ts_local
|
||||||
|
export { LocalTsmDb } from './ts_local/index.js';
|
||||||
|
export type { ILocalTsmDbOptions } from './ts_local/index.js';
|
||||||
|
|
||||||
public async start(countArg: number = 1) {
|
// Export commitinfo
|
||||||
this.mongoReplicaSet = await plugins.mongoPlugin.MongoMemoryReplSet.create({
|
export { commitinfo };
|
||||||
replSet: { count: countArg },
|
|
||||||
instanceOpts: [
|
|
||||||
{
|
|
||||||
storageEngine: 'wiredTiger',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
});
|
|
||||||
this._readyDeferred.resolve();
|
|
||||||
console.log(`mongoReplicaSet with ${countArg} replicas started.`);
|
|
||||||
console.log(`@pushrocks/smartmongo version ${commitinfo.version}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* returns a mongo descriptor for modules like
|
|
||||||
* @pushrocks/smartfile.
|
|
||||||
*/
|
|
||||||
public async getMongoDescriptor(): Promise<plugins.smartdata.IMongoDescriptor> {
|
|
||||||
await this.readyPromise;
|
|
||||||
return {
|
|
||||||
mongoDbName: `smartmongo_testdatabase`,
|
|
||||||
mongoDbUrl: this.mongoReplicaSet.getUri(),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* stops the smartmongo instance
|
|
||||||
* and cleans up after itself
|
|
||||||
*/
|
|
||||||
public async stop() {
|
|
||||||
await this.mongoReplicaSet.stop();
|
|
||||||
await this.mongoReplicaSet.cleanup();
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* like stop() but allows you to actually store
|
|
||||||
* the database on disk
|
|
||||||
*/
|
|
||||||
public async stopAndDumpToDir(
|
|
||||||
dirArg: string,
|
|
||||||
nameFunctionArg?: (doc: any) => string,
|
|
||||||
emptyDirArg = true,
|
|
||||||
) {
|
|
||||||
const mongodumpInstance = new plugins.mongodump.MongoDump();
|
|
||||||
const mongodumpTarget = await mongodumpInstance.addMongoTargetByMongoDescriptor(
|
|
||||||
await this.getMongoDescriptor(),
|
|
||||||
);
|
|
||||||
await mongodumpTarget.dumpAllCollectionsToDir(dirArg, nameFunctionArg, emptyDirArg);
|
|
||||||
await mongodumpInstance.stop();
|
|
||||||
await this.stop();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|||||||
138
ts/ts_local/classes.localtsmdb.ts
Normal file
138
ts/ts_local/classes.localtsmdb.ts
Normal file
@@ -0,0 +1,138 @@
|
|||||||
|
import * as plugins from './plugins.js';
|
||||||
|
import { TsmdbServer } from '../ts_tsmdb/index.js';
|
||||||
|
import type { MongoClient } from 'mongodb';
|
||||||
|
|
||||||
|
export interface ILocalTsmDbOptions {
|
||||||
|
folderPath: string;
|
||||||
|
port?: number;
|
||||||
|
host?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* LocalTsmDb - Convenience class for local MongoDB-compatible database
|
||||||
|
*
|
||||||
|
* This class wraps TsmdbServer and provides a simple interface for
|
||||||
|
* starting a local file-based MongoDB-compatible server and connecting to it.
|
||||||
|
*
|
||||||
|
* @example
|
||||||
|
* ```typescript
|
||||||
|
* import { LocalTsmDb } from '@push.rocks/smartmongo';
|
||||||
|
*
|
||||||
|
* const db = new LocalTsmDb({ folderPath: './data' });
|
||||||
|
* const client = await db.start();
|
||||||
|
*
|
||||||
|
* // Use the MongoDB client
|
||||||
|
* const collection = client.db('mydb').collection('users');
|
||||||
|
* await collection.insertOne({ name: 'Alice' });
|
||||||
|
*
|
||||||
|
* // When done
|
||||||
|
* await db.stop();
|
||||||
|
* ```
|
||||||
|
*/
|
||||||
|
export class LocalTsmDb {
|
||||||
|
private options: ILocalTsmDbOptions;
|
||||||
|
private server: TsmdbServer | null = null;
|
||||||
|
private client: MongoClient | null = null;
|
||||||
|
|
||||||
|
constructor(options: ILocalTsmDbOptions) {
|
||||||
|
this.options = options;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find an available port starting from the given port
|
||||||
|
*/
|
||||||
|
private async findAvailablePort(startPort = 27017): Promise<number> {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
const server = plugins.net.createServer();
|
||||||
|
server.listen(startPort, '127.0.0.1', () => {
|
||||||
|
const addr = server.address();
|
||||||
|
const port = typeof addr === 'object' && addr ? addr.port : startPort;
|
||||||
|
server.close(() => resolve(port));
|
||||||
|
});
|
||||||
|
server.on('error', () => {
|
||||||
|
this.findAvailablePort(startPort + 1).then(resolve).catch(reject);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Start the local TsmDB server and return a connected MongoDB client
|
||||||
|
*/
|
||||||
|
async start(): Promise<MongoClient> {
|
||||||
|
if (this.server && this.client) {
|
||||||
|
throw new Error('LocalTsmDb is already running');
|
||||||
|
}
|
||||||
|
|
||||||
|
const port = this.options.port ?? await this.findAvailablePort();
|
||||||
|
const host = this.options.host ?? '127.0.0.1';
|
||||||
|
|
||||||
|
this.server = new TsmdbServer({
|
||||||
|
port,
|
||||||
|
host,
|
||||||
|
storage: 'file',
|
||||||
|
storagePath: this.options.folderPath,
|
||||||
|
});
|
||||||
|
await this.server.start();
|
||||||
|
|
||||||
|
// Dynamically import mongodb to avoid requiring it as a hard dependency
|
||||||
|
const mongodb = await import('mongodb');
|
||||||
|
this.client = new mongodb.MongoClient(this.server.getConnectionUri(), {
|
||||||
|
directConnection: true,
|
||||||
|
serverSelectionTimeoutMS: 5000,
|
||||||
|
});
|
||||||
|
await this.client.connect();
|
||||||
|
|
||||||
|
return this.client;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the MongoDB client (throws if not started)
|
||||||
|
*/
|
||||||
|
getClient(): MongoClient {
|
||||||
|
if (!this.client) {
|
||||||
|
throw new Error('LocalTsmDb is not running. Call start() first.');
|
||||||
|
}
|
||||||
|
return this.client;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the underlying TsmdbServer instance (throws if not started)
|
||||||
|
*/
|
||||||
|
getServer(): TsmdbServer {
|
||||||
|
if (!this.server) {
|
||||||
|
throw new Error('LocalTsmDb is not running. Call start() first.');
|
||||||
|
}
|
||||||
|
return this.server;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the connection URI
|
||||||
|
*/
|
||||||
|
getConnectionUri(): string {
|
||||||
|
if (!this.server) {
|
||||||
|
throw new Error('LocalTsmDb is not running. Call start() first.');
|
||||||
|
}
|
||||||
|
return this.server.getConnectionUri();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if the server is running
|
||||||
|
*/
|
||||||
|
get running(): boolean {
|
||||||
|
return this.server !== null && this.server.running;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Stop the local TsmDB server and close the client connection
|
||||||
|
*/
|
||||||
|
async stop(): Promise<void> {
|
||||||
|
if (this.client) {
|
||||||
|
await this.client.close();
|
||||||
|
this.client = null;
|
||||||
|
}
|
||||||
|
if (this.server) {
|
||||||
|
await this.server.stop();
|
||||||
|
this.server = null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
2
ts/ts_local/index.ts
Normal file
2
ts/ts_local/index.ts
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
export { LocalTsmDb } from './classes.localtsmdb.js';
|
||||||
|
export type { ILocalTsmDbOptions } from './classes.localtsmdb.js';
|
||||||
4
ts/ts_local/plugins.ts
Normal file
4
ts/ts_local/plugins.ts
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
import * as smartpromise from '@push.rocks/smartpromise';
|
||||||
|
import * as net from 'net';
|
||||||
|
|
||||||
|
export { smartpromise, net };
|
||||||
71
ts/ts_mongotools/classes.smartmongo.ts
Normal file
71
ts/ts_mongotools/classes.smartmongo.ts
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|
import { commitinfo } from '../00_commitinfo_data.js';
|
||||||
|
import * as plugins from './plugins.js';
|
||||||
|
|
||||||
|
export class SmartMongo {
|
||||||
|
// STATIC
|
||||||
|
public static async createAndStart(replCountArg: number = 1) {
|
||||||
|
const smartMongoInstance = new SmartMongo();
|
||||||
|
await smartMongoInstance.start(replCountArg);
|
||||||
|
return smartMongoInstance;
|
||||||
|
}
|
||||||
|
|
||||||
|
// INSTANCE
|
||||||
|
private _readyDeferred = plugins.smartpromise.defer();
|
||||||
|
public readyPromise = this._readyDeferred.promise;
|
||||||
|
public mongoReplicaSet: plugins.mongoPlugin.MongoMemoryReplSet;
|
||||||
|
|
||||||
|
constructor() {}
|
||||||
|
|
||||||
|
public async start(countArg: number = 1) {
|
||||||
|
this.mongoReplicaSet = await plugins.mongoPlugin.MongoMemoryReplSet.create({
|
||||||
|
replSet: { count: countArg },
|
||||||
|
instanceOpts: [
|
||||||
|
{
|
||||||
|
storageEngine: 'wiredTiger',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
});
|
||||||
|
this._readyDeferred.resolve();
|
||||||
|
console.log(`mongoReplicaSet with ${countArg} replicas started.`);
|
||||||
|
console.log(`@pushrocks/smartmongo version ${commitinfo.version}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* returns a mongo descriptor for modules like
|
||||||
|
* @pushrocks/smartfile.
|
||||||
|
*/
|
||||||
|
public async getMongoDescriptor(): Promise<plugins.smartdata.IMongoDescriptor> {
|
||||||
|
await this.readyPromise;
|
||||||
|
return {
|
||||||
|
mongoDbName: `smartmongo_testdatabase`,
|
||||||
|
mongoDbUrl: this.mongoReplicaSet.getUri(),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* stops the smartmongo instance
|
||||||
|
* and cleans up after itself
|
||||||
|
*/
|
||||||
|
public async stop() {
|
||||||
|
await this.mongoReplicaSet.stop();
|
||||||
|
await this.mongoReplicaSet.cleanup();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* like stop() but allows you to actually store
|
||||||
|
* the database on disk
|
||||||
|
*/
|
||||||
|
public async stopAndDumpToDir(
|
||||||
|
dirArg: string,
|
||||||
|
nameFunctionArg?: (doc: any) => string,
|
||||||
|
emptyDirArg = true,
|
||||||
|
) {
|
||||||
|
const mongodumpInstance = new plugins.mongodump.MongoDump();
|
||||||
|
const mongodumpTarget = await mongodumpInstance.addMongoTargetByMongoDescriptor(
|
||||||
|
await this.getMongoDescriptor(),
|
||||||
|
);
|
||||||
|
await mongodumpTarget.dumpAllCollectionsToDir(dirArg, nameFunctionArg, emptyDirArg);
|
||||||
|
await mongodumpInstance.stop();
|
||||||
|
await this.stop();
|
||||||
|
}
|
||||||
|
}
|
||||||
2
ts/ts_mongotools/index.ts
Normal file
2
ts/ts_mongotools/index.ts
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
export * from './plugins.js';
|
||||||
|
export { SmartMongo } from './classes.smartmongo.js';
|
||||||
283
ts/ts_tsmdb/engine/AggregationEngine.ts
Normal file
283
ts/ts_tsmdb/engine/AggregationEngine.ts
Normal file
@@ -0,0 +1,283 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import type { Document, IStoredDocument, IAggregateOptions } from '../types/interfaces.js';
|
||||||
|
|
||||||
|
// Import mingo Aggregator
|
||||||
|
import { Aggregator } from 'mingo';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Aggregation engine using mingo for MongoDB-compatible aggregation pipeline execution
|
||||||
|
*/
|
||||||
|
export class AggregationEngine {
|
||||||
|
/**
|
||||||
|
* Execute an aggregation pipeline on a collection of documents
|
||||||
|
*/
|
||||||
|
static aggregate(
|
||||||
|
documents: IStoredDocument[],
|
||||||
|
pipeline: Document[],
|
||||||
|
options?: IAggregateOptions
|
||||||
|
): Document[] {
|
||||||
|
if (!pipeline || pipeline.length === 0) {
|
||||||
|
return documents;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create mingo aggregator with the pipeline
|
||||||
|
const aggregator = new Aggregator(pipeline, {
|
||||||
|
collation: options?.collation as any,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Run the aggregation
|
||||||
|
const result = aggregator.run(documents);
|
||||||
|
|
||||||
|
return Array.isArray(result) ? result : [];
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute aggregation and return an iterator for lazy evaluation
|
||||||
|
*/
|
||||||
|
static *aggregateIterator(
|
||||||
|
documents: IStoredDocument[],
|
||||||
|
pipeline: Document[],
|
||||||
|
options?: IAggregateOptions
|
||||||
|
): Generator<Document> {
|
||||||
|
const aggregator = new Aggregator(pipeline, {
|
||||||
|
collation: options?.collation as any,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get the cursor from mingo
|
||||||
|
const cursor = aggregator.stream(documents);
|
||||||
|
|
||||||
|
for (const doc of cursor) {
|
||||||
|
yield doc;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute a $lookup stage manually (for cross-collection lookups)
|
||||||
|
* This is used when the lookup references another collection in the same database
|
||||||
|
*/
|
||||||
|
static executeLookup(
|
||||||
|
documents: IStoredDocument[],
|
||||||
|
lookupSpec: {
|
||||||
|
from: string;
|
||||||
|
localField: string;
|
||||||
|
foreignField: string;
|
||||||
|
as: string;
|
||||||
|
},
|
||||||
|
foreignCollection: IStoredDocument[]
|
||||||
|
): Document[] {
|
||||||
|
const { localField, foreignField, as } = lookupSpec;
|
||||||
|
|
||||||
|
return documents.map(doc => {
|
||||||
|
const localValue = this.getNestedValue(doc, localField);
|
||||||
|
const matches = foreignCollection.filter(foreignDoc => {
|
||||||
|
const foreignValue = this.getNestedValue(foreignDoc, foreignField);
|
||||||
|
return this.valuesMatch(localValue, foreignValue);
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
...doc,
|
||||||
|
[as]: matches,
|
||||||
|
};
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute a $graphLookup stage manually
|
||||||
|
*/
|
||||||
|
static executeGraphLookup(
|
||||||
|
documents: IStoredDocument[],
|
||||||
|
graphLookupSpec: {
|
||||||
|
from: string;
|
||||||
|
startWith: string | Document;
|
||||||
|
connectFromField: string;
|
||||||
|
connectToField: string;
|
||||||
|
as: string;
|
||||||
|
maxDepth?: number;
|
||||||
|
depthField?: string;
|
||||||
|
restrictSearchWithMatch?: Document;
|
||||||
|
},
|
||||||
|
foreignCollection: IStoredDocument[]
|
||||||
|
): Document[] {
|
||||||
|
const {
|
||||||
|
startWith,
|
||||||
|
connectFromField,
|
||||||
|
connectToField,
|
||||||
|
as,
|
||||||
|
maxDepth = 10,
|
||||||
|
depthField,
|
||||||
|
restrictSearchWithMatch,
|
||||||
|
} = graphLookupSpec;
|
||||||
|
|
||||||
|
return documents.map(doc => {
|
||||||
|
const startValue = typeof startWith === 'string' && startWith.startsWith('$')
|
||||||
|
? this.getNestedValue(doc, startWith.slice(1))
|
||||||
|
: startWith;
|
||||||
|
|
||||||
|
const results: Document[] = [];
|
||||||
|
const visited = new Set<string>();
|
||||||
|
const queue: Array<{ value: any; depth: number }> = [];
|
||||||
|
|
||||||
|
// Initialize with start value(s)
|
||||||
|
const startValues = Array.isArray(startValue) ? startValue : [startValue];
|
||||||
|
for (const val of startValues) {
|
||||||
|
queue.push({ value: val, depth: 0 });
|
||||||
|
}
|
||||||
|
|
||||||
|
while (queue.length > 0) {
|
||||||
|
const { value, depth } = queue.shift()!;
|
||||||
|
if (depth > maxDepth) continue;
|
||||||
|
|
||||||
|
const valueKey = JSON.stringify(value);
|
||||||
|
if (visited.has(valueKey)) continue;
|
||||||
|
visited.add(valueKey);
|
||||||
|
|
||||||
|
// Find matching documents
|
||||||
|
for (const foreignDoc of foreignCollection) {
|
||||||
|
const foreignValue = this.getNestedValue(foreignDoc, connectToField);
|
||||||
|
|
||||||
|
if (this.valuesMatch(value, foreignValue)) {
|
||||||
|
// Check restrictSearchWithMatch
|
||||||
|
if (restrictSearchWithMatch) {
|
||||||
|
const matchQuery = new plugins.mingo.Query(restrictSearchWithMatch);
|
||||||
|
if (!matchQuery.test(foreignDoc)) continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const resultDoc = depthField
|
||||||
|
? { ...foreignDoc, [depthField]: depth }
|
||||||
|
: { ...foreignDoc };
|
||||||
|
|
||||||
|
// Avoid duplicates in results
|
||||||
|
const docKey = foreignDoc._id.toHexString();
|
||||||
|
if (!results.some(r => r._id?.toHexString?.() === docKey)) {
|
||||||
|
results.push(resultDoc);
|
||||||
|
|
||||||
|
// Add connected values to queue
|
||||||
|
const nextValue = this.getNestedValue(foreignDoc, connectFromField);
|
||||||
|
if (nextValue !== undefined) {
|
||||||
|
const nextValues = Array.isArray(nextValue) ? nextValue : [nextValue];
|
||||||
|
for (const nv of nextValues) {
|
||||||
|
queue.push({ value: nv, depth: depth + 1 });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
...doc,
|
||||||
|
[as]: results,
|
||||||
|
};
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute a $facet stage manually
|
||||||
|
*/
|
||||||
|
static executeFacet(
|
||||||
|
documents: IStoredDocument[],
|
||||||
|
facetSpec: Record<string, Document[]>
|
||||||
|
): Document {
|
||||||
|
const result: Document = {};
|
||||||
|
|
||||||
|
for (const [facetName, pipeline] of Object.entries(facetSpec)) {
|
||||||
|
result[facetName] = this.aggregate(documents, pipeline);
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute a $unionWith stage
|
||||||
|
*/
|
||||||
|
static executeUnionWith(
|
||||||
|
documents: IStoredDocument[],
|
||||||
|
otherDocuments: IStoredDocument[],
|
||||||
|
pipeline?: Document[]
|
||||||
|
): Document[] {
|
||||||
|
let unionDocs: Document[] = otherDocuments;
|
||||||
|
if (pipeline && pipeline.length > 0) {
|
||||||
|
unionDocs = this.aggregate(otherDocuments, pipeline);
|
||||||
|
}
|
||||||
|
return [...documents, ...unionDocs];
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute a $merge stage (output to another collection)
|
||||||
|
* Returns the documents that would be inserted/updated
|
||||||
|
*/
|
||||||
|
static prepareMerge(
|
||||||
|
documents: Document[],
|
||||||
|
mergeSpec: {
|
||||||
|
into: string;
|
||||||
|
on?: string | string[];
|
||||||
|
whenMatched?: 'replace' | 'keepExisting' | 'merge' | 'fail' | Document[];
|
||||||
|
whenNotMatched?: 'insert' | 'discard' | 'fail';
|
||||||
|
}
|
||||||
|
): {
|
||||||
|
toInsert: Document[];
|
||||||
|
toUpdate: Array<{ filter: Document; update: Document }>;
|
||||||
|
onField: string | string[];
|
||||||
|
whenMatched: string | Document[];
|
||||||
|
whenNotMatched: string;
|
||||||
|
} {
|
||||||
|
const onField = mergeSpec.on || '_id';
|
||||||
|
const whenMatched = mergeSpec.whenMatched || 'merge';
|
||||||
|
const whenNotMatched = mergeSpec.whenNotMatched || 'insert';
|
||||||
|
|
||||||
|
return {
|
||||||
|
toInsert: [],
|
||||||
|
toUpdate: [],
|
||||||
|
onField,
|
||||||
|
whenMatched,
|
||||||
|
whenNotMatched,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Helper Methods
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
private static getNestedValue(obj: any, path: string): any {
|
||||||
|
const parts = path.split('.');
|
||||||
|
let current = obj;
|
||||||
|
|
||||||
|
for (const part of parts) {
|
||||||
|
if (current === null || current === undefined) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
current = current[part];
|
||||||
|
}
|
||||||
|
|
||||||
|
return current;
|
||||||
|
}
|
||||||
|
|
||||||
|
private static valuesMatch(a: any, b: any): boolean {
|
||||||
|
if (a === b) return true;
|
||||||
|
|
||||||
|
// Handle ObjectId comparison
|
||||||
|
if (a instanceof plugins.bson.ObjectId && b instanceof plugins.bson.ObjectId) {
|
||||||
|
return a.equals(b);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle array contains check
|
||||||
|
if (Array.isArray(a)) {
|
||||||
|
return a.some(item => this.valuesMatch(item, b));
|
||||||
|
}
|
||||||
|
if (Array.isArray(b)) {
|
||||||
|
return b.some(item => this.valuesMatch(a, item));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle Date comparison
|
||||||
|
if (a instanceof Date && b instanceof Date) {
|
||||||
|
return a.getTime() === b.getTime();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle object comparison
|
||||||
|
if (typeof a === 'object' && typeof b === 'object' && a !== null && b !== null) {
|
||||||
|
return JSON.stringify(a) === JSON.stringify(b);
|
||||||
|
}
|
||||||
|
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
798
ts/ts_tsmdb/engine/IndexEngine.ts
Normal file
798
ts/ts_tsmdb/engine/IndexEngine.ts
Normal file
@@ -0,0 +1,798 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import type { IStorageAdapter } from '../storage/IStorageAdapter.js';
|
||||||
|
|
||||||
|
// Simple B-Tree implementation for range queries
|
||||||
|
// Since sorted-btree has ESM/CJS interop issues, we use a simple custom implementation
|
||||||
|
class SimpleBTree<K, V> {
|
||||||
|
private entries: Map<string, { key: K; value: V }> = new Map();
|
||||||
|
private sortedKeys: K[] = [];
|
||||||
|
private comparator: (a: K, b: K) => number;
|
||||||
|
|
||||||
|
constructor(_unused?: undefined, comparator?: (a: K, b: K) => number) {
|
||||||
|
this.comparator = comparator || ((a: K, b: K) => {
|
||||||
|
if (a < b) return -1;
|
||||||
|
if (a > b) return 1;
|
||||||
|
return 0;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
private keyToString(key: K): string {
|
||||||
|
return JSON.stringify(key);
|
||||||
|
}
|
||||||
|
|
||||||
|
set(key: K, value: V): boolean {
|
||||||
|
const keyStr = this.keyToString(key);
|
||||||
|
const existed = this.entries.has(keyStr);
|
||||||
|
this.entries.set(keyStr, { key, value });
|
||||||
|
|
||||||
|
if (!existed) {
|
||||||
|
// Insert in sorted order
|
||||||
|
const idx = this.sortedKeys.findIndex(k => this.comparator(k, key) > 0);
|
||||||
|
if (idx === -1) {
|
||||||
|
this.sortedKeys.push(key);
|
||||||
|
} else {
|
||||||
|
this.sortedKeys.splice(idx, 0, key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return !existed;
|
||||||
|
}
|
||||||
|
|
||||||
|
get(key: K): V | undefined {
|
||||||
|
const entry = this.entries.get(this.keyToString(key));
|
||||||
|
return entry?.value;
|
||||||
|
}
|
||||||
|
|
||||||
|
delete(key: K): boolean {
|
||||||
|
const keyStr = this.keyToString(key);
|
||||||
|
if (this.entries.has(keyStr)) {
|
||||||
|
this.entries.delete(keyStr);
|
||||||
|
const idx = this.sortedKeys.findIndex(k => this.comparator(k, key) === 0);
|
||||||
|
if (idx !== -1) {
|
||||||
|
this.sortedKeys.splice(idx, 1);
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
forRange(
|
||||||
|
lowKey: K | undefined,
|
||||||
|
highKey: K | undefined,
|
||||||
|
lowInclusive: boolean,
|
||||||
|
highInclusive: boolean,
|
||||||
|
callback: (value: V, key: K) => void
|
||||||
|
): void {
|
||||||
|
for (const key of this.sortedKeys) {
|
||||||
|
// Check low bound
|
||||||
|
if (lowKey !== undefined) {
|
||||||
|
const cmp = this.comparator(key, lowKey);
|
||||||
|
if (cmp < 0) continue;
|
||||||
|
if (cmp === 0 && !lowInclusive) continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check high bound
|
||||||
|
if (highKey !== undefined) {
|
||||||
|
const cmp = this.comparator(key, highKey);
|
||||||
|
if (cmp > 0) break;
|
||||||
|
if (cmp === 0 && !highInclusive) break;
|
||||||
|
}
|
||||||
|
|
||||||
|
const entry = this.entries.get(this.keyToString(key));
|
||||||
|
if (entry) {
|
||||||
|
callback(entry.value, key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
import type {
|
||||||
|
Document,
|
||||||
|
IStoredDocument,
|
||||||
|
IIndexSpecification,
|
||||||
|
IIndexInfo,
|
||||||
|
ICreateIndexOptions,
|
||||||
|
} from '../types/interfaces.js';
|
||||||
|
import { TsmdbDuplicateKeyError, TsmdbIndexError } from '../errors/TsmdbErrors.js';
|
||||||
|
import { QueryEngine } from './QueryEngine.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Comparator for B-Tree that handles mixed types consistently
|
||||||
|
*/
|
||||||
|
function indexKeyComparator(a: any, b: any): number {
|
||||||
|
// Handle null/undefined
|
||||||
|
if (a === null || a === undefined) {
|
||||||
|
if (b === null || b === undefined) return 0;
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
if (b === null || b === undefined) return 1;
|
||||||
|
|
||||||
|
// Handle arrays (compound keys)
|
||||||
|
if (Array.isArray(a) && Array.isArray(b)) {
|
||||||
|
for (let i = 0; i < Math.max(a.length, b.length); i++) {
|
||||||
|
const cmp = indexKeyComparator(a[i], b[i]);
|
||||||
|
if (cmp !== 0) return cmp;
|
||||||
|
}
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle ObjectId
|
||||||
|
if (a instanceof plugins.bson.ObjectId && b instanceof plugins.bson.ObjectId) {
|
||||||
|
return a.toHexString().localeCompare(b.toHexString());
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle Date
|
||||||
|
if (a instanceof Date && b instanceof Date) {
|
||||||
|
return a.getTime() - b.getTime();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle different types - use type ordering (null < number < string < object)
|
||||||
|
const typeOrder = (v: any): number => {
|
||||||
|
if (v === null || v === undefined) return 0;
|
||||||
|
if (typeof v === 'number') return 1;
|
||||||
|
if (typeof v === 'string') return 2;
|
||||||
|
if (typeof v === 'boolean') return 3;
|
||||||
|
if (v instanceof Date) return 4;
|
||||||
|
if (v instanceof plugins.bson.ObjectId) return 5;
|
||||||
|
return 6;
|
||||||
|
};
|
||||||
|
|
||||||
|
const typeA = typeOrder(a);
|
||||||
|
const typeB = typeOrder(b);
|
||||||
|
if (typeA !== typeB) return typeA - typeB;
|
||||||
|
|
||||||
|
// Same type comparison
|
||||||
|
if (typeof a === 'number') return a - b;
|
||||||
|
if (typeof a === 'string') return a.localeCompare(b);
|
||||||
|
if (typeof a === 'boolean') return (a ? 1 : 0) - (b ? 1 : 0);
|
||||||
|
|
||||||
|
// Fallback to string comparison
|
||||||
|
return String(a).localeCompare(String(b));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Index data structure using B-Tree for range queries
|
||||||
|
*/
|
||||||
|
interface IIndexData {
|
||||||
|
name: string;
|
||||||
|
key: Record<string, 1 | -1 | string>;
|
||||||
|
unique: boolean;
|
||||||
|
sparse: boolean;
|
||||||
|
expireAfterSeconds?: number;
|
||||||
|
// B-Tree for ordered index lookups (supports range queries)
|
||||||
|
btree: SimpleBTree<any, Set<string>>;
|
||||||
|
// Hash map for fast equality lookups
|
||||||
|
hashMap: Map<string, Set<string>>;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Index engine for managing indexes and query optimization
|
||||||
|
*/
|
||||||
|
export class IndexEngine {
|
||||||
|
private dbName: string;
|
||||||
|
private collName: string;
|
||||||
|
private storage: IStorageAdapter;
|
||||||
|
private indexes: Map<string, IIndexData> = new Map();
|
||||||
|
private initialized = false;
|
||||||
|
|
||||||
|
constructor(dbName: string, collName: string, storage: IStorageAdapter) {
|
||||||
|
this.dbName = dbName;
|
||||||
|
this.collName = collName;
|
||||||
|
this.storage = storage;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initialize indexes from storage
|
||||||
|
*/
|
||||||
|
async initialize(): Promise<void> {
|
||||||
|
if (this.initialized) return;
|
||||||
|
|
||||||
|
const storedIndexes = await this.storage.getIndexes(this.dbName, this.collName);
|
||||||
|
const documents = await this.storage.findAll(this.dbName, this.collName);
|
||||||
|
|
||||||
|
for (const indexSpec of storedIndexes) {
|
||||||
|
const indexData: IIndexData = {
|
||||||
|
name: indexSpec.name,
|
||||||
|
key: indexSpec.key,
|
||||||
|
unique: indexSpec.unique || false,
|
||||||
|
sparse: indexSpec.sparse || false,
|
||||||
|
expireAfterSeconds: indexSpec.expireAfterSeconds,
|
||||||
|
btree: new SimpleBTree<any, Set<string>>(undefined, indexKeyComparator),
|
||||||
|
hashMap: new Map(),
|
||||||
|
};
|
||||||
|
|
||||||
|
// Build index entries
|
||||||
|
for (const doc of documents) {
|
||||||
|
const keyValue = this.extractKeyValue(doc, indexSpec.key);
|
||||||
|
if (keyValue !== null || !indexData.sparse) {
|
||||||
|
const keyStr = JSON.stringify(keyValue);
|
||||||
|
|
||||||
|
// Add to hash map
|
||||||
|
if (!indexData.hashMap.has(keyStr)) {
|
||||||
|
indexData.hashMap.set(keyStr, new Set());
|
||||||
|
}
|
||||||
|
indexData.hashMap.get(keyStr)!.add(doc._id.toHexString());
|
||||||
|
|
||||||
|
// Add to B-tree
|
||||||
|
const existing = indexData.btree.get(keyValue);
|
||||||
|
if (existing) {
|
||||||
|
existing.add(doc._id.toHexString());
|
||||||
|
} else {
|
||||||
|
indexData.btree.set(keyValue, new Set([doc._id.toHexString()]));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.indexes.set(indexSpec.name, indexData);
|
||||||
|
}
|
||||||
|
|
||||||
|
this.initialized = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new index
|
||||||
|
*/
|
||||||
|
async createIndex(
|
||||||
|
key: Record<string, 1 | -1 | 'text' | '2dsphere'>,
|
||||||
|
options?: ICreateIndexOptions
|
||||||
|
): Promise<string> {
|
||||||
|
await this.initialize();
|
||||||
|
|
||||||
|
// Generate index name if not provided
|
||||||
|
const name = options?.name || this.generateIndexName(key);
|
||||||
|
|
||||||
|
// Check if index already exists
|
||||||
|
if (this.indexes.has(name)) {
|
||||||
|
return name;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create index data structure
|
||||||
|
const indexData: IIndexData = {
|
||||||
|
name,
|
||||||
|
key: key as Record<string, 1 | -1 | string>,
|
||||||
|
unique: options?.unique || false,
|
||||||
|
sparse: options?.sparse || false,
|
||||||
|
expireAfterSeconds: options?.expireAfterSeconds,
|
||||||
|
btree: new SimpleBTree<any, Set<string>>(undefined, indexKeyComparator),
|
||||||
|
hashMap: new Map(),
|
||||||
|
};
|
||||||
|
|
||||||
|
// Build index from existing documents
|
||||||
|
const documents = await this.storage.findAll(this.dbName, this.collName);
|
||||||
|
|
||||||
|
for (const doc of documents) {
|
||||||
|
const keyValue = this.extractKeyValue(doc, key);
|
||||||
|
|
||||||
|
if (keyValue === null && indexData.sparse) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const keyStr = JSON.stringify(keyValue);
|
||||||
|
|
||||||
|
if (indexData.unique && indexData.hashMap.has(keyStr)) {
|
||||||
|
throw new TsmdbDuplicateKeyError(
|
||||||
|
`E11000 duplicate key error index: ${this.dbName}.${this.collName}.$${name}`,
|
||||||
|
key as Record<string, 1>,
|
||||||
|
keyValue
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add to hash map
|
||||||
|
if (!indexData.hashMap.has(keyStr)) {
|
||||||
|
indexData.hashMap.set(keyStr, new Set());
|
||||||
|
}
|
||||||
|
indexData.hashMap.get(keyStr)!.add(doc._id.toHexString());
|
||||||
|
|
||||||
|
// Add to B-tree
|
||||||
|
const existing = indexData.btree.get(keyValue);
|
||||||
|
if (existing) {
|
||||||
|
existing.add(doc._id.toHexString());
|
||||||
|
} else {
|
||||||
|
indexData.btree.set(keyValue, new Set([doc._id.toHexString()]));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Store index
|
||||||
|
this.indexes.set(name, indexData);
|
||||||
|
await this.storage.saveIndex(this.dbName, this.collName, name, {
|
||||||
|
key,
|
||||||
|
unique: options?.unique,
|
||||||
|
sparse: options?.sparse,
|
||||||
|
expireAfterSeconds: options?.expireAfterSeconds,
|
||||||
|
});
|
||||||
|
|
||||||
|
return name;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Drop an index
|
||||||
|
*/
|
||||||
|
async dropIndex(name: string): Promise<void> {
|
||||||
|
await this.initialize();
|
||||||
|
|
||||||
|
if (name === '_id_') {
|
||||||
|
throw new TsmdbIndexError('cannot drop _id index');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!this.indexes.has(name)) {
|
||||||
|
throw new TsmdbIndexError(`index not found: ${name}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
this.indexes.delete(name);
|
||||||
|
await this.storage.dropIndex(this.dbName, this.collName, name);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Drop all indexes except _id
|
||||||
|
*/
|
||||||
|
async dropAllIndexes(): Promise<void> {
|
||||||
|
await this.initialize();
|
||||||
|
|
||||||
|
const names = Array.from(this.indexes.keys()).filter(n => n !== '_id_');
|
||||||
|
for (const name of names) {
|
||||||
|
this.indexes.delete(name);
|
||||||
|
await this.storage.dropIndex(this.dbName, this.collName, name);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* List all indexes
|
||||||
|
*/
|
||||||
|
async listIndexes(): Promise<IIndexInfo[]> {
|
||||||
|
await this.initialize();
|
||||||
|
|
||||||
|
return Array.from(this.indexes.values()).map(idx => ({
|
||||||
|
v: 2,
|
||||||
|
key: idx.key,
|
||||||
|
name: idx.name,
|
||||||
|
unique: idx.unique || undefined,
|
||||||
|
sparse: idx.sparse || undefined,
|
||||||
|
expireAfterSeconds: idx.expireAfterSeconds,
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if an index exists
|
||||||
|
*/
|
||||||
|
async indexExists(name: string): Promise<boolean> {
|
||||||
|
await this.initialize();
|
||||||
|
return this.indexes.has(name);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update index entries after document insert
|
||||||
|
*/
|
||||||
|
async onInsert(doc: IStoredDocument): Promise<void> {
|
||||||
|
await this.initialize();
|
||||||
|
|
||||||
|
for (const [name, indexData] of this.indexes) {
|
||||||
|
const keyValue = this.extractKeyValue(doc, indexData.key);
|
||||||
|
|
||||||
|
if (keyValue === null && indexData.sparse) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const keyStr = JSON.stringify(keyValue);
|
||||||
|
|
||||||
|
// Check unique constraint
|
||||||
|
if (indexData.unique) {
|
||||||
|
const existing = indexData.hashMap.get(keyStr);
|
||||||
|
if (existing && existing.size > 0) {
|
||||||
|
throw new TsmdbDuplicateKeyError(
|
||||||
|
`E11000 duplicate key error collection: ${this.dbName}.${this.collName} index: ${name}`,
|
||||||
|
indexData.key as Record<string, 1>,
|
||||||
|
keyValue
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add to hash map
|
||||||
|
if (!indexData.hashMap.has(keyStr)) {
|
||||||
|
indexData.hashMap.set(keyStr, new Set());
|
||||||
|
}
|
||||||
|
indexData.hashMap.get(keyStr)!.add(doc._id.toHexString());
|
||||||
|
|
||||||
|
// Add to B-tree
|
||||||
|
const btreeSet = indexData.btree.get(keyValue);
|
||||||
|
if (btreeSet) {
|
||||||
|
btreeSet.add(doc._id.toHexString());
|
||||||
|
} else {
|
||||||
|
indexData.btree.set(keyValue, new Set([doc._id.toHexString()]));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update index entries after document update
|
||||||
|
*/
|
||||||
|
async onUpdate(oldDoc: IStoredDocument, newDoc: IStoredDocument): Promise<void> {
|
||||||
|
await this.initialize();
|
||||||
|
|
||||||
|
for (const [name, indexData] of this.indexes) {
|
||||||
|
const oldKeyValue = this.extractKeyValue(oldDoc, indexData.key);
|
||||||
|
const newKeyValue = this.extractKeyValue(newDoc, indexData.key);
|
||||||
|
const oldKeyStr = JSON.stringify(oldKeyValue);
|
||||||
|
const newKeyStr = JSON.stringify(newKeyValue);
|
||||||
|
|
||||||
|
// Remove old entry if key changed
|
||||||
|
if (oldKeyStr !== newKeyStr) {
|
||||||
|
if (oldKeyValue !== null || !indexData.sparse) {
|
||||||
|
// Remove from hash map
|
||||||
|
const oldHashSet = indexData.hashMap.get(oldKeyStr);
|
||||||
|
if (oldHashSet) {
|
||||||
|
oldHashSet.delete(oldDoc._id.toHexString());
|
||||||
|
if (oldHashSet.size === 0) {
|
||||||
|
indexData.hashMap.delete(oldKeyStr);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Remove from B-tree
|
||||||
|
const oldBtreeSet = indexData.btree.get(oldKeyValue);
|
||||||
|
if (oldBtreeSet) {
|
||||||
|
oldBtreeSet.delete(oldDoc._id.toHexString());
|
||||||
|
if (oldBtreeSet.size === 0) {
|
||||||
|
indexData.btree.delete(oldKeyValue);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add new entry
|
||||||
|
if (newKeyValue !== null || !indexData.sparse) {
|
||||||
|
// Check unique constraint
|
||||||
|
if (indexData.unique) {
|
||||||
|
const existing = indexData.hashMap.get(newKeyStr);
|
||||||
|
if (existing && existing.size > 0) {
|
||||||
|
throw new TsmdbDuplicateKeyError(
|
||||||
|
`E11000 duplicate key error collection: ${this.dbName}.${this.collName} index: ${name}`,
|
||||||
|
indexData.key as Record<string, 1>,
|
||||||
|
newKeyValue
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add to hash map
|
||||||
|
if (!indexData.hashMap.has(newKeyStr)) {
|
||||||
|
indexData.hashMap.set(newKeyStr, new Set());
|
||||||
|
}
|
||||||
|
indexData.hashMap.get(newKeyStr)!.add(newDoc._id.toHexString());
|
||||||
|
|
||||||
|
// Add to B-tree
|
||||||
|
const newBtreeSet = indexData.btree.get(newKeyValue);
|
||||||
|
if (newBtreeSet) {
|
||||||
|
newBtreeSet.add(newDoc._id.toHexString());
|
||||||
|
} else {
|
||||||
|
indexData.btree.set(newKeyValue, new Set([newDoc._id.toHexString()]));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update index entries after document delete
|
||||||
|
*/
|
||||||
|
async onDelete(doc: IStoredDocument): Promise<void> {
|
||||||
|
await this.initialize();
|
||||||
|
|
||||||
|
for (const indexData of this.indexes.values()) {
|
||||||
|
const keyValue = this.extractKeyValue(doc, indexData.key);
|
||||||
|
|
||||||
|
if (keyValue === null && indexData.sparse) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const keyStr = JSON.stringify(keyValue);
|
||||||
|
|
||||||
|
// Remove from hash map
|
||||||
|
const hashSet = indexData.hashMap.get(keyStr);
|
||||||
|
if (hashSet) {
|
||||||
|
hashSet.delete(doc._id.toHexString());
|
||||||
|
if (hashSet.size === 0) {
|
||||||
|
indexData.hashMap.delete(keyStr);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Remove from B-tree
|
||||||
|
const btreeSet = indexData.btree.get(keyValue);
|
||||||
|
if (btreeSet) {
|
||||||
|
btreeSet.delete(doc._id.toHexString());
|
||||||
|
if (btreeSet.size === 0) {
|
||||||
|
indexData.btree.delete(keyValue);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find the best index for a query
|
||||||
|
*/
|
||||||
|
selectIndex(filter: Document): { name: string; data: IIndexData } | null {
|
||||||
|
if (!filter || Object.keys(filter).length === 0) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get filter fields and operators
|
||||||
|
const filterInfo = this.analyzeFilter(filter);
|
||||||
|
|
||||||
|
// Score each index
|
||||||
|
let bestIndex: { name: string; data: IIndexData } | null = null;
|
||||||
|
let bestScore = 0;
|
||||||
|
|
||||||
|
for (const [name, indexData] of this.indexes) {
|
||||||
|
const indexFields = Object.keys(indexData.key);
|
||||||
|
let score = 0;
|
||||||
|
|
||||||
|
// Count how many index fields can be used
|
||||||
|
for (const field of indexFields) {
|
||||||
|
const info = filterInfo.get(field);
|
||||||
|
if (!info) break;
|
||||||
|
|
||||||
|
// Equality is best
|
||||||
|
if (info.equality) {
|
||||||
|
score += 2;
|
||||||
|
} else if (info.range) {
|
||||||
|
// Range queries can use B-tree
|
||||||
|
score += 1;
|
||||||
|
} else if (info.in) {
|
||||||
|
score += 1.5;
|
||||||
|
} else {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Prefer unique indexes
|
||||||
|
if (indexData.unique && score > 0) {
|
||||||
|
score += 0.5;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (score > bestScore) {
|
||||||
|
bestScore = score;
|
||||||
|
bestIndex = { name, data: indexData };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return bestIndex;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Analyze filter to extract field operators
|
||||||
|
*/
|
||||||
|
private analyzeFilter(filter: Document): Map<string, { equality: boolean; range: boolean; in: boolean; ops: Record<string, any> }> {
|
||||||
|
const result = new Map<string, { equality: boolean; range: boolean; in: boolean; ops: Record<string, any> }>();
|
||||||
|
|
||||||
|
for (const [key, value] of Object.entries(filter)) {
|
||||||
|
if (key.startsWith('$')) continue;
|
||||||
|
|
||||||
|
const info = { equality: false, range: false, in: false, ops: {} as Record<string, any> };
|
||||||
|
|
||||||
|
if (typeof value !== 'object' || value === null || value instanceof plugins.bson.ObjectId || value instanceof Date) {
|
||||||
|
info.equality = true;
|
||||||
|
info.ops['$eq'] = value;
|
||||||
|
} else {
|
||||||
|
const ops = value as Record<string, any>;
|
||||||
|
if (ops.$eq !== undefined) {
|
||||||
|
info.equality = true;
|
||||||
|
info.ops['$eq'] = ops.$eq;
|
||||||
|
}
|
||||||
|
if (ops.$in !== undefined) {
|
||||||
|
info.in = true;
|
||||||
|
info.ops['$in'] = ops.$in;
|
||||||
|
}
|
||||||
|
if (ops.$gt !== undefined || ops.$gte !== undefined || ops.$lt !== undefined || ops.$lte !== undefined) {
|
||||||
|
info.range = true;
|
||||||
|
if (ops.$gt !== undefined) info.ops['$gt'] = ops.$gt;
|
||||||
|
if (ops.$gte !== undefined) info.ops['$gte'] = ops.$gte;
|
||||||
|
if (ops.$lt !== undefined) info.ops['$lt'] = ops.$lt;
|
||||||
|
if (ops.$lte !== undefined) info.ops['$lte'] = ops.$lte;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
result.set(key, info);
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Use index to find candidate document IDs (supports range queries with B-tree)
|
||||||
|
*/
|
||||||
|
async findCandidateIds(filter: Document): Promise<Set<string> | null> {
|
||||||
|
await this.initialize();
|
||||||
|
|
||||||
|
const index = this.selectIndex(filter);
|
||||||
|
if (!index) return null;
|
||||||
|
|
||||||
|
const filterInfo = this.analyzeFilter(filter);
|
||||||
|
const indexFields = Object.keys(index.data.key);
|
||||||
|
|
||||||
|
// For single-field indexes with range queries, use B-tree
|
||||||
|
if (indexFields.length === 1) {
|
||||||
|
const field = indexFields[0];
|
||||||
|
const info = filterInfo.get(field);
|
||||||
|
|
||||||
|
if (info) {
|
||||||
|
// Handle equality using hash map (faster)
|
||||||
|
if (info.equality) {
|
||||||
|
const keyStr = JSON.stringify(info.ops['$eq']);
|
||||||
|
return index.data.hashMap.get(keyStr) || new Set();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle $in using hash map
|
||||||
|
if (info.in) {
|
||||||
|
const results = new Set<string>();
|
||||||
|
for (const val of info.ops['$in']) {
|
||||||
|
const keyStr = JSON.stringify(val);
|
||||||
|
const ids = index.data.hashMap.get(keyStr);
|
||||||
|
if (ids) {
|
||||||
|
for (const id of ids) {
|
||||||
|
results.add(id);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return results;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle range queries using B-tree
|
||||||
|
if (info.range) {
|
||||||
|
return this.findRangeCandidates(index.data, info.ops);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// For compound indexes, use hash map with partial key matching
|
||||||
|
const equalityValues: Record<string, any> = {};
|
||||||
|
|
||||||
|
for (const field of indexFields) {
|
||||||
|
const info = filterInfo.get(field);
|
||||||
|
if (!info) break;
|
||||||
|
|
||||||
|
if (info.equality) {
|
||||||
|
equalityValues[field] = info.ops['$eq'];
|
||||||
|
} else if (info.in) {
|
||||||
|
// Handle $in with multiple lookups
|
||||||
|
const results = new Set<string>();
|
||||||
|
for (const val of info.ops['$in']) {
|
||||||
|
equalityValues[field] = val;
|
||||||
|
const keyStr = JSON.stringify(this.buildKeyValue(equalityValues, index.data.key));
|
||||||
|
const ids = index.data.hashMap.get(keyStr);
|
||||||
|
if (ids) {
|
||||||
|
for (const id of ids) {
|
||||||
|
results.add(id);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return results;
|
||||||
|
} else {
|
||||||
|
break; // Non-equality/in operator, stop here
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (Object.keys(equalityValues).length > 0) {
|
||||||
|
const keyStr = JSON.stringify(this.buildKeyValue(equalityValues, index.data.key));
|
||||||
|
return index.data.hashMap.get(keyStr) || new Set();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find candidates using B-tree range scan
|
||||||
|
*/
|
||||||
|
private findRangeCandidates(indexData: IIndexData, ops: Record<string, any>): Set<string> {
|
||||||
|
const results = new Set<string>();
|
||||||
|
|
||||||
|
let lowKey: any = undefined;
|
||||||
|
let highKey: any = undefined;
|
||||||
|
let lowInclusive = true;
|
||||||
|
let highInclusive = true;
|
||||||
|
|
||||||
|
if (ops['$gt'] !== undefined) {
|
||||||
|
lowKey = ops['$gt'];
|
||||||
|
lowInclusive = false;
|
||||||
|
}
|
||||||
|
if (ops['$gte'] !== undefined) {
|
||||||
|
lowKey = ops['$gte'];
|
||||||
|
lowInclusive = true;
|
||||||
|
}
|
||||||
|
if (ops['$lt'] !== undefined) {
|
||||||
|
highKey = ops['$lt'];
|
||||||
|
highInclusive = false;
|
||||||
|
}
|
||||||
|
if (ops['$lte'] !== undefined) {
|
||||||
|
highKey = ops['$lte'];
|
||||||
|
highInclusive = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use B-tree range iteration
|
||||||
|
indexData.btree.forRange(lowKey, highKey, lowInclusive, highInclusive, (value, key) => {
|
||||||
|
if (value) {
|
||||||
|
for (const id of value) {
|
||||||
|
results.add(id);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
return results;
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Helper Methods
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
private generateIndexName(key: Record<string, any>): string {
|
||||||
|
return Object.entries(key)
|
||||||
|
.map(([field, dir]) => `${field}_${dir}`)
|
||||||
|
.join('_');
|
||||||
|
}
|
||||||
|
|
||||||
|
private extractKeyValue(doc: Document, key: Record<string, any>): any {
|
||||||
|
const values: any[] = [];
|
||||||
|
|
||||||
|
for (const field of Object.keys(key)) {
|
||||||
|
const value = QueryEngine.getNestedValue(doc, field);
|
||||||
|
values.push(value === undefined ? null : value);
|
||||||
|
}
|
||||||
|
|
||||||
|
// For single-field index, return the value directly
|
||||||
|
if (values.length === 1) {
|
||||||
|
return values[0];
|
||||||
|
}
|
||||||
|
|
||||||
|
return values;
|
||||||
|
}
|
||||||
|
|
||||||
|
private buildKeyValue(values: Record<string, any>, key: Record<string, any>): any {
|
||||||
|
const result: any[] = [];
|
||||||
|
|
||||||
|
for (const field of Object.keys(key)) {
|
||||||
|
result.push(values[field] !== undefined ? values[field] : null);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (result.length === 1) {
|
||||||
|
return result[0];
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
private getFilterFields(filter: Document, prefix = ''): string[] {
|
||||||
|
const fields: string[] = [];
|
||||||
|
|
||||||
|
for (const [key, value] of Object.entries(filter)) {
|
||||||
|
if (key.startsWith('$')) {
|
||||||
|
// Logical operator
|
||||||
|
if (key === '$and' || key === '$or' || key === '$nor') {
|
||||||
|
for (const subFilter of value as Document[]) {
|
||||||
|
fields.push(...this.getFilterFields(subFilter, prefix));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
const fullKey = prefix ? `${prefix}.${key}` : key;
|
||||||
|
fields.push(fullKey);
|
||||||
|
|
||||||
|
// Check for nested filters
|
||||||
|
if (typeof value === 'object' && value !== null && !Array.isArray(value)) {
|
||||||
|
const subKeys = Object.keys(value);
|
||||||
|
if (subKeys.length > 0 && !subKeys[0].startsWith('$')) {
|
||||||
|
fields.push(...this.getFilterFields(value, fullKey));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return fields;
|
||||||
|
}
|
||||||
|
|
||||||
|
private getFilterValue(filter: Document, field: string): any {
|
||||||
|
// Handle dot notation
|
||||||
|
const parts = field.split('.');
|
||||||
|
let current: any = filter;
|
||||||
|
|
||||||
|
for (const part of parts) {
|
||||||
|
if (current === null || current === undefined) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
current = current[part];
|
||||||
|
}
|
||||||
|
|
||||||
|
return current;
|
||||||
|
}
|
||||||
|
}
|
||||||
301
ts/ts_tsmdb/engine/QueryEngine.ts
Normal file
301
ts/ts_tsmdb/engine/QueryEngine.ts
Normal file
@@ -0,0 +1,301 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import type { Document, IStoredDocument, ISortSpecification, ISortDirection } from '../types/interfaces.js';
|
||||||
|
|
||||||
|
// Import mingo Query class
|
||||||
|
import { Query } from 'mingo';
|
||||||
|
|
||||||
|
/**
 * Query engine using mingo for MongoDB-compatible query matching.
 *
 * All public members are static; the class holds no state. Filtering and
 * matching delegate to mingo's Query; sorting, projection and distinct are
 * implemented locally with dot-notation path helpers.
 */
export class QueryEngine {
  /**
   * Filter documents by a MongoDB query filter.
   * An empty/absent filter returns the input array unchanged (same reference).
   */
  static filter(documents: IStoredDocument[], filter: Document): IStoredDocument[] {
    if (!filter || Object.keys(filter).length === 0) {
      return documents;
    }

    const query = new Query(filter);
    return documents.filter(doc => query.test(doc));
  }

  /**
   * Test if a single document matches a filter.
   * An empty/absent filter matches every document.
   */
  static matches(document: Document, filter: Document): boolean {
    if (!filter || Object.keys(filter).length === 0) {
      return true;
    }

    const query = new Query(filter);
    return query.test(document);
  }

  /**
   * Find the first document (in array order) matching the filter,
   * or null when none matches.
   */
  static findOne(documents: IStoredDocument[], filter: Document): IStoredDocument | null {
    if (!filter || Object.keys(filter).length === 0) {
      return documents[0] || null;
    }

    const query = new Query(filter);
    for (const doc of documents) {
      if (query.test(doc)) {
        return doc;
      }
    }
    return null;
  }

  /**
   * Sort documents by a sort specification (object or [field, dir] pairs).
   * Returns a new array; the input is not mutated.
   */
  static sort(documents: IStoredDocument[], sort: ISortSpecification): IStoredDocument[] {
    if (!sort) {
      return documents;
    }

    // Normalize sort specification to array of [field, direction] pairs
    // (direction is always 1 or -1 after normalizeDirection).
    const sortFields: Array<[string, number]> = [];

    if (Array.isArray(sort)) {
      for (const [field, direction] of sort) {
        sortFields.push([field, this.normalizeDirection(direction)]);
      }
    } else {
      for (const [field, direction] of Object.entries(sort)) {
        sortFields.push([field, this.normalizeDirection(direction)]);
      }
    }

    // Spread-copy so Array.prototype.sort does not mutate the caller's array.
    return [...documents].sort((a, b) => {
      for (const [field, direction] of sortFields) {
        const aVal = this.getNestedValue(a, field);
        const bVal = this.getNestedValue(b, field);

        const comparison = this.compareValues(aVal, bVal);
        if (comparison !== 0) {
          return comparison * direction;
        }
      }
      return 0; // equal on all sort keys: keep relative order (stable sort)
    });
  }

  /**
   * Apply a MongoDB-style projection to documents.
   * An empty projection returns the input array unchanged (same reference).
   *
   * @throws Error when inclusion and exclusion fields (other than _id) are mixed.
   */
  static project(documents: IStoredDocument[], projection: Document): Document[] {
    if (!projection || Object.keys(projection).length === 0) {
      return documents;
    }

    // Determine if this is inclusion or exclusion projection
    const keys = Object.keys(projection);
    const hasInclusion = keys.some(k => k !== '_id' && projection[k] === 1);
    const hasExclusion = keys.some(k => k !== '_id' && projection[k] === 0);

    // Can't mix inclusion and exclusion (except for _id)
    if (hasInclusion && hasExclusion) {
      throw new Error('Cannot mix inclusion and exclusion in projection');
    }

    return documents.map(doc => {
      if (hasInclusion) {
        // Inclusion projection: build a fresh object with only requested fields.
        const result: Document = {};

        // _id is included by default unless explicitly suppressed.
        if (projection._id !== 0 && projection._id !== false) {
          result._id = doc._id;
        }

        for (const key of keys) {
          if (key === '_id') continue;
          if (projection[key] === 1 || projection[key] === true) {
            const value = this.getNestedValue(doc, key);
            if (value !== undefined) {
              this.setNestedValue(result, key, value);
            }
          }
        }

        return result;
      } else {
        // Exclusion projection - start with copy and remove fields.
        // NOTE(review): { ...doc } is a shallow copy, so deleteNestedValue on
        // a dotted path mutates nested objects shared with the original
        // document — confirm callers do not rely on the originals.
        const result = { ...doc };

        for (const key of keys) {
          if (projection[key] === 0 || projection[key] === false) {
            this.deleteNestedValue(result, key);
          }
        }

        return result;
      }
    });
  }

  /**
   * Get distinct values for a field, optionally pre-filtering documents.
   * Array-valued fields contribute each element. Objects, Dates and
   * ObjectIds are converted via toComparable, so those distinct values are
   * returned in their comparable (string) form.
   */
  static distinct(documents: IStoredDocument[], field: string, filter?: Document): any[] {
    let docs = documents;
    if (filter && Object.keys(filter).length > 0) {
      docs = this.filter(documents, filter);
    }

    const values = new Set<any>();
    for (const doc of docs) {
      const value = this.getNestedValue(doc, field);
      if (value !== undefined) {
        if (Array.isArray(value)) {
          // For arrays, add each element
          for (const v of value) {
            values.add(this.toComparable(v));
          }
        } else {
          values.add(this.toComparable(value));
        }
      }
    }

    return Array.from(values);
  }

  /**
   * Normalize sort direction to 1 or -1.
   * Numeric 0 and any string other than 'asc'/'ascending' normalize to -1.
   */
  private static normalizeDirection(direction: ISortDirection): number {
    if (typeof direction === 'number') {
      return direction > 0 ? 1 : -1;
    }
    if (direction === 'asc' || direction === 'ascending') {
      return 1;
    }
    return -1;
  }

  /**
   * Get a nested value from an object using dot notation.
   * Numeric path segments index into arrays; a non-numeric segment on an
   * array maps over all elements and flattens the result one level.
   */
  static getNestedValue(obj: any, path: string): any {
    const parts = path.split('.');
    let current = obj;

    for (const part of parts) {
      if (current === null || current === undefined) {
        return undefined;
      }
      if (Array.isArray(current)) {
        // Handle array access
        const index = parseInt(part, 10);
        if (!isNaN(index)) {
          current = current[index];
        } else {
          // Get the field from all array elements (flattened one level;
          // any remaining path segments are applied per element).
          return current.map(item => this.getNestedValue(item, part)).flat();
        }
      } else {
        current = current[part];
      }
    }

    return current;
  }

  /**
   * Set a nested value in an object using dot notation,
   * creating intermediate plain objects as needed.
   */
  private static setNestedValue(obj: any, path: string, value: any): void {
    const parts = path.split('.');
    let current = obj;

    for (let i = 0; i < parts.length - 1; i++) {
      const part = parts[i];
      if (!(part in current)) {
        current[part] = {};
      }
      current = current[part];
    }

    current[parts[parts.length - 1]] = value;
  }

  /**
   * Delete a nested value from an object using dot notation.
   * A missing intermediate segment makes this a no-op.
   */
  private static deleteNestedValue(obj: any, path: string): void {
    const parts = path.split('.');
    let current = obj;

    for (let i = 0; i < parts.length - 1; i++) {
      const part = parts[i];
      if (!(part in current)) {
        return;
      }
      current = current[part];
    }

    delete current[parts[parts.length - 1]];
  }

  /**
   * Compare two values for sorting: negative when a < b, positive when a > b.
   * Ordering of missing values: undefined sorts before null, which sorts
   * before everything else. Mismatched types fall back to string comparison.
   */
  private static compareValues(a: any, b: any): number {
    // Handle undefined/null
    if (a === undefined && b === undefined) return 0;
    if (a === undefined) return -1;
    if (b === undefined) return 1;
    if (a === null && b === null) return 0;
    if (a === null) return -1;
    if (b === null) return 1;

    // Handle ObjectId (lexicographic on the 24-char hex form)
    if (a instanceof plugins.bson.ObjectId && b instanceof plugins.bson.ObjectId) {
      return a.toHexString().localeCompare(b.toHexString());
    }

    // Handle dates
    if (a instanceof Date && b instanceof Date) {
      return a.getTime() - b.getTime();
    }

    // Handle numbers
    if (typeof a === 'number' && typeof b === 'number') {
      return a - b;
    }

    // Handle strings
    if (typeof a === 'string' && typeof b === 'string') {
      return a.localeCompare(b);
    }

    // Handle booleans (false < true)
    if (typeof a === 'boolean' && typeof b === 'boolean') {
      return (a ? 1 : 0) - (b ? 1 : 0);
    }

    // Fall back to string comparison
    return String(a).localeCompare(String(b));
  }

  /**
   * Convert a value to a comparable form (for distinct):
   * ObjectId -> hex string, Date -> ISO string, other objects -> JSON.
   */
  private static toComparable(value: any): any {
    if (value instanceof plugins.bson.ObjectId) {
      return value.toHexString();
    }
    if (value instanceof Date) {
      return value.toISOString();
    }
    if (typeof value === 'object' && value !== null) {
      return JSON.stringify(value);
    }
    return value;
  }
}
|
||||||
393
ts/ts_tsmdb/engine/QueryPlanner.ts
Normal file
393
ts/ts_tsmdb/engine/QueryPlanner.ts
Normal file
@@ -0,0 +1,393 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import type { Document, IStoredDocument } from '../types/interfaces.js';
|
||||||
|
import { IndexEngine } from './IndexEngine.js';
|
||||||
|
|
||||||
|
/**
 * Query execution plan stage types (MongoDB explain-style names).
 * COLLSCAN is produced by QueryPlanner.plan() for full collection scans;
 * the IXSCAN/IXSCAN_RANGE/FETCH variants are presumably emitted by
 * scoreIndex for index-backed plans — confirm there.
 */
export type TQueryPlanType = 'IXSCAN' | 'COLLSCAN' | 'FETCH' | 'IXSCAN_RANGE';
|
||||||
|
|
||||||
|
/**
 * Represents a query execution plan produced by QueryPlanner.plan().
 * Describes which scan strategy to use and what filtering remains to be
 * done after the index lookup.
 */
export interface IQueryPlan {
  /** The type of scan used */
  type: TQueryPlanType;
  /** Index name if using an index */
  indexName?: string;
  /** Index key specification (field -> direction) */
  indexKey?: Record<string, 1 | -1 | string>;
  /** Whether the query can be fully satisfied by the index */
  indexCovering: boolean;
  /** Estimated selectivity (0-1, lower is more selective; 1.0 = full scan) */
  selectivity: number;
  /** Whether range operators ($gt/$gte/$lt/$lte) are used */
  usesRange: boolean;
  /** Fields used from the index */
  indexFieldsUsed: string[];
  /** Filter conditions that must be applied post-index lookup */
  residualFilter?: Document;
  /** Human-readable explanation for debugging */
  explanation: string;
}
|
||||||
|
|
||||||
|
/**
 * Filter operator analysis: everything the planner knows about how a single
 * field is constrained by the query filter.
 */
interface IFilterOperatorInfo {
  /** Full dot-notation field path. */
  field: string;
  /** Every $-operator seen for this field (may repeat across $and branches). */
  operators: string[];
  /** True when constrained by $eq (or a bare value / array literal). */
  equality: boolean;
  /** True when constrained by $gt/$gte/$lt/$lte. */
  range: boolean;
  /** True when constrained by $in. */
  in: boolean;
  /** True when constrained by $exists. */
  exists: boolean;
  /** True when constrained by $regex. */
  regex: boolean;
  /** Operator name -> operand value. */
  values: Record<string, any>;
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* QueryPlanner - Analyzes queries and selects optimal execution plans
|
||||||
|
*/
|
||||||
|
export class QueryPlanner {
|
||||||
|
private indexEngine: IndexEngine;
|
||||||
|
|
||||||
|
constructor(indexEngine: IndexEngine) {
|
||||||
|
this.indexEngine = indexEngine;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate an execution plan for a query filter
|
||||||
|
*/
|
||||||
|
async plan(filter: Document): Promise<IQueryPlan> {
|
||||||
|
await this.indexEngine['initialize']();
|
||||||
|
|
||||||
|
// Empty filter = full collection scan
|
||||||
|
if (!filter || Object.keys(filter).length === 0) {
|
||||||
|
return {
|
||||||
|
type: 'COLLSCAN',
|
||||||
|
indexCovering: false,
|
||||||
|
selectivity: 1.0,
|
||||||
|
usesRange: false,
|
||||||
|
indexFieldsUsed: [],
|
||||||
|
explanation: 'No filter specified, full collection scan required',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Analyze the filter
|
||||||
|
const operatorInfo = this.analyzeFilter(filter);
|
||||||
|
|
||||||
|
// Get available indexes
|
||||||
|
const indexes = await this.indexEngine.listIndexes();
|
||||||
|
|
||||||
|
// Score each index
|
||||||
|
let bestPlan: IQueryPlan | null = null;
|
||||||
|
let bestScore = -1;
|
||||||
|
|
||||||
|
for (const index of indexes) {
|
||||||
|
const plan = this.scoreIndex(index, operatorInfo, filter);
|
||||||
|
if (plan.selectivity < 1.0) {
|
||||||
|
const score = this.calculateScore(plan);
|
||||||
|
if (score > bestScore) {
|
||||||
|
bestScore = score;
|
||||||
|
bestPlan = plan;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// If no suitable index found, fall back to collection scan
|
||||||
|
if (!bestPlan || bestScore <= 0) {
|
||||||
|
return {
|
||||||
|
type: 'COLLSCAN',
|
||||||
|
indexCovering: false,
|
||||||
|
selectivity: 1.0,
|
||||||
|
usesRange: false,
|
||||||
|
indexFieldsUsed: [],
|
||||||
|
explanation: 'No suitable index found for this query',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return bestPlan;
|
||||||
|
}
|
||||||
|
|
||||||
|
  /**
   * Analyze filter to extract operator information per field.
   *
   * Conditions nested under $and are merged per field. NOTE(review): $or and
   * $nor branches are skipped entirely (only $and is descended into) —
   * confirm the planner intentionally ignores them; also note the merge
   * below does not carry the `exists`/`regex` flags from sub-branches.
   */
  private analyzeFilter(filter: Document, prefix = ''): Map<string, IFilterOperatorInfo> {
    const result = new Map<string, IFilterOperatorInfo>();

    for (const [key, value] of Object.entries(filter)) {
      // Skip logical operators at the top level
      if (key.startsWith('$')) {
        if (key === '$and' && Array.isArray(value)) {
          // Merge $and conditions: analyze each branch, then fold the
          // per-field info into any entry already collected.
          for (const subFilter of value) {
            const subInfo = this.analyzeFilter(subFilter, prefix);
            for (const [field, info] of subInfo) {
              if (result.has(field)) {
                // Merge operators (equality/range/in flags OR'd together,
                // operand values overwritten last-wins via Object.assign).
                const existing = result.get(field)!;
                existing.operators.push(...info.operators);
                existing.equality = existing.equality || info.equality;
                existing.range = existing.range || info.range;
                existing.in = existing.in || info.in;
                Object.assign(existing.values, info.values);
              } else {
                result.set(field, info);
              }
            }
          }
        }
        continue;
      }

      // Non-operator key: a concrete field path (prefixed when recursing).
      const fullKey = prefix ? `${prefix}.${key}` : key;
      const info: IFilterOperatorInfo = {
        field: fullKey,
        operators: [],
        equality: false,
        range: false,
        in: false,
        exists: false,
        regex: false,
        values: {},
      };

      if (typeof value !== 'object' || value === null || value instanceof plugins.bson.ObjectId || value instanceof Date) {
        // Direct equality: bare primitive / null / ObjectId / Date operand.
        info.equality = true;
        info.operators.push('$eq');
        info.values['$eq'] = value;
      } else if (Array.isArray(value)) {
        // Array equality (rare, but possible)
        info.equality = true;
        info.operators.push('$eq');
        info.values['$eq'] = value;
      } else {
        // Operator object: classify each $-operator; non-$ keys are nested
        // sub-documents and are recursed into with the extended prefix.
        for (const [op, opValue] of Object.entries(value)) {
          if (op.startsWith('$')) {
            info.operators.push(op);
            info.values[op] = opValue;

            switch (op) {
              case '$eq':
                info.equality = true;
                break;
              case '$ne':
              case '$not':
                // These can use indexes but with low selectivity
                break;
              case '$in':
                info.in = true;
                break;
              case '$nin':
                // Can't efficiently use indexes
                break;
              case '$gt':
              case '$gte':
              case '$lt':
              case '$lte':
                info.range = true;
                break;
              case '$exists':
                info.exists = true;
                break;
              case '$regex':
                info.regex = true;
                break;
            }
          } else {
            // Nested object - recurse (entries land directly in `result`)
            const nestedInfo = this.analyzeFilter({ [op]: opValue }, fullKey);
            for (const [nestedField, nestedFieldInfo] of nestedInfo) {
              result.set(nestedField, nestedFieldInfo);
            }
          }
        }
      }

      // Only record fields that produced at least one operator.
      if (info.operators.length > 0) {
        result.set(fullKey, info);
      }
    }

    return result;
  }
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Score an index for the given filter
|
||||||
|
*/
|
||||||
|
private scoreIndex(
|
||||||
|
index: { name: string; key: Record<string, any>; unique?: boolean; sparse?: boolean },
|
||||||
|
operatorInfo: Map<string, IFilterOperatorInfo>,
|
||||||
|
filter: Document
|
||||||
|
): IQueryPlan {
|
||||||
|
const indexFields = Object.keys(index.key);
|
||||||
|
const usedFields: string[] = [];
|
||||||
|
let usesRange = false;
|
||||||
|
let canUseIndex = true;
|
||||||
|
let selectivity = 1.0;
|
||||||
|
let residualFilter: Document | undefined;
|
||||||
|
|
||||||
|
// Check each index field in order
|
||||||
|
for (const field of indexFields) {
|
||||||
|
const info = operatorInfo.get(field);
|
||||||
|
if (!info) {
|
||||||
|
// Index field not in filter - stop here
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
usedFields.push(field);
|
||||||
|
|
||||||
|
// Calculate selectivity based on operator
|
||||||
|
if (info.equality) {
|
||||||
|
// Equality has high selectivity
|
||||||
|
selectivity *= 0.01; // Assume 1% match
|
||||||
|
} else if (info.in) {
|
||||||
|
// $in selectivity depends on array size
|
||||||
|
const inValues = info.values['$in'];
|
||||||
|
if (Array.isArray(inValues)) {
|
||||||
|
selectivity *= Math.min(0.5, inValues.length * 0.01);
|
||||||
|
} else {
|
||||||
|
selectivity *= 0.1;
|
||||||
|
}
|
||||||
|
} else if (info.range) {
|
||||||
|
// Range queries have moderate selectivity
|
||||||
|
selectivity *= 0.25;
|
||||||
|
usesRange = true;
|
||||||
|
// After range, can't use more index fields efficiently
|
||||||
|
break;
|
||||||
|
} else if (info.exists) {
|
||||||
|
// $exists can use sparse indexes
|
||||||
|
selectivity *= 0.5;
|
||||||
|
} else {
|
||||||
|
// Other operators may not be indexable
|
||||||
|
canUseIndex = false;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!canUseIndex || usedFields.length === 0) {
|
||||||
|
return {
|
||||||
|
type: 'COLLSCAN',
|
||||||
|
indexCovering: false,
|
||||||
|
selectivity: 1.0,
|
||||||
|
usesRange: false,
|
||||||
|
indexFieldsUsed: [],
|
||||||
|
explanation: `Index ${index.name} cannot be used for this query`,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build residual filter for conditions not covered by index
|
||||||
|
const coveredFields = new Set(usedFields);
|
||||||
|
const residualConditions: Record<string, any> = {};
|
||||||
|
for (const [field, info] of operatorInfo) {
|
||||||
|
if (!coveredFields.has(field)) {
|
||||||
|
// This field isn't covered by the index
|
||||||
|
if (info.equality) {
|
||||||
|
residualConditions[field] = info.values['$eq'];
|
||||||
|
} else {
|
||||||
|
residualConditions[field] = info.values;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (Object.keys(residualConditions).length > 0) {
|
||||||
|
residualFilter = residualConditions;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Unique indexes have better selectivity for equality
|
||||||
|
if (index.unique && usedFields.length === indexFields.length) {
|
||||||
|
selectivity = Math.min(selectivity, 0.001); // At most 1 document
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
type: usesRange ? 'IXSCAN_RANGE' : 'IXSCAN',
|
||||||
|
indexName: index.name,
|
||||||
|
indexKey: index.key,
|
||||||
|
indexCovering: Object.keys(residualConditions).length === 0,
|
||||||
|
selectivity,
|
||||||
|
usesRange,
|
||||||
|
indexFieldsUsed: usedFields,
|
||||||
|
residualFilter,
|
||||||
|
explanation: `Using index ${index.name} on fields [${usedFields.join(', ')}]`,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calculate overall score for a plan (higher is better)
|
||||||
|
*/
|
||||||
|
private calculateScore(plan: IQueryPlan): number {
|
||||||
|
let score = 0;
|
||||||
|
|
||||||
|
// Lower selectivity is better (fewer documents to fetch)
|
||||||
|
score += (1 - plan.selectivity) * 100;
|
||||||
|
|
||||||
|
// Index covering queries are best
|
||||||
|
if (plan.indexCovering) {
|
||||||
|
score += 50;
|
||||||
|
}
|
||||||
|
|
||||||
|
// More index fields used is better
|
||||||
|
score += plan.indexFieldsUsed.length * 10;
|
||||||
|
|
||||||
|
// Equality scans are better than range scans
|
||||||
|
if (!plan.usesRange) {
|
||||||
|
score += 20;
|
||||||
|
}
|
||||||
|
|
||||||
|
return score;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Explain a query - returns detailed plan information
|
||||||
|
*/
|
||||||
|
async explain(filter: Document): Promise<{
|
||||||
|
queryPlanner: {
|
||||||
|
plannerVersion: number;
|
||||||
|
namespace: string;
|
||||||
|
indexFilterSet: boolean;
|
||||||
|
winningPlan: IQueryPlan;
|
||||||
|
rejectedPlans: IQueryPlan[];
|
||||||
|
};
|
||||||
|
}> {
|
||||||
|
await this.indexEngine['initialize']();
|
||||||
|
|
||||||
|
// Analyze the filter
|
||||||
|
const operatorInfo = this.analyzeFilter(filter);
|
||||||
|
|
||||||
|
// Get available indexes
|
||||||
|
const indexes = await this.indexEngine.listIndexes();
|
||||||
|
|
||||||
|
// Score all indexes
|
||||||
|
const plans: IQueryPlan[] = [];
|
||||||
|
|
||||||
|
for (const index of indexes) {
|
||||||
|
const plan = this.scoreIndex(index, operatorInfo, filter);
|
||||||
|
plans.push(plan);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add collection scan as fallback
|
||||||
|
plans.push({
|
||||||
|
type: 'COLLSCAN',
|
||||||
|
indexCovering: false,
|
||||||
|
selectivity: 1.0,
|
||||||
|
usesRange: false,
|
||||||
|
indexFieldsUsed: [],
|
||||||
|
explanation: 'Full collection scan',
|
||||||
|
});
|
||||||
|
|
||||||
|
// Sort by score (best first)
|
||||||
|
plans.sort((a, b) => this.calculateScore(b) - this.calculateScore(a));
|
||||||
|
|
||||||
|
return {
|
||||||
|
queryPlanner: {
|
||||||
|
plannerVersion: 1,
|
||||||
|
namespace: `${this.indexEngine['dbName']}.${this.indexEngine['collName']}`,
|
||||||
|
indexFilterSet: false,
|
||||||
|
winningPlan: plans[0],
|
||||||
|
rejectedPlans: plans.slice(1),
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
292
ts/ts_tsmdb/engine/SessionEngine.ts
Normal file
292
ts/ts_tsmdb/engine/SessionEngine.ts
Normal file
@@ -0,0 +1,292 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import type { TransactionEngine } from './TransactionEngine.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Session state
|
||||||
|
*/
|
||||||
|
export interface ISession {
|
||||||
|
/** Session ID (UUID) */
|
||||||
|
id: string;
|
||||||
|
/** Timestamp when the session was created */
|
||||||
|
createdAt: number;
|
||||||
|
/** Timestamp of the last activity */
|
||||||
|
lastActivityAt: number;
|
||||||
|
/** Current transaction ID if any */
|
||||||
|
txnId?: string;
|
||||||
|
/** Transaction number for ordering */
|
||||||
|
txnNumber?: number;
|
||||||
|
/** Whether the session is in a transaction */
|
||||||
|
inTransaction: boolean;
|
||||||
|
/** Session metadata */
|
||||||
|
metadata?: Record<string, any>;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Session engine options
|
||||||
|
*/
|
||||||
|
export interface ISessionEngineOptions {
|
||||||
|
/** Session timeout in milliseconds (default: 30 minutes) */
|
||||||
|
sessionTimeoutMs?: number;
|
||||||
|
/** Interval to check for expired sessions in ms (default: 60 seconds) */
|
||||||
|
cleanupIntervalMs?: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Session engine for managing client sessions
|
||||||
|
* - Tracks session lifecycle (create, touch, end)
|
||||||
|
* - Links sessions to transactions
|
||||||
|
* - Auto-aborts transactions on session expiry
|
||||||
|
*/
|
||||||
|
export class SessionEngine {
|
||||||
|
private sessions: Map<string, ISession> = new Map();
|
||||||
|
private sessionTimeoutMs: number;
|
||||||
|
private cleanupInterval?: ReturnType<typeof setInterval>;
|
||||||
|
private transactionEngine?: TransactionEngine;
|
||||||
|
|
||||||
|
constructor(options?: ISessionEngineOptions) {
|
||||||
|
this.sessionTimeoutMs = options?.sessionTimeoutMs ?? 30 * 60 * 1000; // 30 minutes default
|
||||||
|
const cleanupIntervalMs = options?.cleanupIntervalMs ?? 60 * 1000; // 1 minute default
|
||||||
|
|
||||||
|
// Start cleanup interval
|
||||||
|
this.cleanupInterval = setInterval(() => {
|
||||||
|
this.cleanupExpiredSessions();
|
||||||
|
}, cleanupIntervalMs);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set the transaction engine to use for auto-abort
|
||||||
|
*/
|
||||||
|
setTransactionEngine(engine: TransactionEngine): void {
|
||||||
|
this.transactionEngine = engine;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Start a new session
|
||||||
|
*/
|
||||||
|
startSession(sessionId?: string, metadata?: Record<string, any>): ISession {
|
||||||
|
const id = sessionId ?? new plugins.bson.UUID().toHexString();
|
||||||
|
const now = Date.now();
|
||||||
|
|
||||||
|
const session: ISession = {
|
||||||
|
id,
|
||||||
|
createdAt: now,
|
||||||
|
lastActivityAt: now,
|
||||||
|
inTransaction: false,
|
||||||
|
metadata,
|
||||||
|
};
|
||||||
|
|
||||||
|
this.sessions.set(id, session);
|
||||||
|
return session;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get a session by ID
|
||||||
|
*/
|
||||||
|
getSession(sessionId: string): ISession | undefined {
|
||||||
|
const session = this.sessions.get(sessionId);
|
||||||
|
if (session && this.isSessionExpired(session)) {
|
||||||
|
// Session expired, clean it up
|
||||||
|
this.endSession(sessionId);
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
return session;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Touch a session to update last activity time
|
||||||
|
*/
|
||||||
|
touchSession(sessionId: string): boolean {
|
||||||
|
const session = this.sessions.get(sessionId);
|
||||||
|
if (!session) return false;
|
||||||
|
|
||||||
|
if (this.isSessionExpired(session)) {
|
||||||
|
this.endSession(sessionId);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
session.lastActivityAt = Date.now();
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* End a session explicitly
|
||||||
|
* This will also abort any active transaction
|
||||||
|
*/
|
||||||
|
async endSession(sessionId: string): Promise<boolean> {
|
||||||
|
const session = this.sessions.get(sessionId);
|
||||||
|
if (!session) return false;
|
||||||
|
|
||||||
|
// If session has an active transaction, abort it
|
||||||
|
if (session.inTransaction && session.txnId && this.transactionEngine) {
|
||||||
|
try {
|
||||||
|
await this.transactionEngine.abortTransaction(session.txnId);
|
||||||
|
} catch (e) {
|
||||||
|
// Ignore abort errors during cleanup
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.sessions.delete(sessionId);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Start a transaction in a session
|
||||||
|
*/
|
||||||
|
startTransaction(sessionId: string, txnId: string, txnNumber?: number): boolean {
|
||||||
|
const session = this.sessions.get(sessionId);
|
||||||
|
if (!session) return false;
|
||||||
|
|
||||||
|
if (this.isSessionExpired(session)) {
|
||||||
|
this.endSession(sessionId);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
session.txnId = txnId;
|
||||||
|
session.txnNumber = txnNumber;
|
||||||
|
session.inTransaction = true;
|
||||||
|
session.lastActivityAt = Date.now();
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* End a transaction in a session (commit or abort)
|
||||||
|
*/
|
||||||
|
endTransaction(sessionId: string): boolean {
|
||||||
|
const session = this.sessions.get(sessionId);
|
||||||
|
if (!session) return false;
|
||||||
|
|
||||||
|
session.txnId = undefined;
|
||||||
|
session.txnNumber = undefined;
|
||||||
|
session.inTransaction = false;
|
||||||
|
session.lastActivityAt = Date.now();
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get transaction ID for a session
|
||||||
|
*/
|
||||||
|
getTransactionId(sessionId: string): string | undefined {
|
||||||
|
const session = this.sessions.get(sessionId);
|
||||||
|
return session?.txnId;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if session is in a transaction
|
||||||
|
*/
|
||||||
|
isInTransaction(sessionId: string): boolean {
|
||||||
|
const session = this.sessions.get(sessionId);
|
||||||
|
return session?.inTransaction ?? false;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a session is expired
|
||||||
|
*/
|
||||||
|
isSessionExpired(session: ISession): boolean {
|
||||||
|
return Date.now() - session.lastActivityAt > this.sessionTimeoutMs;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Cleanup expired sessions
|
||||||
|
* This is called periodically by the cleanup interval
|
||||||
|
*/
|
||||||
|
private async cleanupExpiredSessions(): Promise<void> {
|
||||||
|
const expiredSessions: string[] = [];
|
||||||
|
|
||||||
|
for (const [id, session] of this.sessions) {
|
||||||
|
if (this.isSessionExpired(session)) {
|
||||||
|
expiredSessions.push(id);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// End all expired sessions (this will also abort their transactions)
|
||||||
|
for (const sessionId of expiredSessions) {
|
||||||
|
await this.endSession(sessionId);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get all active sessions
|
||||||
|
*/
|
||||||
|
listSessions(): ISession[] {
|
||||||
|
const activeSessions: ISession[] = [];
|
||||||
|
for (const session of this.sessions.values()) {
|
||||||
|
if (!this.isSessionExpired(session)) {
|
||||||
|
activeSessions.push(session);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return activeSessions;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get session count
|
||||||
|
*/
|
||||||
|
getSessionCount(): number {
|
||||||
|
return this.sessions.size;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get sessions with active transactions
|
||||||
|
*/
|
||||||
|
getSessionsWithTransactions(): ISession[] {
|
||||||
|
return this.listSessions().filter(s => s.inTransaction);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Refresh session timeout
|
||||||
|
*/
|
||||||
|
refreshSession(sessionId: string): boolean {
|
||||||
|
return this.touchSession(sessionId);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Close the session engine and cleanup
|
||||||
|
*/
|
||||||
|
close(): void {
|
||||||
|
if (this.cleanupInterval) {
|
||||||
|
clearInterval(this.cleanupInterval);
|
||||||
|
this.cleanupInterval = undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Clear all sessions
|
||||||
|
this.sessions.clear();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get or create a session for a given session ID
|
||||||
|
* Useful for handling MongoDB driver session requests
|
||||||
|
*/
|
||||||
|
getOrCreateSession(sessionId: string): ISession {
|
||||||
|
let session = this.getSession(sessionId);
|
||||||
|
if (!session) {
|
||||||
|
session = this.startSession(sessionId);
|
||||||
|
} else {
|
||||||
|
this.touchSession(sessionId);
|
||||||
|
}
|
||||||
|
return session;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Extract session ID from MongoDB lsid (logical session ID)
|
||||||
|
*/
|
||||||
|
static extractSessionId(lsid: any): string | undefined {
|
||||||
|
if (!lsid) return undefined;
|
||||||
|
|
||||||
|
// MongoDB session ID format: { id: UUID }
|
||||||
|
if (lsid.id) {
|
||||||
|
if (lsid.id instanceof plugins.bson.UUID) {
|
||||||
|
return lsid.id.toHexString();
|
||||||
|
}
|
||||||
|
if (typeof lsid.id === 'string') {
|
||||||
|
return lsid.id;
|
||||||
|
}
|
||||||
|
if (lsid.id.$binary?.base64) {
|
||||||
|
// Binary UUID format
|
||||||
|
return Buffer.from(lsid.id.$binary.base64, 'base64').toString('hex');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
}
|
||||||
351
ts/ts_tsmdb/engine/TransactionEngine.ts
Normal file
351
ts/ts_tsmdb/engine/TransactionEngine.ts
Normal file
@@ -0,0 +1,351 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import type { IStorageAdapter } from '../storage/IStorageAdapter.js';
|
||||||
|
import type { Document, IStoredDocument, ITransactionOptions } from '../types/interfaces.js';
|
||||||
|
import { TsmdbTransactionError, TsmdbWriteConflictError } from '../errors/TsmdbErrors.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Transaction state
|
||||||
|
*/
|
||||||
|
export interface ITransactionState {
|
||||||
|
id: string;
|
||||||
|
sessionId: string;
|
||||||
|
startTime: plugins.bson.Timestamp;
|
||||||
|
status: 'active' | 'committed' | 'aborted';
|
||||||
|
readSet: Map<string, Set<string>>; // ns -> document _ids read
|
||||||
|
writeSet: Map<string, Map<string, { op: 'insert' | 'update' | 'delete'; doc?: IStoredDocument; originalDoc?: IStoredDocument }>>; // ns -> _id -> operation
|
||||||
|
snapshots: Map<string, IStoredDocument[]>; // ns -> snapshot of documents
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Transaction engine for ACID transaction support
|
||||||
|
*/
|
||||||
|
export class TransactionEngine {
|
||||||
|
private storage: IStorageAdapter;
|
||||||
|
private transactions: Map<string, ITransactionState> = new Map();
|
||||||
|
private txnCounter = 0;
|
||||||
|
|
||||||
|
constructor(storage: IStorageAdapter) {
|
||||||
|
this.storage = storage;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Start a new transaction
|
||||||
|
*/
|
||||||
|
startTransaction(sessionId: string, options?: ITransactionOptions): string {
|
||||||
|
this.txnCounter++;
|
||||||
|
const txnId = `txn_${sessionId}_${this.txnCounter}`;
|
||||||
|
|
||||||
|
const transaction: ITransactionState = {
|
||||||
|
id: txnId,
|
||||||
|
sessionId,
|
||||||
|
startTime: new plugins.bson.Timestamp({ t: Math.floor(Date.now() / 1000), i: this.txnCounter }),
|
||||||
|
status: 'active',
|
||||||
|
readSet: new Map(),
|
||||||
|
writeSet: new Map(),
|
||||||
|
snapshots: new Map(),
|
||||||
|
};
|
||||||
|
|
||||||
|
this.transactions.set(txnId, transaction);
|
||||||
|
return txnId;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get a transaction by ID
|
||||||
|
*/
|
||||||
|
getTransaction(txnId: string): ITransactionState | undefined {
|
||||||
|
return this.transactions.get(txnId);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a transaction is active
|
||||||
|
*/
|
||||||
|
isActive(txnId: string): boolean {
|
||||||
|
const txn = this.transactions.get(txnId);
|
||||||
|
return txn?.status === 'active';
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get or create a snapshot for a namespace
|
||||||
|
*/
|
||||||
|
async getSnapshot(txnId: string, dbName: string, collName: string): Promise<IStoredDocument[]> {
|
||||||
|
const txn = this.transactions.get(txnId);
|
||||||
|
if (!txn || txn.status !== 'active') {
|
||||||
|
throw new TsmdbTransactionError('Transaction is not active');
|
||||||
|
}
|
||||||
|
|
||||||
|
const ns = `${dbName}.${collName}`;
|
||||||
|
if (!txn.snapshots.has(ns)) {
|
||||||
|
const snapshot = await this.storage.createSnapshot(dbName, collName);
|
||||||
|
txn.snapshots.set(ns, snapshot);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Apply transaction writes to snapshot
|
||||||
|
const snapshot = txn.snapshots.get(ns)!;
|
||||||
|
const writes = txn.writeSet.get(ns);
|
||||||
|
|
||||||
|
if (!writes) {
|
||||||
|
return snapshot;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create a modified view of the snapshot
|
||||||
|
const result: IStoredDocument[] = [];
|
||||||
|
const deletedIds = new Set<string>();
|
||||||
|
const modifiedDocs = new Map<string, IStoredDocument>();
|
||||||
|
|
||||||
|
for (const [idStr, write] of writes) {
|
||||||
|
if (write.op === 'delete') {
|
||||||
|
deletedIds.add(idStr);
|
||||||
|
} else if (write.op === 'update' || write.op === 'insert') {
|
||||||
|
if (write.doc) {
|
||||||
|
modifiedDocs.set(idStr, write.doc);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add existing documents (not deleted, possibly modified)
|
||||||
|
for (const doc of snapshot) {
|
||||||
|
const idStr = doc._id.toHexString();
|
||||||
|
if (deletedIds.has(idStr)) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if (modifiedDocs.has(idStr)) {
|
||||||
|
result.push(modifiedDocs.get(idStr)!);
|
||||||
|
modifiedDocs.delete(idStr);
|
||||||
|
} else {
|
||||||
|
result.push(doc);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add new documents (inserts)
|
||||||
|
for (const doc of modifiedDocs.values()) {
|
||||||
|
result.push(doc);
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Record a read operation
|
||||||
|
*/
|
||||||
|
recordRead(txnId: string, dbName: string, collName: string, docIds: string[]): void {
|
||||||
|
const txn = this.transactions.get(txnId);
|
||||||
|
if (!txn || txn.status !== 'active') return;
|
||||||
|
|
||||||
|
const ns = `${dbName}.${collName}`;
|
||||||
|
if (!txn.readSet.has(ns)) {
|
||||||
|
txn.readSet.set(ns, new Set());
|
||||||
|
}
|
||||||
|
|
||||||
|
const readSet = txn.readSet.get(ns)!;
|
||||||
|
for (const id of docIds) {
|
||||||
|
readSet.add(id);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Record a write operation (insert)
|
||||||
|
*/
|
||||||
|
recordInsert(txnId: string, dbName: string, collName: string, doc: IStoredDocument): void {
|
||||||
|
const txn = this.transactions.get(txnId);
|
||||||
|
if (!txn || txn.status !== 'active') {
|
||||||
|
throw new TsmdbTransactionError('Transaction is not active');
|
||||||
|
}
|
||||||
|
|
||||||
|
const ns = `${dbName}.${collName}`;
|
||||||
|
if (!txn.writeSet.has(ns)) {
|
||||||
|
txn.writeSet.set(ns, new Map());
|
||||||
|
}
|
||||||
|
|
||||||
|
txn.writeSet.get(ns)!.set(doc._id.toHexString(), {
|
||||||
|
op: 'insert',
|
||||||
|
doc,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Record a write operation (update)
|
||||||
|
*/
|
||||||
|
recordUpdate(
|
||||||
|
txnId: string,
|
||||||
|
dbName: string,
|
||||||
|
collName: string,
|
||||||
|
originalDoc: IStoredDocument,
|
||||||
|
updatedDoc: IStoredDocument
|
||||||
|
): void {
|
||||||
|
const txn = this.transactions.get(txnId);
|
||||||
|
if (!txn || txn.status !== 'active') {
|
||||||
|
throw new TsmdbTransactionError('Transaction is not active');
|
||||||
|
}
|
||||||
|
|
||||||
|
const ns = `${dbName}.${collName}`;
|
||||||
|
if (!txn.writeSet.has(ns)) {
|
||||||
|
txn.writeSet.set(ns, new Map());
|
||||||
|
}
|
||||||
|
|
||||||
|
const idStr = originalDoc._id.toHexString();
|
||||||
|
const existing = txn.writeSet.get(ns)!.get(idStr);
|
||||||
|
|
||||||
|
// If we already have a write for this document, update it
|
||||||
|
if (existing) {
|
||||||
|
existing.doc = updatedDoc;
|
||||||
|
} else {
|
||||||
|
txn.writeSet.get(ns)!.set(idStr, {
|
||||||
|
op: 'update',
|
||||||
|
doc: updatedDoc,
|
||||||
|
originalDoc,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Record a write operation (delete)
|
||||||
|
*/
|
||||||
|
recordDelete(txnId: string, dbName: string, collName: string, doc: IStoredDocument): void {
|
||||||
|
const txn = this.transactions.get(txnId);
|
||||||
|
if (!txn || txn.status !== 'active') {
|
||||||
|
throw new TsmdbTransactionError('Transaction is not active');
|
||||||
|
}
|
||||||
|
|
||||||
|
const ns = `${dbName}.${collName}`;
|
||||||
|
if (!txn.writeSet.has(ns)) {
|
||||||
|
txn.writeSet.set(ns, new Map());
|
||||||
|
}
|
||||||
|
|
||||||
|
const idStr = doc._id.toHexString();
|
||||||
|
const existing = txn.writeSet.get(ns)!.get(idStr);
|
||||||
|
|
||||||
|
if (existing && existing.op === 'insert') {
|
||||||
|
// If we inserted and then deleted, just remove the write
|
||||||
|
txn.writeSet.get(ns)!.delete(idStr);
|
||||||
|
} else {
|
||||||
|
txn.writeSet.get(ns)!.set(idStr, {
|
||||||
|
op: 'delete',
|
||||||
|
originalDoc: doc,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Commit a transaction
|
||||||
|
*/
|
||||||
|
async commitTransaction(txnId: string): Promise<void> {
|
||||||
|
const txn = this.transactions.get(txnId);
|
||||||
|
if (!txn) {
|
||||||
|
throw new TsmdbTransactionError('Transaction not found');
|
||||||
|
}
|
||||||
|
if (txn.status !== 'active') {
|
||||||
|
throw new TsmdbTransactionError(`Cannot commit transaction in state: ${txn.status}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for write conflicts
|
||||||
|
for (const [ns, writes] of txn.writeSet) {
|
||||||
|
const [dbName, collName] = ns.split('.');
|
||||||
|
const ids = Array.from(writes.keys()).map(id => new plugins.bson.ObjectId(id));
|
||||||
|
|
||||||
|
const hasConflicts = await this.storage.hasConflicts(dbName, collName, ids, txn.startTime);
|
||||||
|
if (hasConflicts) {
|
||||||
|
txn.status = 'aborted';
|
||||||
|
throw new TsmdbWriteConflictError();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Apply all writes
|
||||||
|
for (const [ns, writes] of txn.writeSet) {
|
||||||
|
const [dbName, collName] = ns.split('.');
|
||||||
|
|
||||||
|
for (const [idStr, write] of writes) {
|
||||||
|
switch (write.op) {
|
||||||
|
case 'insert':
|
||||||
|
if (write.doc) {
|
||||||
|
await this.storage.insertOne(dbName, collName, write.doc);
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
case 'update':
|
||||||
|
if (write.doc) {
|
||||||
|
await this.storage.updateById(dbName, collName, new plugins.bson.ObjectId(idStr), write.doc);
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
case 'delete':
|
||||||
|
await this.storage.deleteById(dbName, collName, new plugins.bson.ObjectId(idStr));
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
txn.status = 'committed';
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Abort a transaction
|
||||||
|
*/
|
||||||
|
async abortTransaction(txnId: string): Promise<void> {
|
||||||
|
const txn = this.transactions.get(txnId);
|
||||||
|
if (!txn) {
|
||||||
|
throw new TsmdbTransactionError('Transaction not found');
|
||||||
|
}
|
||||||
|
if (txn.status !== 'active') {
|
||||||
|
// Already committed or aborted, just return
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Simply discard all buffered writes
|
||||||
|
txn.writeSet.clear();
|
||||||
|
txn.readSet.clear();
|
||||||
|
txn.snapshots.clear();
|
||||||
|
txn.status = 'aborted';
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* End a transaction (cleanup)
|
||||||
|
*/
|
||||||
|
endTransaction(txnId: string): void {
|
||||||
|
this.transactions.delete(txnId);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get all pending writes for a namespace
|
||||||
|
*/
|
||||||
|
getPendingWrites(txnId: string, dbName: string, collName: string): Map<string, { op: 'insert' | 'update' | 'delete'; doc?: IStoredDocument }> | undefined {
|
||||||
|
const txn = this.transactions.get(txnId);
|
||||||
|
if (!txn) return undefined;
|
||||||
|
|
||||||
|
const ns = `${dbName}.${collName}`;
|
||||||
|
return txn.writeSet.get(ns);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute a callback within a transaction, with automatic retry on conflict
|
||||||
|
*/
|
||||||
|
async withTransaction<T>(
|
||||||
|
sessionId: string,
|
||||||
|
callback: (txnId: string) => Promise<T>,
|
||||||
|
options?: ITransactionOptions & { maxRetries?: number }
|
||||||
|
): Promise<T> {
|
||||||
|
const maxRetries = options?.maxRetries ?? 3;
|
||||||
|
let lastError: Error | undefined;
|
||||||
|
|
||||||
|
for (let attempt = 0; attempt < maxRetries; attempt++) {
|
||||||
|
const txnId = this.startTransaction(sessionId, options);
|
||||||
|
|
||||||
|
try {
|
||||||
|
const result = await callback(txnId);
|
||||||
|
await this.commitTransaction(txnId);
|
||||||
|
this.endTransaction(txnId);
|
||||||
|
return result;
|
||||||
|
} catch (error: any) {
|
||||||
|
await this.abortTransaction(txnId);
|
||||||
|
this.endTransaction(txnId);
|
||||||
|
|
||||||
|
if (error instanceof TsmdbWriteConflictError && attempt < maxRetries - 1) {
|
||||||
|
// Retry on write conflict
|
||||||
|
lastError = error;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
throw lastError || new TsmdbTransactionError('Transaction failed after max retries');
|
||||||
|
}
|
||||||
|
}
|
||||||
506
ts/ts_tsmdb/engine/UpdateEngine.ts
Normal file
506
ts/ts_tsmdb/engine/UpdateEngine.ts
Normal file
@@ -0,0 +1,506 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import type { Document, IStoredDocument } from '../types/interfaces.js';
|
||||||
|
import { QueryEngine } from './QueryEngine.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update engine for MongoDB-compatible update operations
|
||||||
|
*/
|
||||||
|
export class UpdateEngine {
|
||||||
|
/**
|
||||||
|
* Apply an update specification to a document
|
||||||
|
* Returns the updated document or null if no update was applied
|
||||||
|
*/
|
||||||
|
static applyUpdate(document: IStoredDocument, update: Document, arrayFilters?: Document[]): IStoredDocument {
|
||||||
|
// Check if this is an aggregation pipeline update
|
||||||
|
if (Array.isArray(update)) {
|
||||||
|
// Aggregation pipeline updates are not yet supported
|
||||||
|
throw new Error('Aggregation pipeline updates are not yet supported');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if this is a replacement (no $ operators at top level)
|
||||||
|
const hasOperators = Object.keys(update).some(k => k.startsWith('$'));
|
||||||
|
|
||||||
|
if (!hasOperators) {
|
||||||
|
// This is a replacement - preserve _id
|
||||||
|
return {
|
||||||
|
_id: document._id,
|
||||||
|
...update,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Apply update operators
|
||||||
|
const result = this.deepClone(document);
|
||||||
|
|
||||||
|
for (const [operator, operand] of Object.entries(update)) {
|
||||||
|
switch (operator) {
|
||||||
|
case '$set':
|
||||||
|
this.applySet(result, operand);
|
||||||
|
break;
|
||||||
|
case '$unset':
|
||||||
|
this.applyUnset(result, operand);
|
||||||
|
break;
|
||||||
|
case '$inc':
|
||||||
|
this.applyInc(result, operand);
|
||||||
|
break;
|
||||||
|
case '$mul':
|
||||||
|
this.applyMul(result, operand);
|
||||||
|
break;
|
||||||
|
case '$min':
|
||||||
|
this.applyMin(result, operand);
|
||||||
|
break;
|
||||||
|
case '$max':
|
||||||
|
this.applyMax(result, operand);
|
||||||
|
break;
|
||||||
|
case '$rename':
|
||||||
|
this.applyRename(result, operand);
|
||||||
|
break;
|
||||||
|
case '$currentDate':
|
||||||
|
this.applyCurrentDate(result, operand);
|
||||||
|
break;
|
||||||
|
case '$setOnInsert':
|
||||||
|
// Only applied during upsert insert, handled elsewhere
|
||||||
|
break;
|
||||||
|
case '$push':
|
||||||
|
this.applyPush(result, operand, arrayFilters);
|
||||||
|
break;
|
||||||
|
case '$pop':
|
||||||
|
this.applyPop(result, operand);
|
||||||
|
break;
|
||||||
|
case '$pull':
|
||||||
|
this.applyPull(result, operand, arrayFilters);
|
||||||
|
break;
|
||||||
|
case '$pullAll':
|
||||||
|
this.applyPullAll(result, operand);
|
||||||
|
break;
|
||||||
|
case '$addToSet':
|
||||||
|
this.applyAddToSet(result, operand);
|
||||||
|
break;
|
||||||
|
case '$bit':
|
||||||
|
this.applyBit(result, operand);
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
throw new Error(`Unknown update operator: ${operator}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Apply $setOnInsert for upsert operations
|
||||||
|
*/
|
||||||
|
static applySetOnInsert(document: IStoredDocument, setOnInsert: Document): IStoredDocument {
|
||||||
|
const result = this.deepClone(document);
|
||||||
|
this.applySet(result, setOnInsert);
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Deep clone a document
|
||||||
|
*/
|
||||||
|
private static deepClone(obj: any): any {
|
||||||
|
if (obj === null || typeof obj !== 'object') {
|
||||||
|
return obj;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (obj instanceof plugins.bson.ObjectId) {
|
||||||
|
return new plugins.bson.ObjectId(obj.toHexString());
|
||||||
|
}
|
||||||
|
|
||||||
|
if (obj instanceof Date) {
|
||||||
|
return new Date(obj.getTime());
|
||||||
|
}
|
||||||
|
|
||||||
|
if (obj instanceof plugins.bson.Timestamp) {
|
||||||
|
return new plugins.bson.Timestamp({ t: obj.high, i: obj.low });
|
||||||
|
}
|
||||||
|
|
||||||
|
if (Array.isArray(obj)) {
|
||||||
|
return obj.map(item => this.deepClone(item));
|
||||||
|
}
|
||||||
|
|
||||||
|
const cloned: any = {};
|
||||||
|
for (const key of Object.keys(obj)) {
|
||||||
|
cloned[key] = this.deepClone(obj[key]);
|
||||||
|
}
|
||||||
|
return cloned;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set a nested value
|
||||||
|
*/
|
||||||
|
private static setNestedValue(obj: any, path: string, value: any): void {
|
||||||
|
const parts = path.split('.');
|
||||||
|
let current = obj;
|
||||||
|
|
||||||
|
for (let i = 0; i < parts.length - 1; i++) {
|
||||||
|
const part = parts[i];
|
||||||
|
|
||||||
|
// Handle array index notation
|
||||||
|
const arrayMatch = part.match(/^(\w+)\[(\d+)\]$/);
|
||||||
|
if (arrayMatch) {
|
||||||
|
const [, fieldName, indexStr] = arrayMatch;
|
||||||
|
const index = parseInt(indexStr, 10);
|
||||||
|
if (!(fieldName in current)) {
|
||||||
|
current[fieldName] = [];
|
||||||
|
}
|
||||||
|
if (!current[fieldName][index]) {
|
||||||
|
current[fieldName][index] = {};
|
||||||
|
}
|
||||||
|
current = current[fieldName][index];
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle numeric index (array positional)
|
||||||
|
const numIndex = parseInt(part, 10);
|
||||||
|
if (!isNaN(numIndex) && Array.isArray(current)) {
|
||||||
|
if (!current[numIndex]) {
|
||||||
|
current[numIndex] = {};
|
||||||
|
}
|
||||||
|
current = current[numIndex];
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!(part in current) || current[part] === null) {
|
||||||
|
current[part] = {};
|
||||||
|
}
|
||||||
|
current = current[part];
|
||||||
|
}
|
||||||
|
|
||||||
|
const lastPart = parts[parts.length - 1];
|
||||||
|
const numIndex = parseInt(lastPart, 10);
|
||||||
|
if (!isNaN(numIndex) && Array.isArray(current)) {
|
||||||
|
current[numIndex] = value;
|
||||||
|
} else {
|
||||||
|
current[lastPart] = value;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get a nested value
|
||||||
|
*/
|
||||||
|
private static getNestedValue(obj: any, path: string): any {
|
||||||
|
return QueryEngine.getNestedValue(obj, path);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Delete a nested value
|
||||||
|
*/
|
||||||
|
private static deleteNestedValue(obj: any, path: string): void {
|
||||||
|
const parts = path.split('.');
|
||||||
|
let current = obj;
|
||||||
|
|
||||||
|
for (let i = 0; i < parts.length - 1; i++) {
|
||||||
|
const part = parts[i];
|
||||||
|
if (!(part in current)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
current = current[part];
|
||||||
|
}
|
||||||
|
|
||||||
|
delete current[parts[parts.length - 1]];
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Field Update Operators
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
private static applySet(doc: any, fields: Document): void {
|
||||||
|
for (const [path, value] of Object.entries(fields)) {
|
||||||
|
this.setNestedValue(doc, path, this.deepClone(value));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static applyUnset(doc: any, fields: Document): void {
|
||||||
|
for (const path of Object.keys(fields)) {
|
||||||
|
this.deleteNestedValue(doc, path);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static applyInc(doc: any, fields: Document): void {
|
||||||
|
for (const [path, value] of Object.entries(fields)) {
|
||||||
|
const current = this.getNestedValue(doc, path) || 0;
|
||||||
|
if (typeof current !== 'number') {
|
||||||
|
throw new Error(`Cannot apply $inc to non-numeric field: ${path}`);
|
||||||
|
}
|
||||||
|
this.setNestedValue(doc, path, current + (value as number));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static applyMul(doc: any, fields: Document): void {
|
||||||
|
for (const [path, value] of Object.entries(fields)) {
|
||||||
|
const current = this.getNestedValue(doc, path) || 0;
|
||||||
|
if (typeof current !== 'number') {
|
||||||
|
throw new Error(`Cannot apply $mul to non-numeric field: ${path}`);
|
||||||
|
}
|
||||||
|
this.setNestedValue(doc, path, current * (value as number));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static applyMin(doc: any, fields: Document): void {
|
||||||
|
for (const [path, value] of Object.entries(fields)) {
|
||||||
|
const current = this.getNestedValue(doc, path);
|
||||||
|
if (current === undefined || this.compareValues(value, current) < 0) {
|
||||||
|
this.setNestedValue(doc, path, this.deepClone(value));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static applyMax(doc: any, fields: Document): void {
|
||||||
|
for (const [path, value] of Object.entries(fields)) {
|
||||||
|
const current = this.getNestedValue(doc, path);
|
||||||
|
if (current === undefined || this.compareValues(value, current) > 0) {
|
||||||
|
this.setNestedValue(doc, path, this.deepClone(value));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static applyRename(doc: any, fields: Document): void {
|
||||||
|
for (const [oldPath, newPath] of Object.entries(fields)) {
|
||||||
|
const value = this.getNestedValue(doc, oldPath);
|
||||||
|
if (value !== undefined) {
|
||||||
|
this.deleteNestedValue(doc, oldPath);
|
||||||
|
this.setNestedValue(doc, newPath as string, value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static applyCurrentDate(doc: any, fields: Document): void {
|
||||||
|
for (const [path, spec] of Object.entries(fields)) {
|
||||||
|
if (spec === true) {
|
||||||
|
this.setNestedValue(doc, path, new Date());
|
||||||
|
} else if (typeof spec === 'object' && spec.$type === 'date') {
|
||||||
|
this.setNestedValue(doc, path, new Date());
|
||||||
|
} else if (typeof spec === 'object' && spec.$type === 'timestamp') {
|
||||||
|
this.setNestedValue(doc, path, new plugins.bson.Timestamp({ t: Math.floor(Date.now() / 1000), i: 0 }));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Array Update Operators
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
private static applyPush(doc: any, fields: Document, arrayFilters?: Document[]): void {
|
||||||
|
for (const [path, spec] of Object.entries(fields)) {
|
||||||
|
let arr = this.getNestedValue(doc, path);
|
||||||
|
if (arr === undefined) {
|
||||||
|
arr = [];
|
||||||
|
this.setNestedValue(doc, path, arr);
|
||||||
|
}
|
||||||
|
if (!Array.isArray(arr)) {
|
||||||
|
throw new Error(`Cannot apply $push to non-array field: ${path}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (spec && typeof spec === 'object' && '$each' in spec) {
|
||||||
|
// $push with modifiers
|
||||||
|
let elements = (spec.$each as any[]).map(e => this.deepClone(e));
|
||||||
|
const position = spec.$position as number | undefined;
|
||||||
|
const slice = spec.$slice as number | undefined;
|
||||||
|
const sortSpec = spec.$sort;
|
||||||
|
|
||||||
|
if (position !== undefined) {
|
||||||
|
arr.splice(position, 0, ...elements);
|
||||||
|
} else {
|
||||||
|
arr.push(...elements);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (sortSpec !== undefined) {
|
||||||
|
if (typeof sortSpec === 'number') {
|
||||||
|
arr.sort((a, b) => (a - b) * sortSpec);
|
||||||
|
} else {
|
||||||
|
// Sort by field(s)
|
||||||
|
const entries = Object.entries(sortSpec as Document);
|
||||||
|
arr.sort((a, b) => {
|
||||||
|
for (const [field, dir] of entries) {
|
||||||
|
const av = this.getNestedValue(a, field);
|
||||||
|
const bv = this.getNestedValue(b, field);
|
||||||
|
const cmp = this.compareValues(av, bv) * (dir as number);
|
||||||
|
if (cmp !== 0) return cmp;
|
||||||
|
}
|
||||||
|
return 0;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (slice !== undefined) {
|
||||||
|
if (slice >= 0) {
|
||||||
|
arr.splice(slice);
|
||||||
|
} else {
|
||||||
|
arr.splice(0, arr.length + slice);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Simple push
|
||||||
|
arr.push(this.deepClone(spec));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static applyPop(doc: any, fields: Document): void {
|
||||||
|
for (const [path, direction] of Object.entries(fields)) {
|
||||||
|
const arr = this.getNestedValue(doc, path);
|
||||||
|
if (!Array.isArray(arr)) {
|
||||||
|
throw new Error(`Cannot apply $pop to non-array field: ${path}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if ((direction as number) === 1) {
|
||||||
|
arr.pop();
|
||||||
|
} else {
|
||||||
|
arr.shift();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static applyPull(doc: any, fields: Document, arrayFilters?: Document[]): void {
|
||||||
|
for (const [path, condition] of Object.entries(fields)) {
|
||||||
|
const arr = this.getNestedValue(doc, path);
|
||||||
|
if (!Array.isArray(arr)) {
|
||||||
|
continue; // Skip if not an array
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof condition === 'object' && condition !== null && !Array.isArray(condition)) {
|
||||||
|
// Condition is a query filter
|
||||||
|
const hasOperators = Object.keys(condition).some(k => k.startsWith('$'));
|
||||||
|
if (hasOperators) {
|
||||||
|
// Filter using query operators
|
||||||
|
const remaining = arr.filter(item => !QueryEngine.matches(item, condition));
|
||||||
|
arr.length = 0;
|
||||||
|
arr.push(...remaining);
|
||||||
|
} else {
|
||||||
|
// Match documents with all specified fields
|
||||||
|
const remaining = arr.filter(item => {
|
||||||
|
if (typeof item !== 'object' || item === null) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
return !Object.entries(condition).every(([k, v]) => {
|
||||||
|
const itemVal = this.getNestedValue(item, k);
|
||||||
|
return this.valuesEqual(itemVal, v);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
arr.length = 0;
|
||||||
|
arr.push(...remaining);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Direct value match
|
||||||
|
const remaining = arr.filter(item => !this.valuesEqual(item, condition));
|
||||||
|
arr.length = 0;
|
||||||
|
arr.push(...remaining);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static applyPullAll(doc: any, fields: Document): void {
|
||||||
|
for (const [path, values] of Object.entries(fields)) {
|
||||||
|
const arr = this.getNestedValue(doc, path);
|
||||||
|
if (!Array.isArray(arr)) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if (!Array.isArray(values)) {
|
||||||
|
throw new Error(`$pullAll requires an array argument`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const valueSet = new Set(values.map(v => JSON.stringify(v)));
|
||||||
|
const remaining = arr.filter(item => !valueSet.has(JSON.stringify(item)));
|
||||||
|
arr.length = 0;
|
||||||
|
arr.push(...remaining);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static applyAddToSet(doc: any, fields: Document): void {
|
||||||
|
for (const [path, spec] of Object.entries(fields)) {
|
||||||
|
let arr = this.getNestedValue(doc, path);
|
||||||
|
if (arr === undefined) {
|
||||||
|
arr = [];
|
||||||
|
this.setNestedValue(doc, path, arr);
|
||||||
|
}
|
||||||
|
if (!Array.isArray(arr)) {
|
||||||
|
throw new Error(`Cannot apply $addToSet to non-array field: ${path}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const existingSet = new Set(arr.map(v => JSON.stringify(v)));
|
||||||
|
|
||||||
|
if (spec && typeof spec === 'object' && '$each' in spec) {
|
||||||
|
for (const item of spec.$each as any[]) {
|
||||||
|
const key = JSON.stringify(item);
|
||||||
|
if (!existingSet.has(key)) {
|
||||||
|
arr.push(this.deepClone(item));
|
||||||
|
existingSet.add(key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
const key = JSON.stringify(spec);
|
||||||
|
if (!existingSet.has(key)) {
|
||||||
|
arr.push(this.deepClone(spec));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static applyBit(doc: any, fields: Document): void {
|
||||||
|
for (const [path, operations] of Object.entries(fields)) {
|
||||||
|
let current = this.getNestedValue(doc, path) || 0;
|
||||||
|
if (typeof current !== 'number') {
|
||||||
|
throw new Error(`Cannot apply $bit to non-numeric field: ${path}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const [op, value] of Object.entries(operations as Document)) {
|
||||||
|
switch (op) {
|
||||||
|
case 'and':
|
||||||
|
current = current & (value as number);
|
||||||
|
break;
|
||||||
|
case 'or':
|
||||||
|
current = current | (value as number);
|
||||||
|
break;
|
||||||
|
case 'xor':
|
||||||
|
current = current ^ (value as number);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.setNestedValue(doc, path, current);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Helper Methods
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
private static compareValues(a: any, b: any): number {
|
||||||
|
if (a === b) return 0;
|
||||||
|
if (a === null || a === undefined) return -1;
|
||||||
|
if (b === null || b === undefined) return 1;
|
||||||
|
|
||||||
|
if (typeof a === 'number' && typeof b === 'number') {
|
||||||
|
return a - b;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (a instanceof Date && b instanceof Date) {
|
||||||
|
return a.getTime() - b.getTime();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof a === 'string' && typeof b === 'string') {
|
||||||
|
return a.localeCompare(b);
|
||||||
|
}
|
||||||
|
|
||||||
|
return String(a).localeCompare(String(b));
|
||||||
|
}
|
||||||
|
|
||||||
|
private static valuesEqual(a: any, b: any): boolean {
|
||||||
|
if (a === b) return true;
|
||||||
|
|
||||||
|
if (a instanceof plugins.bson.ObjectId && b instanceof plugins.bson.ObjectId) {
|
||||||
|
return a.equals(b);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (a instanceof Date && b instanceof Date) {
|
||||||
|
return a.getTime() === b.getTime();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof a === 'object' && typeof b === 'object' && a !== null && b !== null) {
|
||||||
|
return JSON.stringify(a) === JSON.stringify(b);
|
||||||
|
}
|
||||||
|
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
181
ts/ts_tsmdb/errors/TsmdbErrors.ts
Normal file
181
ts/ts_tsmdb/errors/TsmdbErrors.ts
Normal file
@@ -0,0 +1,181 @@
|
|||||||
|
/**
|
||||||
|
* Base error class for all TsmDB errors
|
||||||
|
* Mirrors MongoDB driver error hierarchy
|
||||||
|
*/
|
||||||
|
export class TsmdbError extends Error {
|
||||||
|
public code?: number;
|
||||||
|
public codeName?: string;
|
||||||
|
|
||||||
|
constructor(message: string, code?: number, codeName?: string) {
|
||||||
|
super(message);
|
||||||
|
this.name = 'TsmdbError';
|
||||||
|
this.code = code;
|
||||||
|
this.codeName = codeName;
|
||||||
|
Object.setPrototypeOf(this, new.target.prototype);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Error thrown during connection issues
 * (no MongoDB error code is associated; only the name distinguishes it).
 */
export class TsmdbConnectionError extends TsmdbError {
  constructor(message: string) {
    super(message);
    this.name = 'TsmdbConnectionError';
  }
}
|
||||||
|
|
||||||
|
/**
 * Error thrown when an operation times out.
 * Uses MongoDB's code 50 / 'MaxTimeMSExpired' so driver-side handling matches.
 */
export class TsmdbTimeoutError extends TsmdbError {
  constructor(message: string) {
    super(message, 50, 'MaxTimeMSExpired');
    this.name = 'TsmdbTimeoutError';
  }
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Error thrown during write operations
|
||||||
|
*/
|
||||||
|
export class TsmdbWriteError extends TsmdbError {
|
||||||
|
public writeErrors?: IWriteError[];
|
||||||
|
public result?: any;
|
||||||
|
|
||||||
|
constructor(message: string, code?: number, writeErrors?: IWriteError[]) {
|
||||||
|
super(message, code);
|
||||||
|
this.name = 'TsmdbWriteError';
|
||||||
|
this.writeErrors = writeErrors;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Error thrown for duplicate key violations
|
||||||
|
*/
|
||||||
|
export class TsmdbDuplicateKeyError extends TsmdbWriteError {
|
||||||
|
public keyPattern?: Record<string, 1>;
|
||||||
|
public keyValue?: Record<string, any>;
|
||||||
|
|
||||||
|
constructor(message: string, keyPattern?: Record<string, 1>, keyValue?: Record<string, any>) {
|
||||||
|
super(message, 11000);
|
||||||
|
this.name = 'TsmdbDuplicateKeyError';
|
||||||
|
this.codeName = 'DuplicateKey';
|
||||||
|
this.keyPattern = keyPattern;
|
||||||
|
this.keyValue = keyValue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Error thrown for bulk write failures
|
||||||
|
*/
|
||||||
|
export class TsmdbBulkWriteError extends TsmdbError {
|
||||||
|
public writeErrors: IWriteError[];
|
||||||
|
public result: any;
|
||||||
|
|
||||||
|
constructor(message: string, writeErrors: IWriteError[], result: any) {
|
||||||
|
super(message, 65);
|
||||||
|
this.name = 'TsmdbBulkWriteError';
|
||||||
|
this.writeErrors = writeErrors;
|
||||||
|
this.result = result;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Error thrown during transaction operations.
 * The optional `code` lets callers pass through MongoDB transaction codes.
 */
export class TsmdbTransactionError extends TsmdbError {
  constructor(message: string, code?: number) {
    super(message, code);
    this.name = 'TsmdbTransactionError';
  }
}
|
||||||
|
|
||||||
|
/**
 * Error thrown when a transaction is aborted due to a conflict.
 * Uses MongoDB's code 112 / 'WriteConflict', which drivers treat as retryable.
 */
export class TsmdbWriteConflictError extends TsmdbTransactionError {
  constructor(message: string = 'Write conflict during transaction') {
    super(message, 112);
    this.name = 'TsmdbWriteConflictError';
    this.codeName = 'WriteConflict';
  }
}
|
||||||
|
|
||||||
|
/**
 * Error thrown for invalid arguments passed to client-facing APIs.
 * Carries no MongoDB error code.
 */
export class TsmdbArgumentError extends TsmdbError {
  constructor(message: string) {
    super(message);
    this.name = 'TsmdbArgumentError';
  }
}
|
||||||
|
|
||||||
|
/**
 * Error thrown when an operation is not supported by TsmDB.
 * Uses MongoDB's code 115 / 'CommandNotSupported'.
 */
export class TsmdbNotSupportedError extends TsmdbError {
  constructor(message: string) {
    super(message, 115);
    this.name = 'TsmdbNotSupportedError';
    this.codeName = 'CommandNotSupported';
  }
}
|
||||||
|
|
||||||
|
/**
 * Error thrown when a cursor is exhausted, closed, or otherwise unusable.
 * Carries no MongoDB error code.
 */
export class TsmdbCursorError extends TsmdbError {
  constructor(message: string) {
    super(message);
    this.name = 'TsmdbCursorError';
  }
}
|
||||||
|
|
||||||
|
/**
 * Error thrown when a namespace (database.collection) is invalid.
 * Uses MongoDB's code 73 / 'InvalidNamespace'.
 */
export class TsmdbNamespaceError extends TsmdbError {
  constructor(message: string) {
    super(message, 73);
    this.name = 'TsmdbNamespaceError';
    this.codeName = 'InvalidNamespace';
  }
}
|
||||||
|
|
||||||
|
/**
 * Error thrown when an index operation fails.
 * Defaults to MongoDB's code 86 (IndexKeySpecsConflict) when none is given.
 */
export class TsmdbIndexError extends TsmdbError {
  constructor(message: string, code?: number) {
    super(message, code || 86);
    this.name = 'TsmdbIndexError';
  }
}
|
||||||
|
|
||||||
|
/**
 * Write error detail for bulk operations (shape mirrors MongoDB's
 * writeErrors entries in bulk results).
 */
export interface IWriteError {
  /** Index of the failed operation within the submitted batch. */
  index: number;
  /** MongoDB error code (e.g. 11000 for duplicate key). */
  code: number;
  /** Human-readable error message. */
  errmsg: string;
  /** The operation document that failed. */
  op: any;
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convert any error to a TsmdbError
|
||||||
|
*/
|
||||||
|
export function toTsmdbError(error: any): TsmdbError {
|
||||||
|
if (error instanceof TsmdbError) {
|
||||||
|
return error;
|
||||||
|
}
|
||||||
|
const tsmdbError = new TsmdbError(error.message || String(error));
|
||||||
|
if (error.code) {
|
||||||
|
tsmdbError.code = error.code;
|
||||||
|
}
|
||||||
|
if (error.codeName) {
|
||||||
|
tsmdbError.codeName = error.codeName;
|
||||||
|
}
|
||||||
|
return tsmdbError;
|
||||||
|
}
|
||||||
46
ts/ts_tsmdb/index.ts
Normal file
46
ts/ts_tsmdb/index.ts
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
// TsmDB - MongoDB Wire Protocol compatible in-memory database server
|
||||||
|
// Use the official MongoDB driver to connect to TsmdbServer
|
||||||
|
|
||||||
|
// Re-export plugins for external use
|
||||||
|
import * as plugins from './plugins.js';
|
||||||
|
export { plugins };
|
||||||
|
|
||||||
|
// Export BSON types for convenience
|
||||||
|
export { ObjectId, Binary, Timestamp, Long, Decimal128, UUID } from 'bson';
|
||||||
|
|
||||||
|
// Export all types
|
||||||
|
export * from './types/interfaces.js';
|
||||||
|
|
||||||
|
// Export errors
|
||||||
|
export * from './errors/TsmdbErrors.js';
|
||||||
|
|
||||||
|
// Export storage adapters
|
||||||
|
export type { IStorageAdapter } from './storage/IStorageAdapter.js';
|
||||||
|
export { MemoryStorageAdapter } from './storage/MemoryStorageAdapter.js';
|
||||||
|
export { FileStorageAdapter } from './storage/FileStorageAdapter.js';
|
||||||
|
export { OpLog } from './storage/OpLog.js';
|
||||||
|
export { WAL } from './storage/WAL.js';
|
||||||
|
export type { IWalEntry, TWalOperation } from './storage/WAL.js';
|
||||||
|
|
||||||
|
// Export engines
|
||||||
|
export { QueryEngine } from './engine/QueryEngine.js';
|
||||||
|
export { UpdateEngine } from './engine/UpdateEngine.js';
|
||||||
|
export { AggregationEngine } from './engine/AggregationEngine.js';
|
||||||
|
export { IndexEngine } from './engine/IndexEngine.js';
|
||||||
|
export { TransactionEngine } from './engine/TransactionEngine.js';
|
||||||
|
export { QueryPlanner } from './engine/QueryPlanner.js';
|
||||||
|
export type { IQueryPlan, TQueryPlanType } from './engine/QueryPlanner.js';
|
||||||
|
export { SessionEngine } from './engine/SessionEngine.js';
|
||||||
|
export type { ISession, ISessionEngineOptions } from './engine/SessionEngine.js';
|
||||||
|
|
||||||
|
// Export server (the main entry point for using TsmDB)
|
||||||
|
export { TsmdbServer } from './server/TsmdbServer.js';
|
||||||
|
export type { ITsmdbServerOptions } from './server/TsmdbServer.js';
|
||||||
|
|
||||||
|
// Export wire protocol utilities (for advanced usage)
|
||||||
|
export { WireProtocol } from './server/WireProtocol.js';
|
||||||
|
export { CommandRouter } from './server/CommandRouter.js';
|
||||||
|
export type { ICommandHandler, IHandlerContext, ICursorState } from './server/CommandRouter.js';
|
||||||
|
|
||||||
|
// Export utilities
|
||||||
|
export * from './utils/checksum.js';
|
||||||
17
ts/ts_tsmdb/plugins.ts
Normal file
17
ts/ts_tsmdb/plugins.ts
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
// @push.rocks scope
|
||||||
|
import * as smartfs from '@push.rocks/smartfs';
|
||||||
|
import * as smartpath from '@push.rocks/smartpath';
|
||||||
|
import * as smartpromise from '@push.rocks/smartpromise';
|
||||||
|
import * as smartrx from '@push.rocks/smartrx';
|
||||||
|
|
||||||
|
export { smartfs, smartpath, smartpromise, smartrx };
|
||||||
|
|
||||||
|
// thirdparty
|
||||||
|
import * as bson from 'bson';
|
||||||
|
import * as mingo from 'mingo';
|
||||||
|
|
||||||
|
export { bson, mingo };
|
||||||
|
|
||||||
|
// Re-export commonly used mingo classes
|
||||||
|
export { Query } from 'mingo';
|
||||||
|
export { Aggregator } from 'mingo';
|
||||||
289
ts/ts_tsmdb/server/CommandRouter.ts
Normal file
289
ts/ts_tsmdb/server/CommandRouter.ts
Normal file
@@ -0,0 +1,289 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import type { IStorageAdapter } from '../storage/IStorageAdapter.js';
|
||||||
|
import type { IParsedCommand } from './WireProtocol.js';
|
||||||
|
import type { TsmdbServer } from './TsmdbServer.js';
|
||||||
|
import { IndexEngine } from '../engine/IndexEngine.js';
|
||||||
|
import { TransactionEngine } from '../engine/TransactionEngine.js';
|
||||||
|
import { SessionEngine } from '../engine/SessionEngine.js';
|
||||||
|
|
||||||
|
// Import handlers
|
||||||
|
import { HelloHandler } from './handlers/HelloHandler.js';
|
||||||
|
import { InsertHandler } from './handlers/InsertHandler.js';
|
||||||
|
import { FindHandler } from './handlers/FindHandler.js';
|
||||||
|
import { UpdateHandler } from './handlers/UpdateHandler.js';
|
||||||
|
import { DeleteHandler } from './handlers/DeleteHandler.js';
|
||||||
|
import { AggregateHandler } from './handlers/AggregateHandler.js';
|
||||||
|
import { IndexHandler } from './handlers/IndexHandler.js';
|
||||||
|
import { AdminHandler } from './handlers/AdminHandler.js';
|
||||||
|
|
||||||
|
/**
 * Handler context passed to command handlers.
 * Bundles the storage adapter, the parsed command, and the shared engine
 * instances a handler may need to service one command.
 */
export interface IHandlerContext {
  /** Storage adapter backing all collections. */
  storage: IStorageAdapter;
  /** Owning server instance. */
  server: TsmdbServer;
  /** Target database name parsed from the command's $db. */
  database: string;
  /** The raw command document. */
  command: plugins.bson.Document;
  /** OP_MSG kind-1 document sequences, keyed by sequence identifier. */
  documentSequences?: Map<string, plugins.bson.Document[]>;
  /** Get or create an IndexEngine for a collection */
  getIndexEngine: (collName: string) => IndexEngine;
  /** Transaction engine instance */
  transactionEngine: TransactionEngine;
  /** Current transaction ID (if in a transaction) */
  txnId?: string;
  /** Session ID (from lsid) */
  sessionId?: string;
  /** Session engine instance */
  sessionEngine: SessionEngine;
}
|
||||||
|
|
||||||
|
/**
 * Command handler interface.
 * Each handler receives the full context and resolves with the BSON reply
 * document to send back to the client.
 */
export interface ICommandHandler {
  handle(context: IHandlerContext): Promise<plugins.bson.Document>;
}
|
||||||
|
|
||||||
|
/**
 * CommandRouter - Routes incoming commands to appropriate handlers
 */
export class CommandRouter {
  private storage: IStorageAdapter;
  private server: TsmdbServer;
  // Command name -> handler; several names may share one handler instance.
  private handlers: Map<string, ICommandHandler> = new Map();

  // Cursor state for getMore operations
  private cursors: Map<bigint, ICursorState> = new Map();
  // Monotonically increasing cursor id source (0 is reserved for "exhausted").
  private cursorIdCounter: bigint = BigInt(1);

  // Index engine cache: db.collection -> IndexEngine
  private indexEngines: Map<string, IndexEngine> = new Map();

  // Transaction engine (shared across all handlers)
  private transactionEngine: TransactionEngine;

  // Session engine (shared across all handlers)
  private sessionEngine: SessionEngine;
|
||||||
|
|
||||||
|
  /**
   * @param storage Storage adapter backing all collections.
   * @param server  Owning server instance, exposed to handlers via context.
   */
  constructor(storage: IStorageAdapter, server: TsmdbServer) {
    this.storage = storage;
    this.server = server;
    this.transactionEngine = new TransactionEngine(storage);
    this.sessionEngine = new SessionEngine();
    // Link session engine to transaction engine for auto-abort on session expiry
    this.sessionEngine.setTransactionEngine(this.transactionEngine);
    this.registerHandlers();
  }
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get or create an IndexEngine for a database.collection
|
||||||
|
*/
|
||||||
|
getIndexEngine(dbName: string, collName: string): IndexEngine {
|
||||||
|
const key = `${dbName}.${collName}`;
|
||||||
|
let engine = this.indexEngines.get(key);
|
||||||
|
if (!engine) {
|
||||||
|
engine = new IndexEngine(dbName, collName, this.storage);
|
||||||
|
this.indexEngines.set(key, engine);
|
||||||
|
}
|
||||||
|
return engine;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clear index engine cache for a collection (used when collection is dropped)
|
||||||
|
*/
|
||||||
|
clearIndexEngineCache(dbName: string, collName?: string): void {
|
||||||
|
if (collName) {
|
||||||
|
this.indexEngines.delete(`${dbName}.${collName}`);
|
||||||
|
} else {
|
||||||
|
// Clear all engines for the database
|
||||||
|
for (const key of this.indexEngines.keys()) {
|
||||||
|
if (key.startsWith(`${dbName}.`)) {
|
||||||
|
this.indexEngines.delete(key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Register all command handlers
|
||||||
|
*/
|
||||||
|
private registerHandlers(): void {
|
||||||
|
// Create handler instances with shared state
|
||||||
|
const helloHandler = new HelloHandler();
|
||||||
|
const findHandler = new FindHandler(this.cursors, () => this.cursorIdCounter++);
|
||||||
|
const insertHandler = new InsertHandler();
|
||||||
|
const updateHandler = new UpdateHandler();
|
||||||
|
const deleteHandler = new DeleteHandler();
|
||||||
|
const aggregateHandler = new AggregateHandler(this.cursors, () => this.cursorIdCounter++);
|
||||||
|
const indexHandler = new IndexHandler();
|
||||||
|
const adminHandler = new AdminHandler();
|
||||||
|
|
||||||
|
// Handshake commands
|
||||||
|
this.handlers.set('hello', helloHandler);
|
||||||
|
this.handlers.set('ismaster', helloHandler);
|
||||||
|
this.handlers.set('isMaster', helloHandler);
|
||||||
|
|
||||||
|
// CRUD commands
|
||||||
|
this.handlers.set('find', findHandler);
|
||||||
|
this.handlers.set('insert', insertHandler);
|
||||||
|
this.handlers.set('update', updateHandler);
|
||||||
|
this.handlers.set('delete', deleteHandler);
|
||||||
|
this.handlers.set('findAndModify', updateHandler);
|
||||||
|
this.handlers.set('getMore', findHandler);
|
||||||
|
this.handlers.set('killCursors', findHandler);
|
||||||
|
|
||||||
|
// Aggregation
|
||||||
|
this.handlers.set('aggregate', aggregateHandler);
|
||||||
|
this.handlers.set('count', findHandler);
|
||||||
|
this.handlers.set('distinct', findHandler);
|
||||||
|
|
||||||
|
// Index operations
|
||||||
|
this.handlers.set('createIndexes', indexHandler);
|
||||||
|
this.handlers.set('dropIndexes', indexHandler);
|
||||||
|
this.handlers.set('listIndexes', indexHandler);
|
||||||
|
|
||||||
|
// Admin/Database operations
|
||||||
|
this.handlers.set('ping', adminHandler);
|
||||||
|
this.handlers.set('listDatabases', adminHandler);
|
||||||
|
this.handlers.set('listCollections', adminHandler);
|
||||||
|
this.handlers.set('drop', adminHandler);
|
||||||
|
this.handlers.set('dropDatabase', adminHandler);
|
||||||
|
this.handlers.set('create', adminHandler);
|
||||||
|
this.handlers.set('serverStatus', adminHandler);
|
||||||
|
this.handlers.set('buildInfo', adminHandler);
|
||||||
|
this.handlers.set('whatsmyuri', adminHandler);
|
||||||
|
this.handlers.set('getLog', adminHandler);
|
||||||
|
this.handlers.set('hostInfo', adminHandler);
|
||||||
|
this.handlers.set('replSetGetStatus', adminHandler);
|
||||||
|
this.handlers.set('isMaster', helloHandler);
|
||||||
|
this.handlers.set('saslStart', adminHandler);
|
||||||
|
this.handlers.set('saslContinue', adminHandler);
|
||||||
|
this.handlers.set('endSessions', adminHandler);
|
||||||
|
this.handlers.set('abortTransaction', adminHandler);
|
||||||
|
this.handlers.set('commitTransaction', adminHandler);
|
||||||
|
this.handlers.set('collStats', adminHandler);
|
||||||
|
this.handlers.set('dbStats', adminHandler);
|
||||||
|
this.handlers.set('connectionStatus', adminHandler);
|
||||||
|
this.handlers.set('currentOp', adminHandler);
|
||||||
|
this.handlers.set('collMod', adminHandler);
|
||||||
|
this.handlers.set('renameCollection', adminHandler);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Route a command to its handler
|
||||||
|
*/
|
||||||
|
async route(parsedCommand: IParsedCommand): Promise<plugins.bson.Document> {
|
||||||
|
const { commandName, command, database, documentSequences } = parsedCommand;
|
||||||
|
|
||||||
|
// Extract session ID from lsid using SessionEngine helper
|
||||||
|
let sessionId = SessionEngine.extractSessionId(command.lsid);
|
||||||
|
let txnId: string | undefined;
|
||||||
|
|
||||||
|
// If we have a session ID, register/touch the session
|
||||||
|
if (sessionId) {
|
||||||
|
this.sessionEngine.getOrCreateSession(sessionId);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if this starts a new transaction
|
||||||
|
if (command.startTransaction && sessionId) {
|
||||||
|
txnId = this.transactionEngine.startTransaction(sessionId);
|
||||||
|
this.sessionEngine.startTransaction(sessionId, txnId, command.txnNumber);
|
||||||
|
} else if (sessionId && this.sessionEngine.isInTransaction(sessionId)) {
|
||||||
|
// Continue existing transaction
|
||||||
|
txnId = this.sessionEngine.getTransactionId(sessionId);
|
||||||
|
// Verify transaction is still active
|
||||||
|
if (txnId && !this.transactionEngine.isActive(txnId)) {
|
||||||
|
this.sessionEngine.endTransaction(sessionId);
|
||||||
|
txnId = undefined;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create handler context
|
||||||
|
const context: IHandlerContext = {
|
||||||
|
storage: this.storage,
|
||||||
|
server: this.server,
|
||||||
|
database,
|
||||||
|
command,
|
||||||
|
documentSequences,
|
||||||
|
getIndexEngine: (collName: string) => this.getIndexEngine(database, collName),
|
||||||
|
transactionEngine: this.transactionEngine,
|
||||||
|
sessionEngine: this.sessionEngine,
|
||||||
|
txnId,
|
||||||
|
sessionId,
|
||||||
|
};
|
||||||
|
|
||||||
|
// Find handler
|
||||||
|
const handler = this.handlers.get(commandName);
|
||||||
|
|
||||||
|
if (!handler) {
|
||||||
|
// Unknown command
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: `no such command: '${commandName}'`,
|
||||||
|
code: 59,
|
||||||
|
codeName: 'CommandNotFound',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
return await handler.handle(context);
|
||||||
|
} catch (error: any) {
|
||||||
|
// Handle known error types
|
||||||
|
if (error.code) {
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: error.message,
|
||||||
|
code: error.code,
|
||||||
|
codeName: error.codeName || 'UnknownError',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generic error
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: error.message || 'Internal error',
|
||||||
|
code: 1,
|
||||||
|
codeName: 'InternalError',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Close the command router and cleanup resources
|
||||||
|
*/
|
||||||
|
close(): void {
|
||||||
|
// Close session engine (stops cleanup interval, clears sessions)
|
||||||
|
this.sessionEngine.close();
|
||||||
|
// Clear cursors
|
||||||
|
this.cursors.clear();
|
||||||
|
// Clear index engine cache
|
||||||
|
this.indexEngines.clear();
|
||||||
|
}
|
||||||
|
|
||||||
|
  /**
   * Get session engine (for administrative purposes).
   * Exposes the router's internal SessionEngine so callers can inspect
   * or manage sessions out of band.
   */
  getSessionEngine(): SessionEngine {
    return this.sessionEngine;
  }
|
||||||
|
|
||||||
|
  /**
   * Get transaction engine (for administrative purposes).
   * Exposes the router's internal TransactionEngine so callers can
   * inspect or manage transactions out of band.
   */
  getTransactionEngine(): TransactionEngine {
    return this.transactionEngine;
  }
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Cursor state for multi-batch queries.
 * Holds an already-materialized result set; batching walks `documents`
 * forward from `position` in `batchSize`-sized steps.
 */
export interface ICursorState {
  /** Server-assigned cursor id. */
  id: bigint;
  /** Database the cursor was opened against. */
  database: string;
  /** Collection the cursor was opened against. */
  collection: string;
  /** The full materialized result set. */
  documents: plugins.bson.Document[];
  /** Index of the next document to hand out. */
  position: number;
  /** Number of documents returned per batch. */
  batchSize: number;
  /** When the cursor was created. */
  createdAt: Date;
}
|
||||||
301
ts/ts_tsmdb/server/TsmdbServer.ts
Normal file
301
ts/ts_tsmdb/server/TsmdbServer.ts
Normal file
@@ -0,0 +1,301 @@
|
|||||||
|
import * as net from 'net';
|
||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import { WireProtocol, OP_QUERY } from './WireProtocol.js';
|
||||||
|
import { CommandRouter } from './CommandRouter.js';
|
||||||
|
import { MemoryStorageAdapter } from '../storage/MemoryStorageAdapter.js';
|
||||||
|
import { FileStorageAdapter } from '../storage/FileStorageAdapter.js';
|
||||||
|
import type { IStorageAdapter } from '../storage/IStorageAdapter.js';
|
||||||
|
|
||||||
|
/**
 * Server configuration options
 */
export interface ITsmdbServerOptions {
  /** Port to listen on (default: 27017) */
  port?: number;
  /** Host to bind to (default: 127.0.0.1) */
  host?: string;
  /** Storage type: 'memory' or 'file' (default: 'memory') */
  storage?: 'memory' | 'file';
  /** Path for file storage when storage is 'file' (default: './data') */
  storagePath?: string;
  /** Enable persistence for memory storage by providing a snapshot path */
  persistPath?: string;
  /** Persistence interval in ms (default: 60000); only takes effect when persistPath is set */
  persistIntervalMs?: number;
}
|
||||||
|
|
||||||
|
/**
 * Connection state for each client
 */
interface IConnectionState {
  /** Server-assigned, monotonically increasing connection id. */
  id: number;
  /** The client's TCP socket. */
  socket: net.Socket;
  /** Bytes received but not yet parsed into a complete wire message. */
  buffer: Buffer;
  /** Always set to true for now — no authentication is enforced. */
  authenticated: boolean;
  /** Default database for this connection. */
  database: string;
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* TsmdbServer - MongoDB Wire Protocol compatible server
|
||||||
|
*
|
||||||
|
* This server implements the MongoDB wire protocol (OP_MSG) to allow
|
||||||
|
* official MongoDB drivers to connect and perform operations.
|
||||||
|
*
|
||||||
|
* @example
|
||||||
|
* ```typescript
|
||||||
|
* import { TsmdbServer } from '@push.rocks/smartmongo/tsmdb';
|
||||||
|
* import { MongoClient } from 'mongodb';
|
||||||
|
*
|
||||||
|
* const server = new TsmdbServer({ port: 27017 });
|
||||||
|
* await server.start();
|
||||||
|
*
|
||||||
|
* const client = new MongoClient('mongodb://127.0.0.1:27017');
|
||||||
|
* await client.connect();
|
||||||
|
* ```
|
||||||
|
*/
|
||||||
|
export class TsmdbServer {
|
||||||
|
private options: Required<ITsmdbServerOptions>;
|
||||||
|
private server: net.Server | null = null;
|
||||||
|
private storage: IStorageAdapter;
|
||||||
|
private commandRouter: CommandRouter;
|
||||||
|
private connections: Map<number, IConnectionState> = new Map();
|
||||||
|
private connectionIdCounter = 0;
|
||||||
|
private isRunning = false;
|
||||||
|
private startTime: Date = new Date();
|
||||||
|
|
||||||
|
constructor(options: ITsmdbServerOptions = {}) {
|
||||||
|
this.options = {
|
||||||
|
port: options.port ?? 27017,
|
||||||
|
host: options.host ?? '127.0.0.1',
|
||||||
|
storage: options.storage ?? 'memory',
|
||||||
|
storagePath: options.storagePath ?? './data',
|
||||||
|
persistPath: options.persistPath ?? '',
|
||||||
|
persistIntervalMs: options.persistIntervalMs ?? 60000,
|
||||||
|
};
|
||||||
|
|
||||||
|
// Create storage adapter
|
||||||
|
if (this.options.storage === 'file') {
|
||||||
|
this.storage = new FileStorageAdapter(this.options.storagePath);
|
||||||
|
} else {
|
||||||
|
this.storage = new MemoryStorageAdapter({
|
||||||
|
persistPath: this.options.persistPath || undefined,
|
||||||
|
persistIntervalMs: this.options.persistPath ? this.options.persistIntervalMs : undefined,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create command router
|
||||||
|
this.commandRouter = new CommandRouter(this.storage, this);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the storage adapter (for testing/debugging)
|
||||||
|
*/
|
||||||
|
getStorage(): IStorageAdapter {
|
||||||
|
return this.storage;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get server uptime in seconds
|
||||||
|
*/
|
||||||
|
getUptime(): number {
|
||||||
|
return Math.floor((Date.now() - this.startTime.getTime()) / 1000);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get current connection count
|
||||||
|
*/
|
||||||
|
getConnectionCount(): number {
|
||||||
|
return this.connections.size;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Start the server
|
||||||
|
*/
|
||||||
|
async start(): Promise<void> {
|
||||||
|
if (this.isRunning) {
|
||||||
|
throw new Error('Server is already running');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize storage
|
||||||
|
await this.storage.initialize();
|
||||||
|
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
this.server = net.createServer((socket) => {
|
||||||
|
this.handleConnection(socket);
|
||||||
|
});
|
||||||
|
|
||||||
|
this.server.on('error', (err) => {
|
||||||
|
if (!this.isRunning) {
|
||||||
|
reject(err);
|
||||||
|
} else {
|
||||||
|
console.error('Server error:', err);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
this.server.listen(this.options.port, this.options.host, () => {
|
||||||
|
this.isRunning = true;
|
||||||
|
this.startTime = new Date();
|
||||||
|
resolve();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Stop the server
|
||||||
|
*/
|
||||||
|
async stop(): Promise<void> {
|
||||||
|
if (!this.isRunning || !this.server) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Close all connections
|
||||||
|
for (const conn of this.connections.values()) {
|
||||||
|
conn.socket.destroy();
|
||||||
|
}
|
||||||
|
this.connections.clear();
|
||||||
|
|
||||||
|
// Close command router (cleans up session engine, cursors, etc.)
|
||||||
|
this.commandRouter.close();
|
||||||
|
|
||||||
|
// Close storage
|
||||||
|
await this.storage.close();
|
||||||
|
|
||||||
|
return new Promise((resolve) => {
|
||||||
|
this.server!.close(() => {
|
||||||
|
this.isRunning = false;
|
||||||
|
this.server = null;
|
||||||
|
resolve();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle a new client connection
|
||||||
|
*/
|
||||||
|
private handleConnection(socket: net.Socket): void {
|
||||||
|
const connectionId = ++this.connectionIdCounter;
|
||||||
|
|
||||||
|
const state: IConnectionState = {
|
||||||
|
id: connectionId,
|
||||||
|
socket,
|
||||||
|
buffer: Buffer.alloc(0),
|
||||||
|
authenticated: true, // No auth required for now
|
||||||
|
database: 'test',
|
||||||
|
};
|
||||||
|
|
||||||
|
this.connections.set(connectionId, state);
|
||||||
|
|
||||||
|
socket.on('data', (data) => {
|
||||||
|
this.handleData(state, Buffer.isBuffer(data) ? data : Buffer.from(data));
|
||||||
|
});
|
||||||
|
|
||||||
|
socket.on('close', () => {
|
||||||
|
this.connections.delete(connectionId);
|
||||||
|
});
|
||||||
|
|
||||||
|
socket.on('error', (err) => {
|
||||||
|
// Connection errors are expected when clients disconnect
|
||||||
|
this.connections.delete(connectionId);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle incoming data from a client
|
||||||
|
*/
|
||||||
|
private handleData(state: IConnectionState, data: Buffer): void {
|
||||||
|
// Append new data to buffer
|
||||||
|
state.buffer = Buffer.concat([state.buffer, data]);
|
||||||
|
|
||||||
|
// Process messages from buffer
|
||||||
|
this.processMessages(state);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Process complete messages from the buffer
|
||||||
|
*/
|
||||||
|
private async processMessages(state: IConnectionState): Promise<void> {
|
||||||
|
while (state.buffer.length >= 16) {
|
||||||
|
try {
|
||||||
|
const result = WireProtocol.parseMessage(state.buffer);
|
||||||
|
|
||||||
|
if (!result) {
|
||||||
|
// Not enough data for a complete message
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
const { command, bytesConsumed } = result;
|
||||||
|
|
||||||
|
// Remove processed bytes from buffer
|
||||||
|
state.buffer = state.buffer.subarray(bytesConsumed);
|
||||||
|
|
||||||
|
// Process the command
|
||||||
|
const response = await this.commandRouter.route(command);
|
||||||
|
|
||||||
|
// Encode and send response
|
||||||
|
let responseBuffer: Buffer;
|
||||||
|
if (command.opCode === OP_QUERY) {
|
||||||
|
// Legacy OP_QUERY gets OP_REPLY response
|
||||||
|
responseBuffer = WireProtocol.encodeOpReplyResponse(
|
||||||
|
command.requestID,
|
||||||
|
[response]
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
// OP_MSG gets OP_MSG response
|
||||||
|
responseBuffer = WireProtocol.encodeOpMsgResponse(
|
||||||
|
command.requestID,
|
||||||
|
response
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!state.socket.destroyed) {
|
||||||
|
state.socket.write(responseBuffer);
|
||||||
|
}
|
||||||
|
} catch (error: any) {
|
||||||
|
// Send error response
|
||||||
|
const errorResponse = WireProtocol.encodeErrorResponse(
|
||||||
|
0, // We don't have the requestID at this point
|
||||||
|
1,
|
||||||
|
error.message || 'Internal error'
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!state.socket.destroyed) {
|
||||||
|
state.socket.write(errorResponse);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Clear buffer on parse errors to avoid infinite loops
|
||||||
|
if (error.message?.includes('opCode') || error.message?.includes('section')) {
|
||||||
|
state.buffer = Buffer.alloc(0);
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the connection URI for this server
|
||||||
|
*/
|
||||||
|
getConnectionUri(): string {
|
||||||
|
return `mongodb://${this.options.host}:${this.options.port}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if the server is running
|
||||||
|
*/
|
||||||
|
get running(): boolean {
|
||||||
|
return this.isRunning;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the port the server is listening on
|
||||||
|
*/
|
||||||
|
get port(): number {
|
||||||
|
return this.options.port;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the host the server is bound to
|
||||||
|
*/
|
||||||
|
get host(): string {
|
||||||
|
return this.options.host;
|
||||||
|
}
|
||||||
|
}
|
||||||
416
ts/ts_tsmdb/server/WireProtocol.ts
Normal file
416
ts/ts_tsmdb/server/WireProtocol.ts
Normal file
@@ -0,0 +1,416 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* MongoDB Wire Protocol Implementation
|
||||||
|
* Handles parsing and encoding of MongoDB wire protocol messages (OP_MSG primarily)
|
||||||
|
*
|
||||||
|
* Wire Protocol Message Format:
|
||||||
|
* - Header (16 bytes): messageLength (4), requestID (4), responseTo (4), opCode (4)
|
||||||
|
* - OP_MSG: flagBits (4), sections[], optional checksum (4)
|
||||||
|
*
|
||||||
|
* References:
|
||||||
|
* - https://www.mongodb.com/docs/manual/reference/mongodb-wire-protocol/
|
||||||
|
*/
|
||||||
|
|
||||||
|
// OpCodes — the message-type discriminator stored in header bytes 12-15.
export const OP_REPLY = 1; // Legacy reply
export const OP_UPDATE = 2001; // Legacy update
export const OP_INSERT = 2002; // Legacy insert
export const OP_QUERY = 2004; // Legacy query (still used for initial handshake)
export const OP_GET_MORE = 2005; // Legacy getMore
export const OP_DELETE = 2006; // Legacy delete
export const OP_KILL_CURSORS = 2007; // Legacy kill cursors
export const OP_COMPRESSED = 2012; // Compressed message
export const OP_MSG = 2013; // Modern protocol (MongoDB 3.6+)

// OP_MSG Section Types
export const SECTION_BODY = 0; // Single BSON document
export const SECTION_DOCUMENT_SEQUENCE = 1; // Document sequence for bulk operations

// OP_MSG Flag Bits
export const MSG_FLAG_CHECKSUM_PRESENT = 1 << 0; // Message ends with a 4-byte checksum
export const MSG_FLAG_MORE_TO_COME = 1 << 1;
export const MSG_FLAG_EXHAUST_ALLOWED = 1 << 16;
|
||||||
|
|
||||||
|
/**
 * Parsed message header — the fixed 16-byte prefix of every wire message
 * (four little-endian int32 fields).
 */
export interface IMessageHeader {
  /** Total message size in bytes, including these 16 header bytes. */
  messageLength: number;
  /** Identifier chosen by the sender of this message. */
  requestID: number;
  /** requestID of the message this one replies to. */
  responseTo: number;
  /** Operation code (OP_MSG, OP_QUERY, ...). */
  opCode: number;
}
|
||||||
|
|
||||||
|
/**
 * Parsed OP_MSG message
 */
export interface IOpMsgMessage {
  /** Common 16-byte header. */
  header: IMessageHeader;
  /** OP_MSG flag bits (see MSG_FLAG_* constants). */
  flagBits: number;
  /** Body and document-sequence sections, in wire order. */
  sections: IOpMsgSection[];
  /** Trailing checksum, present when MSG_FLAG_CHECKSUM_PRESENT is set. */
  checksum?: number;
}
|
||||||
|
|
||||||
|
/**
 * OP_MSG section (either body or document sequence)
 */
export interface IOpMsgSection {
  /** SECTION_BODY or SECTION_DOCUMENT_SEQUENCE. */
  type: number;
  /** The BSON body document; an empty object for document-sequence sections. */
  payload: plugins.bson.Document;
  /** Sequence identifier — set only for document-sequence sections. */
  sequenceIdentifier?: string;
  /** Sequence documents — set only for document-sequence sections. */
  documents?: plugins.bson.Document[];
}
|
||||||
|
|
||||||
|
/**
 * Parsed OP_QUERY message (legacy, but used for initial handshake)
 */
export interface IOpQueryMessage {
  /** Common 16-byte header. */
  header: IMessageHeader;
  /** OP_QUERY flag bits. */
  flags: number;
  /** Target namespace, e.g. "dbname.collection" or "dbname.$cmd". */
  fullCollectionName: string;
  /** Number of documents to skip. */
  numberToSkip: number;
  /** Number of documents requested in the reply. */
  numberToReturn: number;
  /** The query document (for "$cmd" namespaces this IS the command). */
  query: plugins.bson.Document;
  /** Optional projection document. */
  returnFieldsSelector?: plugins.bson.Document;
}
|
||||||
|
|
||||||
|
/**
 * Parsed command from any message type — the normalized form handed to
 * the command router regardless of which opcode carried it.
 */
export interface IParsedCommand {
  /** First key of the command document (e.g. 'find', 'hello'). */
  commandName: string;
  /** The full command document. */
  command: plugins.bson.Document;
  /** Target database ($db for OP_MSG; namespace prefix for OP_QUERY). */
  database: string;
  /** requestID of the originating message, used as responseTo in replies. */
  requestID: number;
  /** Originating opcode; the server picks the reply encoding from this. */
  opCode: number;
  /** OP_MSG document sequences keyed by identifier, when present. */
  documentSequences?: Map<string, plugins.bson.Document[]>;
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Wire Protocol parser and encoder
|
||||||
|
*/
|
||||||
|
export class WireProtocol {
|
||||||
|
/**
|
||||||
|
* Parse a complete message from a buffer
|
||||||
|
* Returns the parsed command and the number of bytes consumed
|
||||||
|
*/
|
||||||
|
static parseMessage(buffer: Buffer): { command: IParsedCommand; bytesConsumed: number } | null {
|
||||||
|
if (buffer.length < 16) {
|
||||||
|
return null; // Not enough data for header
|
||||||
|
}
|
||||||
|
|
||||||
|
const header = this.parseHeader(buffer);
|
||||||
|
|
||||||
|
if (buffer.length < header.messageLength) {
|
||||||
|
return null; // Not enough data for complete message
|
||||||
|
}
|
||||||
|
|
||||||
|
const messageBuffer = buffer.subarray(0, header.messageLength);
|
||||||
|
|
||||||
|
switch (header.opCode) {
|
||||||
|
case OP_MSG:
|
||||||
|
return this.parseOpMsg(messageBuffer, header);
|
||||||
|
case OP_QUERY:
|
||||||
|
return this.parseOpQuery(messageBuffer, header);
|
||||||
|
default:
|
||||||
|
throw new Error(`Unsupported opCode: ${header.opCode}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse message header (16 bytes)
|
||||||
|
*/
|
||||||
|
private static parseHeader(buffer: Buffer): IMessageHeader {
|
||||||
|
return {
|
||||||
|
messageLength: buffer.readInt32LE(0),
|
||||||
|
requestID: buffer.readInt32LE(4),
|
||||||
|
responseTo: buffer.readInt32LE(8),
|
||||||
|
opCode: buffer.readInt32LE(12),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse OP_MSG message
|
||||||
|
*/
|
||||||
|
private static parseOpMsg(buffer: Buffer, header: IMessageHeader): { command: IParsedCommand; bytesConsumed: number } {
|
||||||
|
let offset = 16; // Skip header
|
||||||
|
|
||||||
|
const flagBits = buffer.readUInt32LE(offset);
|
||||||
|
offset += 4;
|
||||||
|
|
||||||
|
const sections: IOpMsgSection[] = [];
|
||||||
|
const documentSequences = new Map<string, plugins.bson.Document[]>();
|
||||||
|
|
||||||
|
// Parse sections until we reach the end (or checksum)
|
||||||
|
const messageEnd = (flagBits & MSG_FLAG_CHECKSUM_PRESENT)
|
||||||
|
? header.messageLength - 4
|
||||||
|
: header.messageLength;
|
||||||
|
|
||||||
|
while (offset < messageEnd) {
|
||||||
|
const sectionType = buffer.readUInt8(offset);
|
||||||
|
offset += 1;
|
||||||
|
|
||||||
|
if (sectionType === SECTION_BODY) {
|
||||||
|
// Single BSON document
|
||||||
|
const docSize = buffer.readInt32LE(offset);
|
||||||
|
const docBuffer = buffer.subarray(offset, offset + docSize);
|
||||||
|
const doc = plugins.bson.deserialize(docBuffer);
|
||||||
|
sections.push({ type: SECTION_BODY, payload: doc });
|
||||||
|
offset += docSize;
|
||||||
|
} else if (sectionType === SECTION_DOCUMENT_SEQUENCE) {
|
||||||
|
// Document sequence
|
||||||
|
const sectionSize = buffer.readInt32LE(offset);
|
||||||
|
const sectionEnd = offset + sectionSize;
|
||||||
|
offset += 4;
|
||||||
|
|
||||||
|
// Read sequence identifier (C string)
|
||||||
|
let identifierEnd = offset;
|
||||||
|
while (buffer[identifierEnd] !== 0 && identifierEnd < sectionEnd) {
|
||||||
|
identifierEnd++;
|
||||||
|
}
|
||||||
|
const identifier = buffer.subarray(offset, identifierEnd).toString('utf8');
|
||||||
|
offset = identifierEnd + 1; // Skip null terminator
|
||||||
|
|
||||||
|
// Read documents
|
||||||
|
const documents: plugins.bson.Document[] = [];
|
||||||
|
while (offset < sectionEnd) {
|
||||||
|
const docSize = buffer.readInt32LE(offset);
|
||||||
|
const docBuffer = buffer.subarray(offset, offset + docSize);
|
||||||
|
documents.push(plugins.bson.deserialize(docBuffer));
|
||||||
|
offset += docSize;
|
||||||
|
}
|
||||||
|
|
||||||
|
sections.push({
|
||||||
|
type: SECTION_DOCUMENT_SEQUENCE,
|
||||||
|
payload: {},
|
||||||
|
sequenceIdentifier: identifier,
|
||||||
|
documents
|
||||||
|
});
|
||||||
|
documentSequences.set(identifier, documents);
|
||||||
|
} else {
|
||||||
|
throw new Error(`Unknown section type: ${sectionType}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// The first section body contains the command
|
||||||
|
const commandSection = sections.find(s => s.type === SECTION_BODY);
|
||||||
|
if (!commandSection) {
|
||||||
|
throw new Error('OP_MSG missing command body section');
|
||||||
|
}
|
||||||
|
|
||||||
|
const command = commandSection.payload;
|
||||||
|
const commandName = Object.keys(command)[0];
|
||||||
|
const database = command.$db || 'admin';
|
||||||
|
|
||||||
|
return {
|
||||||
|
command: {
|
||||||
|
commandName,
|
||||||
|
command,
|
||||||
|
database,
|
||||||
|
requestID: header.requestID,
|
||||||
|
opCode: header.opCode,
|
||||||
|
documentSequences: documentSequences.size > 0 ? documentSequences : undefined,
|
||||||
|
},
|
||||||
|
bytesConsumed: header.messageLength,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse OP_QUERY message (legacy, used for initial handshake)
|
||||||
|
*/
|
||||||
|
private static parseOpQuery(buffer: Buffer, header: IMessageHeader): { command: IParsedCommand; bytesConsumed: number } {
|
||||||
|
let offset = 16; // Skip header
|
||||||
|
|
||||||
|
const flags = buffer.readInt32LE(offset);
|
||||||
|
offset += 4;
|
||||||
|
|
||||||
|
// Read full collection name (C string)
|
||||||
|
let nameEnd = offset;
|
||||||
|
while (buffer[nameEnd] !== 0 && nameEnd < buffer.length) {
|
||||||
|
nameEnd++;
|
||||||
|
}
|
||||||
|
const fullCollectionName = buffer.subarray(offset, nameEnd).toString('utf8');
|
||||||
|
offset = nameEnd + 1;
|
||||||
|
|
||||||
|
const numberToSkip = buffer.readInt32LE(offset);
|
||||||
|
offset += 4;
|
||||||
|
|
||||||
|
const numberToReturn = buffer.readInt32LE(offset);
|
||||||
|
offset += 4;
|
||||||
|
|
||||||
|
// Read query document
|
||||||
|
const querySize = buffer.readInt32LE(offset);
|
||||||
|
const queryBuffer = buffer.subarray(offset, offset + querySize);
|
||||||
|
const query = plugins.bson.deserialize(queryBuffer);
|
||||||
|
offset += querySize;
|
||||||
|
|
||||||
|
// Extract database from collection name (format: "dbname.$cmd" or "dbname.collection")
|
||||||
|
const parts = fullCollectionName.split('.');
|
||||||
|
const database = parts[0];
|
||||||
|
|
||||||
|
// For OP_QUERY to .$cmd, the query IS the command
|
||||||
|
let commandName = 'find';
|
||||||
|
let command = query;
|
||||||
|
|
||||||
|
if (parts[1] === '$cmd') {
|
||||||
|
// This is a command
|
||||||
|
commandName = Object.keys(query)[0];
|
||||||
|
// Handle special commands like isMaster, hello
|
||||||
|
if (commandName === 'isMaster' || commandName === 'ismaster') {
|
||||||
|
commandName = 'hello';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
command: {
|
||||||
|
commandName,
|
||||||
|
command,
|
||||||
|
database,
|
||||||
|
requestID: header.requestID,
|
||||||
|
opCode: header.opCode,
|
||||||
|
},
|
||||||
|
bytesConsumed: header.messageLength,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Encode a response as OP_MSG
|
||||||
|
*/
|
||||||
|
static encodeOpMsgResponse(
|
||||||
|
responseTo: number,
|
||||||
|
response: plugins.bson.Document,
|
||||||
|
requestID: number = Math.floor(Math.random() * 0x7FFFFFFF)
|
||||||
|
): Buffer {
|
||||||
|
// Add $db if not present (optional in response)
|
||||||
|
const responseDoc = { ...response };
|
||||||
|
|
||||||
|
// Serialize the response document
|
||||||
|
const bodyBson = plugins.bson.serialize(responseDoc);
|
||||||
|
|
||||||
|
// Calculate message length
|
||||||
|
// Header (16) + flagBits (4) + section type (1) + body BSON
|
||||||
|
const messageLength = 16 + 4 + 1 + bodyBson.length;
|
||||||
|
|
||||||
|
const buffer = Buffer.alloc(messageLength);
|
||||||
|
let offset = 0;
|
||||||
|
|
||||||
|
// Write header
|
||||||
|
buffer.writeInt32LE(messageLength, offset); // messageLength
|
||||||
|
offset += 4;
|
||||||
|
buffer.writeInt32LE(requestID, offset); // requestID
|
||||||
|
offset += 4;
|
||||||
|
buffer.writeInt32LE(responseTo, offset); // responseTo
|
||||||
|
offset += 4;
|
||||||
|
buffer.writeInt32LE(OP_MSG, offset); // opCode
|
||||||
|
offset += 4;
|
||||||
|
|
||||||
|
// Write flagBits (0 = no flags)
|
||||||
|
buffer.writeUInt32LE(0, offset);
|
||||||
|
offset += 4;
|
||||||
|
|
||||||
|
// Write section type 0 (body)
|
||||||
|
buffer.writeUInt8(SECTION_BODY, offset);
|
||||||
|
offset += 1;
|
||||||
|
|
||||||
|
// Write body BSON
|
||||||
|
Buffer.from(bodyBson).copy(buffer, offset);
|
||||||
|
|
||||||
|
return buffer;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Encode a response as OP_REPLY (legacy, for OP_QUERY responses)
|
||||||
|
*/
|
||||||
|
static encodeOpReplyResponse(
|
||||||
|
responseTo: number,
|
||||||
|
documents: plugins.bson.Document[],
|
||||||
|
requestID: number = Math.floor(Math.random() * 0x7FFFFFFF),
|
||||||
|
cursorId: bigint = BigInt(0)
|
||||||
|
): Buffer {
|
||||||
|
// Serialize all documents
|
||||||
|
const docBuffers = documents.map(doc => plugins.bson.serialize(doc));
|
||||||
|
const totalDocsSize = docBuffers.reduce((sum, buf) => sum + buf.length, 0);
|
||||||
|
|
||||||
|
// Message format:
|
||||||
|
// Header (16) + responseFlags (4) + cursorID (8) + startingFrom (4) + numberReturned (4) + documents
|
||||||
|
const messageLength = 16 + 4 + 8 + 4 + 4 + totalDocsSize;
|
||||||
|
|
||||||
|
const buffer = Buffer.alloc(messageLength);
|
||||||
|
let offset = 0;
|
||||||
|
|
||||||
|
// Write header
|
||||||
|
buffer.writeInt32LE(messageLength, offset); // messageLength
|
||||||
|
offset += 4;
|
||||||
|
buffer.writeInt32LE(requestID, offset); // requestID
|
||||||
|
offset += 4;
|
||||||
|
buffer.writeInt32LE(responseTo, offset); // responseTo
|
||||||
|
offset += 4;
|
||||||
|
buffer.writeInt32LE(OP_REPLY, offset); // opCode
|
||||||
|
offset += 4;
|
||||||
|
|
||||||
|
// Write OP_REPLY fields
|
||||||
|
buffer.writeInt32LE(0, offset); // responseFlags (0 = no errors)
|
||||||
|
offset += 4;
|
||||||
|
buffer.writeBigInt64LE(cursorId, offset); // cursorID
|
||||||
|
offset += 8;
|
||||||
|
buffer.writeInt32LE(0, offset); // startingFrom
|
||||||
|
offset += 4;
|
||||||
|
buffer.writeInt32LE(documents.length, offset); // numberReturned
|
||||||
|
offset += 4;
|
||||||
|
|
||||||
|
// Write documents
|
||||||
|
for (const docBuffer of docBuffers) {
|
||||||
|
Buffer.from(docBuffer).copy(buffer, offset);
|
||||||
|
offset += docBuffer.length;
|
||||||
|
}
|
||||||
|
|
||||||
|
return buffer;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Encode an error response
|
||||||
|
*/
|
||||||
|
static encodeErrorResponse(
|
||||||
|
responseTo: number,
|
||||||
|
errorCode: number,
|
||||||
|
errorMessage: string,
|
||||||
|
commandName?: string
|
||||||
|
): Buffer {
|
||||||
|
const response: plugins.bson.Document = {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: errorMessage,
|
||||||
|
code: errorCode,
|
||||||
|
codeName: this.getErrorCodeName(errorCode),
|
||||||
|
};
|
||||||
|
|
||||||
|
return this.encodeOpMsgResponse(responseTo, response);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get error code name from error code
|
||||||
|
*/
|
||||||
|
private static getErrorCodeName(code: number): string {
|
||||||
|
const errorNames: Record<number, string> = {
|
||||||
|
0: 'OK',
|
||||||
|
1: 'InternalError',
|
||||||
|
2: 'BadValue',
|
||||||
|
11000: 'DuplicateKey',
|
||||||
|
11001: 'DuplicateKeyValue',
|
||||||
|
13: 'Unauthorized',
|
||||||
|
26: 'NamespaceNotFound',
|
||||||
|
27: 'IndexNotFound',
|
||||||
|
48: 'NamespaceExists',
|
||||||
|
59: 'CommandNotFound',
|
||||||
|
66: 'ImmutableField',
|
||||||
|
73: 'InvalidNamespace',
|
||||||
|
85: 'IndexOptionsConflict',
|
||||||
|
112: 'WriteConflict',
|
||||||
|
121: 'DocumentValidationFailure',
|
||||||
|
211: 'KeyNotFound',
|
||||||
|
251: 'NoSuchTransaction',
|
||||||
|
};
|
||||||
|
|
||||||
|
return errorNames[code] || 'UnknownError';
|
||||||
|
}
|
||||||
|
}
|
||||||
719
ts/ts_tsmdb/server/handlers/AdminHandler.ts
Normal file
719
ts/ts_tsmdb/server/handlers/AdminHandler.ts
Normal file
@@ -0,0 +1,719 @@
|
|||||||
|
import * as plugins from '../../plugins.js';
|
||||||
|
import type { ICommandHandler, IHandlerContext } from '../CommandRouter.js';
|
||||||
|
import { SessionEngine } from '../../engine/SessionEngine.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* AdminHandler - Handles administrative commands
|
||||||
|
*/
|
||||||
|
export class AdminHandler implements ICommandHandler {
|
||||||
|
async handle(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
const { command } = context;
|
||||||
|
|
||||||
|
// Determine which command to handle
|
||||||
|
if (command.ping !== undefined) {
|
||||||
|
return this.handlePing(context);
|
||||||
|
} else if (command.listDatabases !== undefined) {
|
||||||
|
return this.handleListDatabases(context);
|
||||||
|
} else if (command.listCollections !== undefined) {
|
||||||
|
return this.handleListCollections(context);
|
||||||
|
} else if (command.drop !== undefined) {
|
||||||
|
return this.handleDrop(context);
|
||||||
|
} else if (command.dropDatabase !== undefined) {
|
||||||
|
return this.handleDropDatabase(context);
|
||||||
|
} else if (command.create !== undefined) {
|
||||||
|
return this.handleCreate(context);
|
||||||
|
} else if (command.serverStatus !== undefined) {
|
||||||
|
return this.handleServerStatus(context);
|
||||||
|
} else if (command.buildInfo !== undefined) {
|
||||||
|
return this.handleBuildInfo(context);
|
||||||
|
} else if (command.whatsmyuri !== undefined) {
|
||||||
|
return this.handleWhatsMyUri(context);
|
||||||
|
} else if (command.getLog !== undefined) {
|
||||||
|
return this.handleGetLog(context);
|
||||||
|
} else if (command.hostInfo !== undefined) {
|
||||||
|
return this.handleHostInfo(context);
|
||||||
|
} else if (command.replSetGetStatus !== undefined) {
|
||||||
|
return this.handleReplSetGetStatus(context);
|
||||||
|
} else if (command.saslStart !== undefined) {
|
||||||
|
return this.handleSaslStart(context);
|
||||||
|
} else if (command.saslContinue !== undefined) {
|
||||||
|
return this.handleSaslContinue(context);
|
||||||
|
} else if (command.endSessions !== undefined) {
|
||||||
|
return this.handleEndSessions(context);
|
||||||
|
} else if (command.abortTransaction !== undefined) {
|
||||||
|
return this.handleAbortTransaction(context);
|
||||||
|
} else if (command.commitTransaction !== undefined) {
|
||||||
|
return this.handleCommitTransaction(context);
|
||||||
|
} else if (command.collStats !== undefined) {
|
||||||
|
return this.handleCollStats(context);
|
||||||
|
} else if (command.dbStats !== undefined) {
|
||||||
|
return this.handleDbStats(context);
|
||||||
|
} else if (command.connectionStatus !== undefined) {
|
||||||
|
return this.handleConnectionStatus(context);
|
||||||
|
} else if (command.currentOp !== undefined) {
|
||||||
|
return this.handleCurrentOp(context);
|
||||||
|
} else if (command.collMod !== undefined) {
|
||||||
|
return this.handleCollMod(context);
|
||||||
|
} else if (command.renameCollection !== undefined) {
|
||||||
|
return this.handleRenameCollection(context);
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: 'Unknown admin command',
|
||||||
|
code: 59,
|
||||||
|
codeName: 'CommandNotFound',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle ping command
|
||||||
|
*/
|
||||||
|
private async handlePing(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
return { ok: 1 };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle listDatabases command
|
||||||
|
*/
|
||||||
|
private async handleListDatabases(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
const { storage, command } = context;
|
||||||
|
|
||||||
|
const dbNames = await storage.listDatabases();
|
||||||
|
const nameOnly = command.nameOnly || false;
|
||||||
|
|
||||||
|
if (nameOnly) {
|
||||||
|
return {
|
||||||
|
ok: 1,
|
||||||
|
databases: dbNames.map(name => ({ name })),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build database list with sizes
|
||||||
|
const databases: plugins.bson.Document[] = [];
|
||||||
|
let totalSize = 0;
|
||||||
|
|
||||||
|
for (const name of dbNames) {
|
||||||
|
const collections = await storage.listCollections(name);
|
||||||
|
let dbSize = 0;
|
||||||
|
|
||||||
|
for (const collName of collections) {
|
||||||
|
const docs = await storage.findAll(name, collName);
|
||||||
|
// Estimate size (rough approximation)
|
||||||
|
dbSize += docs.reduce((sum, doc) => sum + JSON.stringify(doc).length, 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
totalSize += dbSize;
|
||||||
|
|
||||||
|
databases.push({
|
||||||
|
name,
|
||||||
|
sizeOnDisk: dbSize,
|
||||||
|
empty: dbSize === 0,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
ok: 1,
|
||||||
|
databases,
|
||||||
|
totalSize,
|
||||||
|
totalSizeMb: totalSize / (1024 * 1024),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle listCollections command
|
||||||
|
*/
|
||||||
|
private async handleListCollections(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
const { storage, database, command } = context;
|
||||||
|
|
||||||
|
const filter = command.filter || {};
|
||||||
|
const nameOnly = command.nameOnly || false;
|
||||||
|
const cursor = command.cursor || {};
|
||||||
|
const batchSize = cursor.batchSize || 101;
|
||||||
|
|
||||||
|
const collNames = await storage.listCollections(database);
|
||||||
|
|
||||||
|
let collections: plugins.bson.Document[] = [];
|
||||||
|
|
||||||
|
for (const name of collNames) {
|
||||||
|
// Apply name filter
|
||||||
|
if (filter.name && filter.name !== name) {
|
||||||
|
// Check regex
|
||||||
|
if (filter.name.$regex) {
|
||||||
|
const regex = new RegExp(filter.name.$regex, filter.name.$options);
|
||||||
|
if (!regex.test(name)) continue;
|
||||||
|
} else {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (nameOnly) {
|
||||||
|
collections.push({ name });
|
||||||
|
} else {
|
||||||
|
collections.push({
|
||||||
|
name,
|
||||||
|
type: 'collection',
|
||||||
|
options: {},
|
||||||
|
info: {
|
||||||
|
readOnly: false,
|
||||||
|
uuid: new plugins.bson.UUID(),
|
||||||
|
},
|
||||||
|
idIndex: {
|
||||||
|
v: 2,
|
||||||
|
key: { _id: 1 },
|
||||||
|
name: '_id_',
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
ok: 1,
|
||||||
|
cursor: {
|
||||||
|
id: plugins.bson.Long.fromNumber(0),
|
||||||
|
ns: `${database}.$cmd.listCollections`,
|
||||||
|
firstBatch: collections,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle drop command (drop collection)
|
||||||
|
*/
|
||||||
|
private async handleDrop(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
const { storage, database, command } = context;
|
||||||
|
|
||||||
|
const collection = command.drop;
|
||||||
|
|
||||||
|
const existed = await storage.dropCollection(database, collection);
|
||||||
|
|
||||||
|
if (!existed) {
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: `ns not found ${database}.${collection}`,
|
||||||
|
code: 26,
|
||||||
|
codeName: 'NamespaceNotFound',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return { ok: 1, ns: `${database}.${collection}` };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle dropDatabase command
|
||||||
|
*/
|
||||||
|
private async handleDropDatabase(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
const { storage, database } = context;
|
||||||
|
|
||||||
|
await storage.dropDatabase(database);
|
||||||
|
|
||||||
|
return { ok: 1, dropped: database };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle create command (create collection)
|
||||||
|
*/
|
||||||
|
private async handleCreate(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
const { storage, database, command } = context;
|
||||||
|
|
||||||
|
const collection = command.create;
|
||||||
|
|
||||||
|
// Check if already exists
|
||||||
|
const exists = await storage.collectionExists(database, collection);
|
||||||
|
if (exists) {
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: `Collection ${database}.${collection} already exists.`,
|
||||||
|
code: 48,
|
||||||
|
codeName: 'NamespaceExists',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
await storage.createCollection(database, collection);
|
||||||
|
|
||||||
|
return { ok: 1 };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle serverStatus command
|
||||||
|
*/
|
||||||
|
private async handleServerStatus(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
const { server, sessionEngine } = context;
|
||||||
|
|
||||||
|
const uptime = server.getUptime();
|
||||||
|
const connections = server.getConnectionCount();
|
||||||
|
const sessions = sessionEngine.listSessions();
|
||||||
|
const sessionsWithTxn = sessionEngine.getSessionsWithTransactions();
|
||||||
|
|
||||||
|
return {
|
||||||
|
ok: 1,
|
||||||
|
host: `${server.host}:${server.port}`,
|
||||||
|
version: '7.0.0',
|
||||||
|
process: 'tsmdb',
|
||||||
|
pid: process.pid,
|
||||||
|
uptime,
|
||||||
|
uptimeMillis: uptime * 1000,
|
||||||
|
uptimeEstimate: uptime,
|
||||||
|
localTime: new Date(),
|
||||||
|
mem: {
|
||||||
|
resident: Math.floor(process.memoryUsage().rss / (1024 * 1024)),
|
||||||
|
virtual: Math.floor(process.memoryUsage().heapTotal / (1024 * 1024)),
|
||||||
|
supported: true,
|
||||||
|
},
|
||||||
|
connections: {
|
||||||
|
current: connections,
|
||||||
|
available: 1000 - connections,
|
||||||
|
totalCreated: connections,
|
||||||
|
active: connections,
|
||||||
|
},
|
||||||
|
logicalSessionRecordCache: {
|
||||||
|
activeSessionsCount: sessions.length,
|
||||||
|
sessionsCollectionJobCount: 0,
|
||||||
|
lastSessionsCollectionJobDurationMillis: 0,
|
||||||
|
lastSessionsCollectionJobTimestamp: new Date(),
|
||||||
|
transactionReaperJobCount: 0,
|
||||||
|
lastTransactionReaperJobDurationMillis: 0,
|
||||||
|
lastTransactionReaperJobTimestamp: new Date(),
|
||||||
|
},
|
||||||
|
transactions: {
|
||||||
|
retriedCommandsCount: 0,
|
||||||
|
retriedStatementsCount: 0,
|
||||||
|
transactionsCollectionWriteCount: 0,
|
||||||
|
currentActive: sessionsWithTxn.length,
|
||||||
|
currentInactive: 0,
|
||||||
|
currentOpen: sessionsWithTxn.length,
|
||||||
|
totalStarted: sessionsWithTxn.length,
|
||||||
|
totalCommitted: 0,
|
||||||
|
totalAborted: 0,
|
||||||
|
},
|
||||||
|
network: {
|
||||||
|
bytesIn: 0,
|
||||||
|
bytesOut: 0,
|
||||||
|
numRequests: 0,
|
||||||
|
},
|
||||||
|
storageEngine: {
|
||||||
|
name: 'tsmdb',
|
||||||
|
supportsCommittedReads: true,
|
||||||
|
persistent: false,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle buildInfo command
|
||||||
|
*/
|
||||||
|
private async handleBuildInfo(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
return {
|
||||||
|
ok: 1,
|
||||||
|
version: '7.0.0',
|
||||||
|
gitVersion: 'tsmdb',
|
||||||
|
modules: [],
|
||||||
|
allocator: 'system',
|
||||||
|
javascriptEngine: 'none',
|
||||||
|
sysInfo: 'deprecated',
|
||||||
|
versionArray: [7, 0, 0, 0],
|
||||||
|
openssl: {
|
||||||
|
running: 'disabled',
|
||||||
|
compiled: 'disabled',
|
||||||
|
},
|
||||||
|
buildEnvironment: {
|
||||||
|
distmod: 'tsmdb',
|
||||||
|
distarch: process.arch,
|
||||||
|
cc: '',
|
||||||
|
ccflags: '',
|
||||||
|
cxx: '',
|
||||||
|
cxxflags: '',
|
||||||
|
linkflags: '',
|
||||||
|
target_arch: process.arch,
|
||||||
|
target_os: process.platform,
|
||||||
|
},
|
||||||
|
bits: 64,
|
||||||
|
debug: false,
|
||||||
|
maxBsonObjectSize: 16777216,
|
||||||
|
storageEngines: ['tsmdb'],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle whatsmyuri command
|
||||||
|
*/
|
||||||
|
private async handleWhatsMyUri(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
const { server } = context;
|
||||||
|
return {
|
||||||
|
ok: 1,
|
||||||
|
you: `127.0.0.1:${server.port}`,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle getLog command
|
||||||
|
*/
|
||||||
|
private async handleGetLog(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
const { command } = context;
|
||||||
|
|
||||||
|
if (command.getLog === '*') {
|
||||||
|
return {
|
||||||
|
ok: 1,
|
||||||
|
names: ['global', 'startupWarnings'],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
ok: 1,
|
||||||
|
totalLinesWritten: 0,
|
||||||
|
log: [],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle hostInfo command
|
||||||
|
*/
|
||||||
|
private async handleHostInfo(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
return {
|
||||||
|
ok: 1,
|
||||||
|
system: {
|
||||||
|
currentTime: new Date(),
|
||||||
|
hostname: 'localhost',
|
||||||
|
cpuAddrSize: 64,
|
||||||
|
memSizeMB: Math.floor(process.memoryUsage().heapTotal / (1024 * 1024)),
|
||||||
|
numCores: 1,
|
||||||
|
cpuArch: process.arch,
|
||||||
|
numaEnabled: false,
|
||||||
|
},
|
||||||
|
os: {
|
||||||
|
type: process.platform,
|
||||||
|
name: process.platform,
|
||||||
|
version: process.version,
|
||||||
|
},
|
||||||
|
extra: {},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle replSetGetStatus command
|
||||||
|
*/
|
||||||
|
private async handleReplSetGetStatus(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
// We're standalone, not a replica set
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: 'not running with --replSet',
|
||||||
|
code: 76,
|
||||||
|
codeName: 'NoReplicationEnabled',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle saslStart command (authentication)
|
||||||
|
*/
|
||||||
|
private async handleSaslStart(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
// We don't require authentication, but we need to respond properly
|
||||||
|
// to let drivers know auth is "successful"
|
||||||
|
return {
|
||||||
|
ok: 1,
|
||||||
|
conversationId: 1,
|
||||||
|
done: true,
|
||||||
|
payload: Buffer.from([]),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle saslContinue command
|
||||||
|
*/
|
||||||
|
private async handleSaslContinue(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
return {
|
||||||
|
ok: 1,
|
||||||
|
conversationId: 1,
|
||||||
|
done: true,
|
||||||
|
payload: Buffer.from([]),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle endSessions command
|
||||||
|
*/
|
||||||
|
private async handleEndSessions(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
const { command, sessionEngine } = context;
|
||||||
|
|
||||||
|
// End each session in the array
|
||||||
|
const sessions = command.endSessions || [];
|
||||||
|
for (const sessionSpec of sessions) {
|
||||||
|
const sessionId = SessionEngine.extractSessionId(sessionSpec);
|
||||||
|
if (sessionId) {
|
||||||
|
await sessionEngine.endSession(sessionId);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { ok: 1 };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle abortTransaction command
|
||||||
|
*/
|
||||||
|
private async handleAbortTransaction(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
const { transactionEngine, sessionEngine, txnId, sessionId } = context;
|
||||||
|
|
||||||
|
if (!txnId) {
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: 'No transaction started',
|
||||||
|
code: 251,
|
||||||
|
codeName: 'NoSuchTransaction',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
await transactionEngine.abortTransaction(txnId);
|
||||||
|
transactionEngine.endTransaction(txnId);
|
||||||
|
// Update session state
|
||||||
|
if (sessionId) {
|
||||||
|
sessionEngine.endTransaction(sessionId);
|
||||||
|
}
|
||||||
|
return { ok: 1 };
|
||||||
|
} catch (error: any) {
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: error.message || 'Abort transaction failed',
|
||||||
|
code: error.code || 1,
|
||||||
|
codeName: error.codeName || 'UnknownError',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle commitTransaction command
|
||||||
|
*/
|
||||||
|
private async handleCommitTransaction(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
const { transactionEngine, sessionEngine, txnId, sessionId } = context;
|
||||||
|
|
||||||
|
if (!txnId) {
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: 'No transaction started',
|
||||||
|
code: 251,
|
||||||
|
codeName: 'NoSuchTransaction',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
await transactionEngine.commitTransaction(txnId);
|
||||||
|
transactionEngine.endTransaction(txnId);
|
||||||
|
// Update session state
|
||||||
|
if (sessionId) {
|
||||||
|
sessionEngine.endTransaction(sessionId);
|
||||||
|
}
|
||||||
|
return { ok: 1 };
|
||||||
|
} catch (error: any) {
|
||||||
|
// If commit fails, transaction should be aborted
|
||||||
|
try {
|
||||||
|
await transactionEngine.abortTransaction(txnId);
|
||||||
|
transactionEngine.endTransaction(txnId);
|
||||||
|
if (sessionId) {
|
||||||
|
sessionEngine.endTransaction(sessionId);
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// Ignore abort errors
|
||||||
|
}
|
||||||
|
|
||||||
|
if (error.code === 112) {
|
||||||
|
// Write conflict
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: error.message || 'Write conflict during commit',
|
||||||
|
code: 112,
|
||||||
|
codeName: 'WriteConflict',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: error.message || 'Commit transaction failed',
|
||||||
|
code: error.code || 1,
|
||||||
|
codeName: error.codeName || 'UnknownError',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle collStats command
|
||||||
|
*/
|
||||||
|
private async handleCollStats(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
const { storage, database, command } = context;
|
||||||
|
|
||||||
|
const collection = command.collStats;
|
||||||
|
|
||||||
|
const exists = await storage.collectionExists(database, collection);
|
||||||
|
if (!exists) {
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: `ns not found ${database}.${collection}`,
|
||||||
|
code: 26,
|
||||||
|
codeName: 'NamespaceNotFound',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const docs = await storage.findAll(database, collection);
|
||||||
|
const size = docs.reduce((sum, doc) => sum + JSON.stringify(doc).length, 0);
|
||||||
|
const count = docs.length;
|
||||||
|
const avgObjSize = count > 0 ? size / count : 0;
|
||||||
|
|
||||||
|
const indexes = await storage.getIndexes(database, collection);
|
||||||
|
|
||||||
|
return {
|
||||||
|
ok: 1,
|
||||||
|
ns: `${database}.${collection}`,
|
||||||
|
count,
|
||||||
|
size,
|
||||||
|
avgObjSize,
|
||||||
|
storageSize: size,
|
||||||
|
totalIndexSize: 0,
|
||||||
|
indexSizes: indexes.reduce((acc: any, idx: any) => {
|
||||||
|
acc[idx.name] = 0;
|
||||||
|
return acc;
|
||||||
|
}, {}),
|
||||||
|
nindexes: indexes.length,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle dbStats command
|
||||||
|
*/
|
||||||
|
private async handleDbStats(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
const { storage, database } = context;
|
||||||
|
|
||||||
|
const collections = await storage.listCollections(database);
|
||||||
|
let totalSize = 0;
|
||||||
|
let totalObjects = 0;
|
||||||
|
|
||||||
|
for (const collName of collections) {
|
||||||
|
const docs = await storage.findAll(database, collName);
|
||||||
|
totalObjects += docs.length;
|
||||||
|
totalSize += docs.reduce((sum, doc) => sum + JSON.stringify(doc).length, 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
ok: 1,
|
||||||
|
db: database,
|
||||||
|
collections: collections.length,
|
||||||
|
views: 0,
|
||||||
|
objects: totalObjects,
|
||||||
|
avgObjSize: totalObjects > 0 ? totalSize / totalObjects : 0,
|
||||||
|
dataSize: totalSize,
|
||||||
|
storageSize: totalSize,
|
||||||
|
indexes: collections.length, // At least _id index per collection
|
||||||
|
indexSize: 0,
|
||||||
|
totalSize,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle connectionStatus command
|
||||||
|
*/
|
||||||
|
private async handleConnectionStatus(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
return {
|
||||||
|
ok: 1,
|
||||||
|
authInfo: {
|
||||||
|
authenticatedUsers: [],
|
||||||
|
authenticatedUserRoles: [],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle currentOp command
|
||||||
|
*/
|
||||||
|
private async handleCurrentOp(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
return {
|
||||||
|
ok: 1,
|
||||||
|
inprog: [],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle collMod command
|
||||||
|
*/
|
||||||
|
private async handleCollMod(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
// We don't support modifying collection options, but acknowledge the command
|
||||||
|
return { ok: 1 };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle renameCollection command
|
||||||
|
*/
|
||||||
|
private async handleRenameCollection(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
const { storage, command } = context;
|
||||||
|
|
||||||
|
const from = command.renameCollection;
|
||||||
|
const to = command.to;
|
||||||
|
const dropTarget = command.dropTarget || false;
|
||||||
|
|
||||||
|
if (!from || !to) {
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: 'renameCollection requires both source and target',
|
||||||
|
code: 2,
|
||||||
|
codeName: 'BadValue',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse namespace (format: "db.collection")
|
||||||
|
const fromParts = from.split('.');
|
||||||
|
const toParts = to.split('.');
|
||||||
|
|
||||||
|
if (fromParts.length < 2 || toParts.length < 2) {
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: 'Invalid namespace format',
|
||||||
|
code: 73,
|
||||||
|
codeName: 'InvalidNamespace',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const fromDb = fromParts[0];
|
||||||
|
const fromColl = fromParts.slice(1).join('.');
|
||||||
|
const toDb = toParts[0];
|
||||||
|
const toColl = toParts.slice(1).join('.');
|
||||||
|
|
||||||
|
// Check if source exists
|
||||||
|
const sourceExists = await storage.collectionExists(fromDb, fromColl);
|
||||||
|
if (!sourceExists) {
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: `source namespace ${from} does not exist`,
|
||||||
|
code: 26,
|
||||||
|
codeName: 'NamespaceNotFound',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if target exists
|
||||||
|
const targetExists = await storage.collectionExists(toDb, toColl);
|
||||||
|
if (targetExists) {
|
||||||
|
if (dropTarget) {
|
||||||
|
await storage.dropCollection(toDb, toColl);
|
||||||
|
} else {
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: `target namespace ${to} already exists`,
|
||||||
|
code: 48,
|
||||||
|
codeName: 'NamespaceExists',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Same database rename
|
||||||
|
if (fromDb === toDb) {
|
||||||
|
await storage.renameCollection(fromDb, fromColl, toColl);
|
||||||
|
} else {
|
||||||
|
// Cross-database rename: copy documents then drop source
|
||||||
|
await storage.createCollection(toDb, toColl);
|
||||||
|
const docs = await storage.findAll(fromDb, fromColl);
|
||||||
|
|
||||||
|
for (const doc of docs) {
|
||||||
|
await storage.insertOne(toDb, toColl, doc);
|
||||||
|
}
|
||||||
|
|
||||||
|
await storage.dropCollection(fromDb, fromColl);
|
||||||
|
}
|
||||||
|
|
||||||
|
return { ok: 1 };
|
||||||
|
}
|
||||||
|
}
|
||||||
342
ts/ts_tsmdb/server/handlers/AggregateHandler.ts
Normal file
342
ts/ts_tsmdb/server/handlers/AggregateHandler.ts
Normal file
@@ -0,0 +1,342 @@
|
|||||||
|
import * as plugins from '../../plugins.js';
|
||||||
|
import type { ICommandHandler, IHandlerContext, ICursorState } from '../CommandRouter.js';
|
||||||
|
import { AggregationEngine } from '../../engine/AggregationEngine.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* AggregateHandler - Handles aggregate command
|
||||||
|
*/
|
||||||
|
export class AggregateHandler implements ICommandHandler {
|
||||||
|
private cursors: Map<bigint, ICursorState>;
|
||||||
|
private nextCursorId: () => bigint;
|
||||||
|
|
||||||
|
constructor(
|
||||||
|
cursors: Map<bigint, ICursorState>,
|
||||||
|
nextCursorId: () => bigint
|
||||||
|
) {
|
||||||
|
this.cursors = cursors;
|
||||||
|
this.nextCursorId = nextCursorId;
|
||||||
|
}
|
||||||
|
|
||||||
|
async handle(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
const { storage, database, command } = context;
|
||||||
|
|
||||||
|
const collection = command.aggregate;
|
||||||
|
const pipeline = command.pipeline || [];
|
||||||
|
const cursor = command.cursor || {};
|
||||||
|
const batchSize = cursor.batchSize || 101;
|
||||||
|
|
||||||
|
// Validate
|
||||||
|
if (typeof collection !== 'string' && collection !== 1) {
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: 'aggregate command requires a collection name or 1',
|
||||||
|
code: 2,
|
||||||
|
codeName: 'BadValue',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!Array.isArray(pipeline)) {
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: 'pipeline must be an array',
|
||||||
|
code: 2,
|
||||||
|
codeName: 'BadValue',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Get source documents
|
||||||
|
let documents: plugins.bson.Document[] = [];
|
||||||
|
|
||||||
|
if (collection === 1 || collection === '1') {
|
||||||
|
// Database-level aggregation (e.g., $listLocalSessions)
|
||||||
|
documents = [];
|
||||||
|
} else {
|
||||||
|
// Collection-level aggregation
|
||||||
|
const exists = await storage.collectionExists(database, collection);
|
||||||
|
if (exists) {
|
||||||
|
documents = await storage.findAll(database, collection);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle $lookup and $graphLookup stages that reference other collections
|
||||||
|
const processedPipeline = await this.preprocessPipeline(
|
||||||
|
storage,
|
||||||
|
database,
|
||||||
|
pipeline,
|
||||||
|
documents
|
||||||
|
);
|
||||||
|
|
||||||
|
// Run aggregation
|
||||||
|
let results: plugins.bson.Document[];
|
||||||
|
|
||||||
|
// Check for special stages that we handle manually
|
||||||
|
if (this.hasSpecialStages(pipeline)) {
|
||||||
|
results = await this.executeWithSpecialStages(
|
||||||
|
storage,
|
||||||
|
database,
|
||||||
|
documents,
|
||||||
|
pipeline
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
results = AggregationEngine.aggregate(documents as any, processedPipeline);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle $out and $merge stages
|
||||||
|
const lastStage = pipeline[pipeline.length - 1];
|
||||||
|
if (lastStage && lastStage.$out) {
|
||||||
|
await this.handleOut(storage, database, results, lastStage.$out);
|
||||||
|
return { ok: 1, cursor: { id: plugins.bson.Long.fromNumber(0), ns: `${database}.${collection}`, firstBatch: [] } };
|
||||||
|
}
|
||||||
|
|
||||||
|
if (lastStage && lastStage.$merge) {
|
||||||
|
await this.handleMerge(storage, database, results, lastStage.$merge);
|
||||||
|
return { ok: 1, cursor: { id: plugins.bson.Long.fromNumber(0), ns: `${database}.${collection}`, firstBatch: [] } };
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build cursor response
|
||||||
|
const effectiveBatchSize = Math.min(batchSize, results.length);
|
||||||
|
const firstBatch = results.slice(0, effectiveBatchSize);
|
||||||
|
const remaining = results.slice(effectiveBatchSize);
|
||||||
|
|
||||||
|
let cursorId = BigInt(0);
|
||||||
|
if (remaining.length > 0) {
|
||||||
|
cursorId = this.nextCursorId();
|
||||||
|
this.cursors.set(cursorId, {
|
||||||
|
id: cursorId,
|
||||||
|
database,
|
||||||
|
collection: typeof collection === 'string' ? collection : '$cmd.aggregate',
|
||||||
|
documents: remaining,
|
||||||
|
position: 0,
|
||||||
|
batchSize,
|
||||||
|
createdAt: new Date(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
ok: 1,
|
||||||
|
cursor: {
|
||||||
|
id: plugins.bson.Long.fromBigInt(cursorId),
|
||||||
|
ns: `${database}.${typeof collection === 'string' ? collection : '$cmd.aggregate'}`,
|
||||||
|
firstBatch,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
} catch (error: any) {
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: error.message || 'Aggregation failed',
|
||||||
|
code: 1,
|
||||||
|
codeName: 'InternalError',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Preprocess pipeline to handle cross-collection lookups
|
||||||
|
*/
|
||||||
|
private async preprocessPipeline(
|
||||||
|
storage: any,
|
||||||
|
database: string,
|
||||||
|
pipeline: plugins.bson.Document[],
|
||||||
|
documents: plugins.bson.Document[]
|
||||||
|
): Promise<plugins.bson.Document[]> {
|
||||||
|
// For now, return the pipeline as-is
|
||||||
|
// Cross-collection lookups are handled in executeWithSpecialStages
|
||||||
|
return pipeline;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if pipeline has stages that need special handling
|
||||||
|
*/
|
||||||
|
private hasSpecialStages(pipeline: plugins.bson.Document[]): boolean {
|
||||||
|
return pipeline.some(stage =>
|
||||||
|
stage.$lookup ||
|
||||||
|
stage.$graphLookup ||
|
||||||
|
stage.$unionWith
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute pipeline with special stage handling
|
||||||
|
*/
|
||||||
|
private async executeWithSpecialStages(
|
||||||
|
storage: any,
|
||||||
|
database: string,
|
||||||
|
documents: plugins.bson.Document[],
|
||||||
|
pipeline: plugins.bson.Document[]
|
||||||
|
): Promise<plugins.bson.Document[]> {
|
||||||
|
let results: plugins.bson.Document[] = [...documents];
|
||||||
|
|
||||||
|
for (const stage of pipeline) {
|
||||||
|
if (stage.$lookup) {
|
||||||
|
const lookupSpec = stage.$lookup;
|
||||||
|
const fromCollection = lookupSpec.from;
|
||||||
|
|
||||||
|
// Get foreign collection documents
|
||||||
|
const foreignExists = await storage.collectionExists(database, fromCollection);
|
||||||
|
const foreignDocs = foreignExists
|
||||||
|
? await storage.findAll(database, fromCollection)
|
||||||
|
: [];
|
||||||
|
|
||||||
|
results = AggregationEngine.executeLookup(results as any, lookupSpec, foreignDocs);
|
||||||
|
} else if (stage.$graphLookup) {
|
||||||
|
const graphLookupSpec = stage.$graphLookup;
|
||||||
|
const fromCollection = graphLookupSpec.from;
|
||||||
|
|
||||||
|
const foreignExists = await storage.collectionExists(database, fromCollection);
|
||||||
|
const foreignDocs = foreignExists
|
||||||
|
? await storage.findAll(database, fromCollection)
|
||||||
|
: [];
|
||||||
|
|
||||||
|
results = AggregationEngine.executeGraphLookup(results as any, graphLookupSpec, foreignDocs);
|
||||||
|
} else if (stage.$unionWith) {
|
||||||
|
let unionSpec = stage.$unionWith;
|
||||||
|
let unionColl: string;
|
||||||
|
let unionPipeline: plugins.bson.Document[] | undefined;
|
||||||
|
|
||||||
|
if (typeof unionSpec === 'string') {
|
||||||
|
unionColl = unionSpec;
|
||||||
|
} else {
|
||||||
|
unionColl = unionSpec.coll;
|
||||||
|
unionPipeline = unionSpec.pipeline;
|
||||||
|
}
|
||||||
|
|
||||||
|
const unionExists = await storage.collectionExists(database, unionColl);
|
||||||
|
const unionDocs = unionExists
|
||||||
|
? await storage.findAll(database, unionColl)
|
||||||
|
: [];
|
||||||
|
|
||||||
|
results = AggregationEngine.executeUnionWith(results as any, unionDocs, unionPipeline);
|
||||||
|
} else if (stage.$facet) {
|
||||||
|
// Execute each facet pipeline separately
|
||||||
|
const facetResults: plugins.bson.Document = {};
|
||||||
|
|
||||||
|
for (const [facetName, facetPipeline] of Object.entries(stage.$facet)) {
|
||||||
|
const facetDocs = await this.executeWithSpecialStages(
|
||||||
|
storage,
|
||||||
|
database,
|
||||||
|
results,
|
||||||
|
facetPipeline as plugins.bson.Document[]
|
||||||
|
);
|
||||||
|
facetResults[facetName] = facetDocs;
|
||||||
|
}
|
||||||
|
|
||||||
|
results = [facetResults];
|
||||||
|
} else {
|
||||||
|
// Regular stage - pass to mingo
|
||||||
|
results = AggregationEngine.aggregate(results as any, [stage]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle $out stage - write results to a collection
|
||||||
|
*/
|
||||||
|
private async handleOut(
|
||||||
|
storage: any,
|
||||||
|
database: string,
|
||||||
|
results: plugins.bson.Document[],
|
||||||
|
outSpec: string | { db?: string; coll: string }
|
||||||
|
): Promise<void> {
|
||||||
|
let targetDb = database;
|
||||||
|
let targetColl: string;
|
||||||
|
|
||||||
|
if (typeof outSpec === 'string') {
|
||||||
|
targetColl = outSpec;
|
||||||
|
} else {
|
||||||
|
targetDb = outSpec.db || database;
|
||||||
|
targetColl = outSpec.coll;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Drop existing collection
|
||||||
|
await storage.dropCollection(targetDb, targetColl);
|
||||||
|
|
||||||
|
// Create new collection and insert results
|
||||||
|
await storage.createCollection(targetDb, targetColl);
|
||||||
|
|
||||||
|
for (const doc of results) {
|
||||||
|
if (!doc._id) {
|
||||||
|
doc._id = new plugins.bson.ObjectId();
|
||||||
|
}
|
||||||
|
await storage.insertOne(targetDb, targetColl, doc);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
  /**
   * Handle $merge stage - merge results into a collection
   *
   * Resolves the target namespace, then merges each pipeline result into it
   * according to the `whenMatched` / `whenNotMatched` policies.
   *
   * @param storage - storage backend (createCollection/findAll/updateById/insertOne)
   * @param database - database the pipeline ran against (default target db)
   * @param results - final pipeline output documents to merge
   * @param mergeSpec - bare collection name, or { into, on?, whenMatched?, whenNotMatched? }
   * @throws Error when a match outcome hits a 'fail' policy
   */
  private async handleMerge(
    storage: any,
    database: string,
    results: plugins.bson.Document[],
    mergeSpec: any
  ): Promise<void> {
    let targetDb = database;
    let targetColl: string;

    // $merge accepts three target forms: 'coll', { into: 'coll' }, and
    // { into: { db, coll } }.
    if (typeof mergeSpec === 'string') {
      targetColl = mergeSpec;
    } else if (typeof mergeSpec.into === 'string') {
      targetColl = mergeSpec.into;
    } else {
      targetDb = mergeSpec.into.db || database;
      targetColl = mergeSpec.into.coll;
    }

    // Match key(s) and conflict policies; defaults mirror MongoDB's
    // documented defaults for $merge.
    const on = mergeSpec.on || '_id';
    const whenMatched = mergeSpec.whenMatched || 'merge';
    const whenNotMatched = mergeSpec.whenNotMatched || 'insert';

    // Ensure target collection exists
    await storage.createCollection(targetDb, targetColl);

    for (const doc of results) {
      // Find matching document
      // NOTE(review): the target is re-scanned for every result document,
      // so documents inserted earlier in this loop are visible to later
      // matches. O(results * target-size); presumably acceptable for this
      // in-memory engine — confirm before using on large collections.
      const existingDocs = await storage.findAll(targetDb, targetColl);
      const onFields = Array.isArray(on) ? on : [on];

      // A document matches only if every "on" field is equal under JSON
      // serialization (deep-ish equality; key order sensitivity applies).
      let matchingDoc = null;
      for (const existing of existingDocs) {
        let matches = true;
        for (const field of onFields) {
          if (JSON.stringify(existing[field]) !== JSON.stringify(doc[field])) {
            matches = false;
            break;
          }
        }
        if (matches) {
          matchingDoc = existing;
          break;
        }
      }

      if (matchingDoc) {
        // Handle whenMatched
        if (whenMatched === 'replace') {
          await storage.updateById(targetDb, targetColl, matchingDoc._id, doc);
        } else if (whenMatched === 'keepExisting') {
          // Do nothing
        } else if (whenMatched === 'merge') {
          // Shallow merge: incoming fields overwrite existing ones.
          const merged = { ...matchingDoc, ...doc };
          await storage.updateById(targetDb, targetColl, matchingDoc._id, merged);
        } else if (whenMatched === 'fail') {
          throw new Error('Document matched but whenMatched is fail');
        }
      } else {
        // Handle whenNotMatched
        if (whenNotMatched === 'insert') {
          if (!doc._id) {
            doc._id = new plugins.bson.ObjectId();
          }
          await storage.insertOne(targetDb, targetColl, doc);
        } else if (whenNotMatched === 'discard') {
          // Do nothing
        } else if (whenNotMatched === 'fail') {
          throw new Error('Document not matched but whenNotMatched is fail');
        }
      }
    }
  }
|
||||||
|
}
|
||||||
115
ts/ts_tsmdb/server/handlers/DeleteHandler.ts
Normal file
115
ts/ts_tsmdb/server/handlers/DeleteHandler.ts
Normal file
@@ -0,0 +1,115 @@
|
|||||||
|
import * as plugins from '../../plugins.js';
|
||||||
|
import type { ICommandHandler, IHandlerContext } from '../CommandRouter.js';
|
||||||
|
import type { IStoredDocument } from '../../types/interfaces.js';
|
||||||
|
import { QueryEngine } from '../../engine/QueryEngine.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* DeleteHandler - Handles delete commands
|
||||||
|
*/
|
||||||
|
export class DeleteHandler implements ICommandHandler {
|
||||||
|
async handle(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
const { storage, database, command, documentSequences } = context;
|
||||||
|
|
||||||
|
const collection = command.delete;
|
||||||
|
if (typeof collection !== 'string') {
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: 'delete command requires a collection name',
|
||||||
|
code: 2,
|
||||||
|
codeName: 'BadValue',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get deletes from command or document sequences
|
||||||
|
let deletes: plugins.bson.Document[] = command.deletes || [];
|
||||||
|
|
||||||
|
// Check for OP_MSG document sequences
|
||||||
|
if (documentSequences && documentSequences.has('deletes')) {
|
||||||
|
deletes = documentSequences.get('deletes')!;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!Array.isArray(deletes) || deletes.length === 0) {
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: 'delete command requires deletes array',
|
||||||
|
code: 2,
|
||||||
|
codeName: 'BadValue',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const ordered = command.ordered !== false;
|
||||||
|
const writeErrors: plugins.bson.Document[] = [];
|
||||||
|
let totalDeleted = 0;
|
||||||
|
|
||||||
|
// Check if collection exists
|
||||||
|
const exists = await storage.collectionExists(database, collection);
|
||||||
|
if (!exists) {
|
||||||
|
// Collection doesn't exist, return success with 0 deleted
|
||||||
|
return { ok: 1, n: 0 };
|
||||||
|
}
|
||||||
|
|
||||||
|
const indexEngine = context.getIndexEngine(collection);
|
||||||
|
|
||||||
|
for (let i = 0; i < deletes.length; i++) {
|
||||||
|
const deleteSpec = deletes[i];
|
||||||
|
const filter = deleteSpec.q || deleteSpec.filter || {};
|
||||||
|
const limit = deleteSpec.limit;
|
||||||
|
|
||||||
|
// limit: 0 means delete all matching, limit: 1 means delete one
|
||||||
|
const deleteAll = limit === 0;
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Try to use index-accelerated query
|
||||||
|
const candidateIds = await indexEngine.findCandidateIds(filter);
|
||||||
|
|
||||||
|
let documents: IStoredDocument[];
|
||||||
|
if (candidateIds !== null) {
|
||||||
|
documents = await storage.findByIds(database, collection, candidateIds);
|
||||||
|
} else {
|
||||||
|
documents = await storage.findAll(database, collection);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Apply filter
|
||||||
|
const matchingDocs = QueryEngine.filter(documents, filter);
|
||||||
|
|
||||||
|
if (matchingDocs.length === 0) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Determine which documents to delete
|
||||||
|
const docsToDelete = deleteAll ? matchingDocs : matchingDocs.slice(0, 1);
|
||||||
|
|
||||||
|
// Update indexes for deleted documents
|
||||||
|
for (const doc of docsToDelete) {
|
||||||
|
await indexEngine.onDelete(doc as any);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Delete the documents
|
||||||
|
const idsToDelete = docsToDelete.map(doc => doc._id);
|
||||||
|
const deleted = await storage.deleteByIds(database, collection, idsToDelete);
|
||||||
|
totalDeleted += deleted;
|
||||||
|
} catch (error: any) {
|
||||||
|
writeErrors.push({
|
||||||
|
index: i,
|
||||||
|
code: error.code || 1,
|
||||||
|
errmsg: error.message || 'Delete failed',
|
||||||
|
});
|
||||||
|
|
||||||
|
if (ordered) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const response: plugins.bson.Document = {
|
||||||
|
ok: 1,
|
||||||
|
n: totalDeleted,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (writeErrors.length > 0) {
|
||||||
|
response.writeErrors = writeErrors;
|
||||||
|
}
|
||||||
|
|
||||||
|
return response;
|
||||||
|
}
|
||||||
|
}
|
||||||
330
ts/ts_tsmdb/server/handlers/FindHandler.ts
Normal file
330
ts/ts_tsmdb/server/handlers/FindHandler.ts
Normal file
@@ -0,0 +1,330 @@
|
|||||||
|
import * as plugins from '../../plugins.js';
|
||||||
|
import type { ICommandHandler, IHandlerContext, ICursorState } from '../CommandRouter.js';
|
||||||
|
import type { IStoredDocument } from '../../types/interfaces.js';
|
||||||
|
import { QueryEngine } from '../../engine/QueryEngine.js';
|
||||||
|
|
||||||
|
/**
 * FindHandler - Handles find, getMore, killCursors, count, distinct commands
 *
 * Cursor state lives in a shared Map injected by the server so that getMore
 * and killCursors can see cursors created by find.
 */
export class FindHandler implements ICommandHandler {
  // Shared cursor registry: id -> buffered result state.
  private cursors: Map<bigint, ICursorState>;
  // Generator for fresh cursor ids, supplied by the registry's owner.
  private nextCursorId: () => bigint;

  constructor(
    cursors: Map<bigint, ICursorState>,
    nextCursorId: () => bigint
  ) {
    this.cursors = cursors;
    this.nextCursorId = nextCursorId;
  }

  /** Dispatch to the specific read sub-command present on the request. */
  async handle(context: IHandlerContext): Promise<plugins.bson.Document> {
    const { command } = context;

    // Determine which operation to perform
    if (command.find) {
      return this.handleFind(context);
    } else if (command.getMore !== undefined) {
      return this.handleGetMore(context);
    } else if (command.killCursors) {
      return this.handleKillCursors(context);
    } else if (command.count) {
      return this.handleCount(context);
    } else if (command.distinct) {
      return this.handleDistinct(context);
    }

    return {
      ok: 0,
      errmsg: 'Unknown find-related command',
      code: 59,
      codeName: 'CommandNotFound',
    };
  }

  /**
   * Handle find command
   *
   * Runs the full query pipeline eagerly (filter -> sort -> skip -> limit
   * -> projection), returns the first batch, and registers a cursor for any
   * remainder unless singleBatch was requested.
   */
  private async handleFind(context: IHandlerContext): Promise<plugins.bson.Document> {
    const { storage, database, command, getIndexEngine } = context;

    const collection = command.find;
    const filter = command.filter || {};
    const projection = command.projection;
    const sort = command.sort;
    const skip = command.skip || 0;
    const limit = command.limit || 0;
    const batchSize = command.batchSize || 101; // MongoDB's default first batch
    const singleBatch = command.singleBatch || false;

    // Ensure collection exists
    const exists = await storage.collectionExists(database, collection);
    if (!exists) {
      // Return empty cursor for non-existent collection
      return {
        ok: 1,
        cursor: {
          id: plugins.bson.Long.fromNumber(0),
          ns: `${database}.${collection}`,
          firstBatch: [],
        },
      };
    }

    // Try to use index-accelerated query
    const indexEngine = getIndexEngine(collection);
    const candidateIds = await indexEngine.findCandidateIds(filter);

    let documents: IStoredDocument[];
    if (candidateIds !== null) {
      // Index hit - fetch only candidate documents
      documents = await storage.findByIds(database, collection, candidateIds);
      // Still apply filter for any conditions the index couldn't fully satisfy
      documents = QueryEngine.filter(documents, filter);
    } else {
      // No suitable index - full collection scan
      documents = await storage.findAll(database, collection);
      // Apply filter
      documents = QueryEngine.filter(documents, filter);
    }

    // Apply sort
    if (sort) {
      documents = QueryEngine.sort(documents, sort);
    }

    // Apply skip
    if (skip > 0) {
      documents = documents.slice(skip);
    }

    // Apply limit
    if (limit > 0) {
      documents = documents.slice(0, limit);
    }

    // Apply projection
    if (projection) {
      documents = QueryEngine.project(documents, projection) as any[];
    }

    // Determine how many documents to return in first batch
    const effectiveBatchSize = Math.min(batchSize, documents.length);
    const firstBatch = documents.slice(0, effectiveBatchSize);
    const remaining = documents.slice(effectiveBatchSize);

    // Create cursor if there are more documents
    // (cursor id 0 signals "exhausted" to the driver)
    let cursorId = BigInt(0);
    if (remaining.length > 0 && !singleBatch) {
      cursorId = this.nextCursorId();
      this.cursors.set(cursorId, {
        id: cursorId,
        database,
        collection,
        documents: remaining,
        position: 0,
        batchSize,
        createdAt: new Date(),
      });
    }

    return {
      ok: 1,
      cursor: {
        id: plugins.bson.Long.fromBigInt(cursorId),
        ns: `${database}.${collection}`,
        firstBatch,
      },
    };
  }

  /**
   * Handle getMore command
   *
   * Streams the next batch from a previously registered cursor; deletes the
   * cursor and reports id 0 once it is exhausted.
   */
  private async handleGetMore(context: IHandlerContext): Promise<plugins.bson.Document> {
    const { database, command } = context;

    const cursorIdInput = command.getMore;
    const collection = command.collection;
    const batchSize = command.batchSize || 101;

    // Convert cursorId to bigint
    // (drivers may send a BSON Long, a bigint, or a plain number)
    let cursorId: bigint;
    if (typeof cursorIdInput === 'bigint') {
      cursorId = cursorIdInput;
    } else if (cursorIdInput instanceof plugins.bson.Long) {
      cursorId = cursorIdInput.toBigInt();
    } else {
      cursorId = BigInt(cursorIdInput);
    }

    const cursor = this.cursors.get(cursorId);
    if (!cursor) {
      return {
        ok: 0,
        errmsg: `cursor id ${cursorId} not found`,
        code: 43,
        codeName: 'CursorNotFound',
      };
    }

    // Verify namespace
    if (cursor.database !== database || cursor.collection !== collection) {
      return {
        ok: 0,
        errmsg: 'cursor namespace mismatch',
        code: 43,
        codeName: 'CursorNotFound',
      };
    }

    // Get next batch
    const start = cursor.position;
    const end = Math.min(start + batchSize, cursor.documents.length);
    const nextBatch = cursor.documents.slice(start, end);
    cursor.position = end;

    // Check if cursor is exhausted
    let returnCursorId = cursorId;
    if (cursor.position >= cursor.documents.length) {
      this.cursors.delete(cursorId);
      returnCursorId = BigInt(0);
    }

    return {
      ok: 1,
      cursor: {
        id: plugins.bson.Long.fromBigInt(returnCursorId),
        ns: `${database}.${collection}`,
        nextBatch,
      },
    };
  }

  /**
   * Handle killCursors command
   *
   * Removes each named cursor from the registry and reports which ids were
   * killed vs. not found.
   */
  private async handleKillCursors(context: IHandlerContext): Promise<plugins.bson.Document> {
    const { command } = context;

    // Target collection name; currently unused — cursor ids alone identify
    // cursors in this implementation (no per-namespace check here).
    const collection = command.killCursors;
    const cursorIds = command.cursors || [];

    const cursorsKilled: plugins.bson.Long[] = [];
    const cursorsNotFound: plugins.bson.Long[] = [];
    const cursorsUnknown: plugins.bson.Long[] = [];

    for (const idInput of cursorIds) {
      // Normalize the wire representation of the id to bigint.
      let cursorId: bigint;
      if (typeof idInput === 'bigint') {
        cursorId = idInput;
      } else if (idInput instanceof plugins.bson.Long) {
        cursorId = idInput.toBigInt();
      } else {
        cursorId = BigInt(idInput);
      }

      if (this.cursors.has(cursorId)) {
        this.cursors.delete(cursorId);
        cursorsKilled.push(plugins.bson.Long.fromBigInt(cursorId));
      } else {
        cursorsNotFound.push(plugins.bson.Long.fromBigInt(cursorId));
      }
    }

    return {
      ok: 1,
      cursorsKilled,
      cursorsNotFound,
      cursorsUnknown,
      cursorsAlive: [],
    };
  }

  /**
   * Handle count command
   *
   * Counts documents matching `query`, honoring skip and limit.
   */
  private async handleCount(context: IHandlerContext): Promise<plugins.bson.Document> {
    const { storage, database, command, getIndexEngine } = context;

    const collection = command.count;
    const query = command.query || {};
    const skip = command.skip || 0;
    const limit = command.limit || 0;

    // Check if collection exists
    const exists = await storage.collectionExists(database, collection);
    if (!exists) {
      return { ok: 1, n: 0 };
    }

    // Try to use index-accelerated query
    const indexEngine = getIndexEngine(collection);
    const candidateIds = await indexEngine.findCandidateIds(query);

    let documents: IStoredDocument[];
    if (candidateIds !== null) {
      // Index hit - fetch only candidate documents
      documents = await storage.findByIds(database, collection, candidateIds);
      documents = QueryEngine.filter(documents, query);
    } else {
      // No suitable index - full collection scan
      documents = await storage.findAll(database, collection);
      documents = QueryEngine.filter(documents, query);
    }

    // Apply skip
    if (skip > 0) {
      documents = documents.slice(skip);
    }

    // Apply limit
    if (limit > 0) {
      documents = documents.slice(0, limit);
    }

    return { ok: 1, n: documents.length };
  }

  /**
   * Handle distinct command
   *
   * Returns the distinct values of `key` among documents matching `query`.
   */
  private async handleDistinct(context: IHandlerContext): Promise<plugins.bson.Document> {
    const { storage, database, command, getIndexEngine } = context;

    const collection = command.distinct;
    const key = command.key;
    const query = command.query || {};

    if (!key) {
      return {
        ok: 0,
        errmsg: 'distinct requires a key',
        code: 2,
        codeName: 'BadValue',
      };
    }

    // Check if collection exists
    const exists = await storage.collectionExists(database, collection);
    if (!exists) {
      return { ok: 1, values: [] };
    }

    // Try to use index-accelerated query
    const indexEngine = getIndexEngine(collection);
    const candidateIds = await indexEngine.findCandidateIds(query);

    let documents: IStoredDocument[];
    if (candidateIds !== null) {
      documents = await storage.findByIds(database, collection, candidateIds);
    } else {
      documents = await storage.findAll(database, collection);
    }

    // Get distinct values (QueryEngine applies `query` here as well)
    const values = QueryEngine.distinct(documents, key, query);

    return { ok: 1, values };
  }
}
|
||||||
78
ts/ts_tsmdb/server/handlers/HelloHandler.ts
Normal file
78
ts/ts_tsmdb/server/handlers/HelloHandler.ts
Normal file
@@ -0,0 +1,78 @@
|
|||||||
|
import * as plugins from '../../plugins.js';
|
||||||
|
import type { ICommandHandler, IHandlerContext } from '../CommandRouter.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* HelloHandler - Handles hello/isMaster handshake commands
|
||||||
|
*
|
||||||
|
* This is the first command sent by MongoDB drivers to establish a connection.
|
||||||
|
* It returns server capabilities and configuration.
|
||||||
|
*/
|
||||||
|
export class HelloHandler implements ICommandHandler {
|
||||||
|
async handle(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
const { command, server } = context;
|
||||||
|
|
||||||
|
// Build response with server capabilities
|
||||||
|
const response: plugins.bson.Document = {
|
||||||
|
ismaster: true,
|
||||||
|
ok: 1,
|
||||||
|
|
||||||
|
// Maximum sizes
|
||||||
|
maxBsonObjectSize: 16777216, // 16 MB
|
||||||
|
maxMessageSizeBytes: 48000000, // 48 MB
|
||||||
|
maxWriteBatchSize: 100000, // 100k documents per batch
|
||||||
|
|
||||||
|
// Timestamps
|
||||||
|
localTime: new Date(),
|
||||||
|
|
||||||
|
// Session support
|
||||||
|
logicalSessionTimeoutMinutes: 30,
|
||||||
|
|
||||||
|
// Connection info
|
||||||
|
connectionId: 1,
|
||||||
|
|
||||||
|
// Wire protocol versions (support MongoDB 3.6 through 7.0)
|
||||||
|
minWireVersion: 0,
|
||||||
|
maxWireVersion: 21,
|
||||||
|
|
||||||
|
// Server mode
|
||||||
|
readOnly: false,
|
||||||
|
|
||||||
|
// Topology info (standalone mode)
|
||||||
|
isWritablePrimary: true,
|
||||||
|
|
||||||
|
// Additional info
|
||||||
|
topologyVersion: {
|
||||||
|
processId: new plugins.bson.ObjectId(),
|
||||||
|
counter: plugins.bson.Long.fromNumber(0),
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
// Handle hello-specific fields
|
||||||
|
if (command.hello || command.hello === 1) {
|
||||||
|
response.helloOk = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle client metadata
|
||||||
|
if (command.client) {
|
||||||
|
// Client is providing metadata about itself
|
||||||
|
// We just acknowledge it - no need to do anything special
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle SASL mechanisms query
|
||||||
|
if (command.saslSupportedMechs) {
|
||||||
|
response.saslSupportedMechs = [
|
||||||
|
// We don't actually support auth, but the driver needs to see this
|
||||||
|
];
|
||||||
|
}
|
||||||
|
|
||||||
|
// Compression support (none for now)
|
||||||
|
if (command.compression) {
|
||||||
|
response.compression = [];
|
||||||
|
}
|
||||||
|
|
||||||
|
// Server version info
|
||||||
|
response.version = '7.0.0';
|
||||||
|
|
||||||
|
return response;
|
||||||
|
}
|
||||||
|
}
|
||||||
207
ts/ts_tsmdb/server/handlers/IndexHandler.ts
Normal file
207
ts/ts_tsmdb/server/handlers/IndexHandler.ts
Normal file
@@ -0,0 +1,207 @@
|
|||||||
|
import * as plugins from '../../plugins.js';
|
||||||
|
import type { ICommandHandler, IHandlerContext } from '../CommandRouter.js';
|
||||||
|
import { IndexEngine } from '../../engine/IndexEngine.js';
|
||||||
|
|
||||||
|
// Cache of index engines per collection
|
||||||
|
const indexEngines: Map<string, IndexEngine> = new Map();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get or create an IndexEngine for a collection
|
||||||
|
*/
|
||||||
|
function getIndexEngine(storage: any, database: string, collection: string): IndexEngine {
|
||||||
|
const key = `${database}.${collection}`;
|
||||||
|
let engine = indexEngines.get(key);
|
||||||
|
|
||||||
|
if (!engine) {
|
||||||
|
engine = new IndexEngine(database, collection, storage);
|
||||||
|
indexEngines.set(key, engine);
|
||||||
|
}
|
||||||
|
|
||||||
|
return engine;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* IndexHandler - Handles createIndexes, dropIndexes, listIndexes commands
|
||||||
|
*/
|
||||||
|
export class IndexHandler implements ICommandHandler {
|
||||||
|
async handle(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
const { command } = context;
|
||||||
|
|
||||||
|
if (command.createIndexes) {
|
||||||
|
return this.handleCreateIndexes(context);
|
||||||
|
} else if (command.dropIndexes) {
|
||||||
|
return this.handleDropIndexes(context);
|
||||||
|
} else if (command.listIndexes) {
|
||||||
|
return this.handleListIndexes(context);
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: 'Unknown index command',
|
||||||
|
code: 59,
|
||||||
|
codeName: 'CommandNotFound',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle createIndexes command
|
||||||
|
*/
|
||||||
|
private async handleCreateIndexes(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
const { storage, database, command } = context;
|
||||||
|
|
||||||
|
const collection = command.createIndexes;
|
||||||
|
const indexes = command.indexes || [];
|
||||||
|
|
||||||
|
if (!Array.isArray(indexes)) {
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: 'indexes must be an array',
|
||||||
|
code: 2,
|
||||||
|
codeName: 'BadValue',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ensure collection exists
|
||||||
|
await storage.createCollection(database, collection);
|
||||||
|
|
||||||
|
const indexEngine = getIndexEngine(storage, database, collection);
|
||||||
|
const createdNames: string[] = [];
|
||||||
|
let numIndexesBefore = 0;
|
||||||
|
let numIndexesAfter = 0;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const existingIndexes = await indexEngine.listIndexes();
|
||||||
|
numIndexesBefore = existingIndexes.length;
|
||||||
|
|
||||||
|
for (const indexSpec of indexes) {
|
||||||
|
const key = indexSpec.key;
|
||||||
|
const options = {
|
||||||
|
name: indexSpec.name,
|
||||||
|
unique: indexSpec.unique,
|
||||||
|
sparse: indexSpec.sparse,
|
||||||
|
expireAfterSeconds: indexSpec.expireAfterSeconds,
|
||||||
|
background: indexSpec.background,
|
||||||
|
partialFilterExpression: indexSpec.partialFilterExpression,
|
||||||
|
};
|
||||||
|
|
||||||
|
const name = await indexEngine.createIndex(key, options);
|
||||||
|
createdNames.push(name);
|
||||||
|
}
|
||||||
|
|
||||||
|
const finalIndexes = await indexEngine.listIndexes();
|
||||||
|
numIndexesAfter = finalIndexes.length;
|
||||||
|
} catch (error: any) {
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: error.message || 'Failed to create index',
|
||||||
|
code: error.code || 1,
|
||||||
|
codeName: error.codeName || 'InternalError',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
ok: 1,
|
||||||
|
numIndexesBefore,
|
||||||
|
numIndexesAfter,
|
||||||
|
createdCollectionAutomatically: false,
|
||||||
|
commitQuorum: 'votingMembers',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle dropIndexes command
|
||||||
|
*/
|
||||||
|
private async handleDropIndexes(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
const { storage, database, command } = context;
|
||||||
|
|
||||||
|
const collection = command.dropIndexes;
|
||||||
|
const indexName = command.index;
|
||||||
|
|
||||||
|
// Check if collection exists
|
||||||
|
const exists = await storage.collectionExists(database, collection);
|
||||||
|
if (!exists) {
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: `ns not found ${database}.${collection}`,
|
||||||
|
code: 26,
|
||||||
|
codeName: 'NamespaceNotFound',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const indexEngine = getIndexEngine(storage, database, collection);
|
||||||
|
|
||||||
|
try {
|
||||||
|
if (indexName === '*') {
|
||||||
|
// Drop all indexes except _id
|
||||||
|
await indexEngine.dropAllIndexes();
|
||||||
|
} else if (typeof indexName === 'string') {
|
||||||
|
// Drop specific index by name
|
||||||
|
await indexEngine.dropIndex(indexName);
|
||||||
|
} else if (typeof indexName === 'object') {
|
||||||
|
// Drop index by key specification
|
||||||
|
const indexes = await indexEngine.listIndexes();
|
||||||
|
const keyStr = JSON.stringify(indexName);
|
||||||
|
|
||||||
|
for (const idx of indexes) {
|
||||||
|
if (JSON.stringify(idx.key) === keyStr) {
|
||||||
|
await indexEngine.dropIndex(idx.name);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { ok: 1, nIndexesWas: 1 };
|
||||||
|
} catch (error: any) {
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: error.message || 'Failed to drop index',
|
||||||
|
code: error.code || 27,
|
||||||
|
codeName: error.codeName || 'IndexNotFound',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle listIndexes command
|
||||||
|
*/
|
||||||
|
private async handleListIndexes(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
const { storage, database, command } = context;
|
||||||
|
|
||||||
|
const collection = command.listIndexes;
|
||||||
|
const cursor = command.cursor || {};
|
||||||
|
const batchSize = cursor.batchSize || 101;
|
||||||
|
|
||||||
|
// Check if collection exists
|
||||||
|
const exists = await storage.collectionExists(database, collection);
|
||||||
|
if (!exists) {
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: `ns not found ${database}.${collection}`,
|
||||||
|
code: 26,
|
||||||
|
codeName: 'NamespaceNotFound',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const indexEngine = getIndexEngine(storage, database, collection);
|
||||||
|
const indexes = await indexEngine.listIndexes();
|
||||||
|
|
||||||
|
// Format indexes for response
|
||||||
|
const indexDocs = indexes.map(idx => ({
|
||||||
|
v: idx.v || 2,
|
||||||
|
key: idx.key,
|
||||||
|
name: idx.name,
|
||||||
|
...(idx.unique ? { unique: idx.unique } : {}),
|
||||||
|
...(idx.sparse ? { sparse: idx.sparse } : {}),
|
||||||
|
...(idx.expireAfterSeconds !== undefined ? { expireAfterSeconds: idx.expireAfterSeconds } : {}),
|
||||||
|
}));
|
||||||
|
|
||||||
|
return {
|
||||||
|
ok: 1,
|
||||||
|
cursor: {
|
||||||
|
id: plugins.bson.Long.fromNumber(0),
|
||||||
|
ns: `${database}.${collection}`,
|
||||||
|
firstBatch: indexDocs,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
97
ts/ts_tsmdb/server/handlers/InsertHandler.ts
Normal file
97
ts/ts_tsmdb/server/handlers/InsertHandler.ts
Normal file
@@ -0,0 +1,97 @@
|
|||||||
|
import * as plugins from '../../plugins.js';
|
||||||
|
import type { ICommandHandler, IHandlerContext } from '../CommandRouter.js';
|
||||||
|
import type { IStoredDocument } from '../../types/interfaces.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* InsertHandler - Handles insert commands
|
||||||
|
*/
|
||||||
|
export class InsertHandler implements ICommandHandler {
|
||||||
|
async handle(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
const { storage, database, command, documentSequences } = context;
|
||||||
|
|
||||||
|
const collection = command.insert;
|
||||||
|
if (typeof collection !== 'string') {
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: 'insert command requires a collection name',
|
||||||
|
code: 2,
|
||||||
|
codeName: 'BadValue',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get documents from command or document sequences
|
||||||
|
let documents: plugins.bson.Document[] = command.documents || [];
|
||||||
|
|
||||||
|
// Check for OP_MSG document sequences (for bulk inserts)
|
||||||
|
if (documentSequences && documentSequences.has('documents')) {
|
||||||
|
documents = documentSequences.get('documents')!;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!Array.isArray(documents) || documents.length === 0) {
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: 'insert command requires documents array',
|
||||||
|
code: 2,
|
||||||
|
codeName: 'BadValue',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const ordered = command.ordered !== false;
|
||||||
|
const writeErrors: plugins.bson.Document[] = [];
|
||||||
|
let insertedCount = 0;
|
||||||
|
|
||||||
|
// Ensure collection exists
|
||||||
|
await storage.createCollection(database, collection);
|
||||||
|
|
||||||
|
const indexEngine = context.getIndexEngine(collection);
|
||||||
|
|
||||||
|
// Insert documents
|
||||||
|
for (let i = 0; i < documents.length; i++) {
|
||||||
|
const doc = documents[i];
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Ensure _id exists
|
||||||
|
if (!doc._id) {
|
||||||
|
doc._id = new plugins.bson.ObjectId();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check index constraints before insert (doc now has _id)
|
||||||
|
await indexEngine.onInsert(doc as IStoredDocument);
|
||||||
|
|
||||||
|
await storage.insertOne(database, collection, doc);
|
||||||
|
insertedCount++;
|
||||||
|
} catch (error: any) {
|
||||||
|
const writeError: plugins.bson.Document = {
|
||||||
|
index: i,
|
||||||
|
code: error.code || 11000,
|
||||||
|
errmsg: error.message || 'Insert failed',
|
||||||
|
};
|
||||||
|
|
||||||
|
// Check for duplicate key error
|
||||||
|
if (error.message?.includes('Duplicate key')) {
|
||||||
|
writeError.code = 11000;
|
||||||
|
writeError.keyPattern = { _id: 1 };
|
||||||
|
writeError.keyValue = { _id: doc._id };
|
||||||
|
}
|
||||||
|
|
||||||
|
writeErrors.push(writeError);
|
||||||
|
|
||||||
|
if (ordered) {
|
||||||
|
// Stop on first error for ordered inserts
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const response: plugins.bson.Document = {
|
||||||
|
ok: 1,
|
||||||
|
n: insertedCount,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (writeErrors.length > 0) {
|
||||||
|
response.writeErrors = writeErrors;
|
||||||
|
}
|
||||||
|
|
||||||
|
return response;
|
||||||
|
}
|
||||||
|
}
|
||||||
344
ts/ts_tsmdb/server/handlers/UpdateHandler.ts
Normal file
344
ts/ts_tsmdb/server/handlers/UpdateHandler.ts
Normal file
@@ -0,0 +1,344 @@
|
|||||||
|
import * as plugins from '../../plugins.js';
|
||||||
|
import type { ICommandHandler, IHandlerContext } from '../CommandRouter.js';
|
||||||
|
import type { IStoredDocument } from '../../types/interfaces.js';
|
||||||
|
import { QueryEngine } from '../../engine/QueryEngine.js';
|
||||||
|
import { UpdateEngine } from '../../engine/UpdateEngine.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* UpdateHandler - Handles update, findAndModify commands
|
||||||
|
*/
|
||||||
|
export class UpdateHandler implements ICommandHandler {
|
||||||
|
async handle(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
const { command } = context;
|
||||||
|
|
||||||
|
// Check findAndModify first since it also has an 'update' field
|
||||||
|
if (command.findAndModify) {
|
||||||
|
return this.handleFindAndModify(context);
|
||||||
|
} else if (command.update && typeof command.update === 'string') {
|
||||||
|
// 'update' command has collection name as the value
|
||||||
|
return this.handleUpdate(context);
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: 'Unknown update-related command',
|
||||||
|
code: 59,
|
||||||
|
codeName: 'CommandNotFound',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle update command
|
||||||
|
*/
|
||||||
|
private async handleUpdate(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
const { storage, database, command, documentSequences } = context;
|
||||||
|
|
||||||
|
const collection = command.update;
|
||||||
|
if (typeof collection !== 'string') {
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: 'update command requires a collection name',
|
||||||
|
code: 2,
|
||||||
|
codeName: 'BadValue',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get updates from command or document sequences
|
||||||
|
let updates: plugins.bson.Document[] = command.updates || [];
|
||||||
|
|
||||||
|
// Check for OP_MSG document sequences
|
||||||
|
if (documentSequences && documentSequences.has('updates')) {
|
||||||
|
updates = documentSequences.get('updates')!;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!Array.isArray(updates) || updates.length === 0) {
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: 'update command requires updates array',
|
||||||
|
code: 2,
|
||||||
|
codeName: 'BadValue',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const ordered = command.ordered !== false;
|
||||||
|
const writeErrors: plugins.bson.Document[] = [];
|
||||||
|
let totalMatched = 0;
|
||||||
|
let totalModified = 0;
|
||||||
|
let totalUpserted = 0;
|
||||||
|
const upserted: plugins.bson.Document[] = [];
|
||||||
|
|
||||||
|
// Ensure collection exists
|
||||||
|
await storage.createCollection(database, collection);
|
||||||
|
|
||||||
|
const indexEngine = context.getIndexEngine(collection);
|
||||||
|
|
||||||
|
for (let i = 0; i < updates.length; i++) {
|
||||||
|
const updateSpec = updates[i];
|
||||||
|
const filter = updateSpec.q || updateSpec.filter || {};
|
||||||
|
const update = updateSpec.u || updateSpec.update || {};
|
||||||
|
const multi = updateSpec.multi || false;
|
||||||
|
const upsert = updateSpec.upsert || false;
|
||||||
|
const arrayFilters = updateSpec.arrayFilters;
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Try to use index-accelerated query
|
||||||
|
const candidateIds = await indexEngine.findCandidateIds(filter);
|
||||||
|
|
||||||
|
let documents: IStoredDocument[];
|
||||||
|
if (candidateIds !== null) {
|
||||||
|
documents = await storage.findByIds(database, collection, candidateIds);
|
||||||
|
} else {
|
||||||
|
documents = await storage.findAll(database, collection);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Apply filter
|
||||||
|
let matchingDocs = QueryEngine.filter(documents, filter);
|
||||||
|
|
||||||
|
if (matchingDocs.length === 0 && upsert) {
|
||||||
|
// Upsert: create new document
|
||||||
|
const newDoc: plugins.bson.Document = { _id: new plugins.bson.ObjectId() };
|
||||||
|
|
||||||
|
// Apply filter fields to the new document
|
||||||
|
this.applyFilterToDoc(newDoc, filter);
|
||||||
|
|
||||||
|
// Apply update
|
||||||
|
const updatedDoc = UpdateEngine.applyUpdate(newDoc as any, update, arrayFilters);
|
||||||
|
|
||||||
|
// Handle $setOnInsert
|
||||||
|
if (update.$setOnInsert) {
|
||||||
|
Object.assign(updatedDoc, update.$setOnInsert);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update index for the new document
|
||||||
|
await indexEngine.onInsert(updatedDoc);
|
||||||
|
await storage.insertOne(database, collection, updatedDoc);
|
||||||
|
totalUpserted++;
|
||||||
|
upserted.push({ index: i, _id: updatedDoc._id });
|
||||||
|
} else {
|
||||||
|
// Update existing documents
|
||||||
|
const docsToUpdate = multi ? matchingDocs : matchingDocs.slice(0, 1);
|
||||||
|
totalMatched += docsToUpdate.length;
|
||||||
|
|
||||||
|
for (const doc of docsToUpdate) {
|
||||||
|
const updatedDoc = UpdateEngine.applyUpdate(doc, update, arrayFilters);
|
||||||
|
|
||||||
|
// Check if document actually changed
|
||||||
|
const changed = JSON.stringify(doc) !== JSON.stringify(updatedDoc);
|
||||||
|
if (changed) {
|
||||||
|
// Update index
|
||||||
|
await indexEngine.onUpdate(doc as any, updatedDoc);
|
||||||
|
await storage.updateById(database, collection, doc._id, updatedDoc);
|
||||||
|
totalModified++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (error: any) {
|
||||||
|
writeErrors.push({
|
||||||
|
index: i,
|
||||||
|
code: error.code || 1,
|
||||||
|
errmsg: error.message || 'Update failed',
|
||||||
|
});
|
||||||
|
|
||||||
|
if (ordered) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const response: plugins.bson.Document = {
|
||||||
|
ok: 1,
|
||||||
|
n: totalMatched + totalUpserted,
|
||||||
|
nModified: totalModified,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (upserted.length > 0) {
|
||||||
|
response.upserted = upserted;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (writeErrors.length > 0) {
|
||||||
|
response.writeErrors = writeErrors;
|
||||||
|
}
|
||||||
|
|
||||||
|
return response;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle findAndModify command
|
||||||
|
*/
|
||||||
|
private async handleFindAndModify(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||||
|
const { storage, database, command } = context;
|
||||||
|
|
||||||
|
const collection = command.findAndModify;
|
||||||
|
const query = command.query || {};
|
||||||
|
const update = command.update;
|
||||||
|
const remove = command.remove || false;
|
||||||
|
const returnNew = command.new || false;
|
||||||
|
const upsert = command.upsert || false;
|
||||||
|
const sort = command.sort;
|
||||||
|
const fields = command.fields;
|
||||||
|
const arrayFilters = command.arrayFilters;
|
||||||
|
|
||||||
|
// Validate - either update or remove, not both
|
||||||
|
if (update && remove) {
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: 'cannot specify both update and remove',
|
||||||
|
code: 2,
|
||||||
|
codeName: 'BadValue',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!update && !remove) {
|
||||||
|
return {
|
||||||
|
ok: 0,
|
||||||
|
errmsg: 'either update or remove is required',
|
||||||
|
code: 2,
|
||||||
|
codeName: 'BadValue',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ensure collection exists
|
||||||
|
await storage.createCollection(database, collection);
|
||||||
|
|
||||||
|
// Try to use index-accelerated query
|
||||||
|
const indexEngine = context.getIndexEngine(collection);
|
||||||
|
const candidateIds = await indexEngine.findCandidateIds(query);
|
||||||
|
|
||||||
|
let documents: IStoredDocument[];
|
||||||
|
if (candidateIds !== null) {
|
||||||
|
documents = await storage.findByIds(database, collection, candidateIds);
|
||||||
|
} else {
|
||||||
|
documents = await storage.findAll(database, collection);
|
||||||
|
}
|
||||||
|
|
||||||
|
let matchingDocs = QueryEngine.filter(documents, query);
|
||||||
|
|
||||||
|
// Apply sort if specified
|
||||||
|
if (sort) {
|
||||||
|
matchingDocs = QueryEngine.sort(matchingDocs, sort);
|
||||||
|
}
|
||||||
|
|
||||||
|
const doc = matchingDocs[0];
|
||||||
|
|
||||||
|
if (remove) {
|
||||||
|
// Delete operation
|
||||||
|
if (!doc) {
|
||||||
|
return { ok: 1, value: null };
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update index for delete
|
||||||
|
await indexEngine.onDelete(doc as any);
|
||||||
|
await storage.deleteById(database, collection, doc._id);
|
||||||
|
|
||||||
|
let result = doc;
|
||||||
|
if (fields) {
|
||||||
|
result = QueryEngine.project([doc], fields)[0] as any;
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
ok: 1,
|
||||||
|
value: result,
|
||||||
|
lastErrorObject: {
|
||||||
|
n: 1,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
} else {
|
||||||
|
// Update operation
|
||||||
|
if (!doc && !upsert) {
|
||||||
|
return { ok: 1, value: null };
|
||||||
|
}
|
||||||
|
|
||||||
|
let resultDoc: plugins.bson.Document;
|
||||||
|
let originalDoc: plugins.bson.Document | null = null;
|
||||||
|
let isUpsert = false;
|
||||||
|
|
||||||
|
if (doc) {
|
||||||
|
// Update existing
|
||||||
|
originalDoc = { ...doc };
|
||||||
|
resultDoc = UpdateEngine.applyUpdate(doc, update, arrayFilters);
|
||||||
|
// Update index
|
||||||
|
await indexEngine.onUpdate(doc as any, resultDoc as any);
|
||||||
|
await storage.updateById(database, collection, doc._id, resultDoc as any);
|
||||||
|
} else {
|
||||||
|
// Upsert
|
||||||
|
isUpsert = true;
|
||||||
|
const newDoc: plugins.bson.Document = { _id: new plugins.bson.ObjectId() };
|
||||||
|
this.applyFilterToDoc(newDoc, query);
|
||||||
|
resultDoc = UpdateEngine.applyUpdate(newDoc as any, update, arrayFilters);
|
||||||
|
|
||||||
|
if (update.$setOnInsert) {
|
||||||
|
Object.assign(resultDoc, update.$setOnInsert);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update index for insert
|
||||||
|
await indexEngine.onInsert(resultDoc as any);
|
||||||
|
await storage.insertOne(database, collection, resultDoc);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Apply projection
|
||||||
|
let returnValue = returnNew ? resultDoc : (originalDoc || null);
|
||||||
|
if (returnValue && fields) {
|
||||||
|
returnValue = QueryEngine.project([returnValue as any], fields)[0];
|
||||||
|
}
|
||||||
|
|
||||||
|
const response: plugins.bson.Document = {
|
||||||
|
ok: 1,
|
||||||
|
value: returnValue,
|
||||||
|
lastErrorObject: {
|
||||||
|
n: 1,
|
||||||
|
updatedExisting: !isUpsert && doc !== undefined,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
if (isUpsert) {
|
||||||
|
response.lastErrorObject.upserted = resultDoc._id;
|
||||||
|
}
|
||||||
|
|
||||||
|
return response;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Apply filter equality conditions to a new document (for upsert)
|
||||||
|
*/
|
||||||
|
private applyFilterToDoc(doc: plugins.bson.Document, filter: plugins.bson.Document): void {
|
||||||
|
for (const [key, value] of Object.entries(filter)) {
|
||||||
|
// Skip operators
|
||||||
|
if (key.startsWith('$')) continue;
|
||||||
|
|
||||||
|
// Handle nested paths
|
||||||
|
if (typeof value === 'object' && value !== null) {
|
||||||
|
// Check if it's an operator
|
||||||
|
const valueKeys = Object.keys(value);
|
||||||
|
if (valueKeys.some(k => k.startsWith('$'))) {
|
||||||
|
// Extract equality value from $eq if present
|
||||||
|
if ('$eq' in value) {
|
||||||
|
this.setNestedValue(doc, key, value.$eq);
|
||||||
|
}
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Direct value assignment
|
||||||
|
this.setNestedValue(doc, key, value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set a nested value using dot notation
|
||||||
|
*/
|
||||||
|
private setNestedValue(obj: plugins.bson.Document, path: string, value: any): void {
|
||||||
|
const parts = path.split('.');
|
||||||
|
let current = obj;
|
||||||
|
|
||||||
|
for (let i = 0; i < parts.length - 1; i++) {
|
||||||
|
const part = parts[i];
|
||||||
|
if (!(part in current)) {
|
||||||
|
current[part] = {};
|
||||||
|
}
|
||||||
|
current = current[part];
|
||||||
|
}
|
||||||
|
|
||||||
|
current[parts[parts.length - 1]] = value;
|
||||||
|
}
|
||||||
|
}
|
||||||
10
ts/ts_tsmdb/server/handlers/index.ts
Normal file
10
ts/ts_tsmdb/server/handlers/index.ts
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
// Export all command handlers
|
||||||
|
|
||||||
|
export { HelloHandler } from './HelloHandler.js';
|
||||||
|
export { InsertHandler } from './InsertHandler.js';
|
||||||
|
export { FindHandler } from './FindHandler.js';
|
||||||
|
export { UpdateHandler } from './UpdateHandler.js';
|
||||||
|
export { DeleteHandler } from './DeleteHandler.js';
|
||||||
|
export { AggregateHandler } from './AggregateHandler.js';
|
||||||
|
export { IndexHandler } from './IndexHandler.js';
|
||||||
|
export { AdminHandler } from './AdminHandler.js';
|
||||||
10
ts/ts_tsmdb/server/index.ts
Normal file
10
ts/ts_tsmdb/server/index.ts
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
// Server module exports
|
||||||
|
|
||||||
|
export { TsmdbServer } from './TsmdbServer.js';
|
||||||
|
export type { ITsmdbServerOptions } from './TsmdbServer.js';
|
||||||
|
export { WireProtocol } from './WireProtocol.js';
|
||||||
|
export { CommandRouter } from './CommandRouter.js';
|
||||||
|
export type { ICommandHandler, IHandlerContext, ICursorState } from './CommandRouter.js';
|
||||||
|
|
||||||
|
// Export handlers
|
||||||
|
export * from './handlers/index.js';
|
||||||
562
ts/ts_tsmdb/storage/FileStorageAdapter.ts
Normal file
562
ts/ts_tsmdb/storage/FileStorageAdapter.ts
Normal file
@@ -0,0 +1,562 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import type { IStorageAdapter } from './IStorageAdapter.js';
|
||||||
|
import type { IStoredDocument, IOpLogEntry, Document } from '../types/interfaces.js';
|
||||||
|
import { calculateDocumentChecksum, verifyChecksum } from '../utils/checksum.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* File storage adapter options
|
||||||
|
*/
|
||||||
|
/**
 * Options for FileStorageAdapter construction.
 */
export interface IFileStorageAdapterOptions {
  /** Enable checksum verification for data integrity */
  enableChecksums?: boolean;
  /** Throw error on checksum mismatch (default: false, just log warning) */
  strictChecksums?: boolean;
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* File-based storage adapter for TsmDB
|
||||||
|
* Stores data in JSON files on disk for persistence
|
||||||
|
*/
|
||||||
|
export class FileStorageAdapter implements IStorageAdapter {
|
||||||
|
private basePath: string;
|
||||||
|
private opLogCounter = 0;
|
||||||
|
private initialized = false;
|
||||||
|
private fs = new plugins.smartfs.SmartFs(new plugins.smartfs.SmartFsProviderNode());
|
||||||
|
private enableChecksums: boolean;
|
||||||
|
private strictChecksums: boolean;
|
||||||
|
|
||||||
|
/**
 * @param basePath root directory under which all databases are stored
 * @param options  checksum behavior; both flags default to false
 */
constructor(basePath: string, options?: IFileStorageAdapterOptions) {
  this.basePath = basePath;
  this.strictChecksums = options?.strictChecksums ?? false;
  this.enableChecksums = options?.enableChecksums ?? false;
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Helper Methods
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
/** Directory that holds all collections of a database. */
private getDbPath(dbName: string): string {
  const dbDir = plugins.smartpath.join(this.basePath, dbName);
  return dbDir;
}
|
||||||
|
|
||||||
|
/** JSON file backing a single collection. */
private getCollectionPath(dbName: string, collName: string): string {
  const fileName = `${collName}.json`;
  return plugins.smartpath.join(this.basePath, dbName, fileName);
}
|
||||||
|
|
||||||
|
/** JSON file holding the index definitions of a collection. */
private getIndexPath(dbName: string, collName: string): string {
  const fileName = `${collName}.indexes.json`;
  return plugins.smartpath.join(this.basePath, dbName, fileName);
}
|
||||||
|
|
||||||
|
/** Path of the shared operation log file (underscore-prefixed = internal). */
private getOpLogPath(): string {
  const opLogFile = plugins.smartpath.join(this.basePath, '_oplog.json');
  return opLogFile;
}
|
||||||
|
|
||||||
|
/** Path of the adapter metadata file (persists the oplog counter). */
private getMetaPath(): string {
  const metaFile = plugins.smartpath.join(this.basePath, '_meta.json');
  return metaFile;
}
|
||||||
|
|
||||||
|
private async readJsonFile<T>(filePath: string, defaultValue: T): Promise<T> {
|
||||||
|
try {
|
||||||
|
const exists = await this.fs.file(filePath).exists();
|
||||||
|
if (!exists) return defaultValue;
|
||||||
|
const content = await this.fs.file(filePath).encoding('utf8').read();
|
||||||
|
return JSON.parse(content as string);
|
||||||
|
} catch {
|
||||||
|
return defaultValue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private async writeJsonFile(filePath: string, data: any): Promise<void> {
|
||||||
|
const dir = filePath.substring(0, filePath.lastIndexOf('/'));
|
||||||
|
await this.fs.directory(dir).recursive().create();
|
||||||
|
await this.fs.file(filePath).encoding('utf8').write(JSON.stringify(data, null, 2));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Re-hydrate a JSON-deserialized _id into a bson ObjectId. Handles both the
 * plain hex-string form and the extended-JSON { $oid } form; mutates doc.
 */
private restoreObjectIds(doc: any): IStoredDocument {
  const rawId = doc._id;
  if (rawId) {
    if (typeof rawId === 'string') {
      doc._id = new plugins.bson.ObjectId(rawId);
    } else if (typeof rawId === 'object' && rawId.$oid) {
      doc._id = new plugins.bson.ObjectId(rawId.$oid);
    }
  }
  return doc;
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Verify document checksum and handle errors
|
||||||
|
*/
|
||||||
|
/**
 * Verify a stored document's checksum. Returns true when checksums are
 * disabled or the document carries none. On mismatch: throws in strict
 * mode, otherwise logs a warning and returns false.
 */
private verifyDocumentChecksum(doc: any): boolean {
  if (!this.enableChecksums || !doc._checksum) {
    return true;
  }

  const ok = verifyChecksum(doc);
  if (!ok) {
    const message = `Checksum mismatch for document ${doc._id}`;
    if (this.strictChecksums) {
      throw new Error(message);
    }
    console.warn(`WARNING: ${message}`);
  }
  return ok;
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add checksum to document before storing
|
||||||
|
*/
|
||||||
|
/**
 * Attach a _checksum field before persisting (no-op when checksums are off).
 * Returns a shallow copy; the input document is not mutated.
 */
private prepareDocumentForStorage(doc: any): any {
  if (!this.enableChecksums) {
    return doc;
  }
  return { ...doc, _checksum: calculateDocumentChecksum(doc) };
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remove internal checksum field before returning to user
|
||||||
|
*/
|
||||||
|
/** Strip the internal _checksum field and re-hydrate the _id for callers. */
private cleanDocumentForReturn(doc: any): IStoredDocument {
  const { _checksum: _ignored, ...rest } = doc;
  return this.restoreObjectIds(rest);
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Initialization
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
async initialize(): Promise<void> {
|
||||||
|
if (this.initialized) return;
|
||||||
|
|
||||||
|
await this.fs.directory(this.basePath).recursive().create();
|
||||||
|
|
||||||
|
// Load metadata
|
||||||
|
const meta = await this.readJsonFile(this.getMetaPath(), { opLogCounter: 0 });
|
||||||
|
this.opLogCounter = meta.opLogCounter || 0;
|
||||||
|
|
||||||
|
this.initialized = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
async close(): Promise<void> {
|
||||||
|
// Save metadata
|
||||||
|
await this.writeJsonFile(this.getMetaPath(), { opLogCounter: this.opLogCounter });
|
||||||
|
this.initialized = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Database Operations
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
async listDatabases(): Promise<string[]> {
|
||||||
|
await this.initialize();
|
||||||
|
try {
|
||||||
|
const entries = await this.fs.directory(this.basePath).list();
|
||||||
|
return entries
|
||||||
|
.filter(entry => entry.isDirectory && !entry.name.startsWith('_'))
|
||||||
|
.map(entry => entry.name);
|
||||||
|
} catch {
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async createDatabase(dbName: string): Promise<void> {
|
||||||
|
await this.initialize();
|
||||||
|
const dbPath = this.getDbPath(dbName);
|
||||||
|
await this.fs.directory(dbPath).recursive().create();
|
||||||
|
}
|
||||||
|
|
||||||
|
async dropDatabase(dbName: string): Promise<boolean> {
|
||||||
|
await this.initialize();
|
||||||
|
const dbPath = this.getDbPath(dbName);
|
||||||
|
try {
|
||||||
|
const exists = await this.fs.directory(dbPath).exists();
|
||||||
|
if (exists) {
|
||||||
|
await this.fs.directory(dbPath).recursive().delete();
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
} catch {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async databaseExists(dbName: string): Promise<boolean> {
|
||||||
|
await this.initialize();
|
||||||
|
const dbPath = this.getDbPath(dbName);
|
||||||
|
return this.fs.directory(dbPath).exists();
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Collection Operations
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
async listCollections(dbName: string): Promise<string[]> {
|
||||||
|
await this.initialize();
|
||||||
|
const dbPath = this.getDbPath(dbName);
|
||||||
|
try {
|
||||||
|
const entries = await this.fs.directory(dbPath).list();
|
||||||
|
return entries
|
||||||
|
.filter(entry => entry.isFile && entry.name.endsWith('.json') && !entry.name.endsWith('.indexes.json'))
|
||||||
|
.map(entry => entry.name.replace('.json', ''));
|
||||||
|
} catch {
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async createCollection(dbName: string, collName: string): Promise<void> {
|
||||||
|
await this.createDatabase(dbName);
|
||||||
|
const collPath = this.getCollectionPath(dbName, collName);
|
||||||
|
const exists = await this.fs.file(collPath).exists();
|
||||||
|
if (!exists) {
|
||||||
|
await this.writeJsonFile(collPath, []);
|
||||||
|
// Create default _id index
|
||||||
|
await this.writeJsonFile(this.getIndexPath(dbName, collName), [
|
||||||
|
{ name: '_id_', key: { _id: 1 }, unique: true }
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async dropCollection(dbName: string, collName: string): Promise<boolean> {
|
||||||
|
await this.initialize();
|
||||||
|
const collPath = this.getCollectionPath(dbName, collName);
|
||||||
|
const indexPath = this.getIndexPath(dbName, collName);
|
||||||
|
try {
|
||||||
|
const exists = await this.fs.file(collPath).exists();
|
||||||
|
if (exists) {
|
||||||
|
await this.fs.file(collPath).delete();
|
||||||
|
try {
|
||||||
|
await this.fs.file(indexPath).delete();
|
||||||
|
} catch {}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
} catch {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async collectionExists(dbName: string, collName: string): Promise<boolean> {
|
||||||
|
await this.initialize();
|
||||||
|
const collPath = this.getCollectionPath(dbName, collName);
|
||||||
|
return this.fs.file(collPath).exists();
|
||||||
|
}
|
||||||
|
|
||||||
|
async renameCollection(dbName: string, oldName: string, newName: string): Promise<void> {
|
||||||
|
await this.initialize();
|
||||||
|
const oldPath = this.getCollectionPath(dbName, oldName);
|
||||||
|
const newPath = this.getCollectionPath(dbName, newName);
|
||||||
|
const oldIndexPath = this.getIndexPath(dbName, oldName);
|
||||||
|
const newIndexPath = this.getIndexPath(dbName, newName);
|
||||||
|
|
||||||
|
const exists = await this.fs.file(oldPath).exists();
|
||||||
|
if (!exists) {
|
||||||
|
throw new Error(`Collection ${oldName} not found`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read, write to new, delete old
|
||||||
|
const docs = await this.readJsonFile<any[]>(oldPath, []);
|
||||||
|
await this.writeJsonFile(newPath, docs);
|
||||||
|
await this.fs.file(oldPath).delete();
|
||||||
|
|
||||||
|
// Handle indexes
|
||||||
|
const indexes = await this.readJsonFile<any[]>(oldIndexPath, []);
|
||||||
|
await this.writeJsonFile(newIndexPath, indexes);
|
||||||
|
try {
|
||||||
|
await this.fs.file(oldIndexPath).delete();
|
||||||
|
} catch {}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Document Operations
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
async insertOne(dbName: string, collName: string, doc: Document): Promise<IStoredDocument> {
|
||||||
|
await this.createCollection(dbName, collName);
|
||||||
|
const collPath = this.getCollectionPath(dbName, collName);
|
||||||
|
const docs = await this.readJsonFile<any[]>(collPath, []);
|
||||||
|
|
||||||
|
const storedDoc: IStoredDocument = {
|
||||||
|
...doc,
|
||||||
|
_id: doc._id ? (doc._id instanceof plugins.bson.ObjectId ? doc._id : new plugins.bson.ObjectId(doc._id)) : new plugins.bson.ObjectId(),
|
||||||
|
};
|
||||||
|
|
||||||
|
// Check for duplicate
|
||||||
|
const idStr = storedDoc._id.toHexString();
|
||||||
|
if (docs.some(d => d._id === idStr || (d._id && d._id.toString() === idStr))) {
|
||||||
|
throw new Error(`Duplicate key error: _id ${idStr}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add checksum if enabled
|
||||||
|
const docToStore = this.prepareDocumentForStorage(storedDoc);
|
||||||
|
docs.push(docToStore);
|
||||||
|
await this.writeJsonFile(collPath, docs);
|
||||||
|
return storedDoc;
|
||||||
|
}
|
||||||
|
|
||||||
|
async insertMany(dbName: string, collName: string, docsToInsert: Document[]): Promise<IStoredDocument[]> {
|
||||||
|
await this.createCollection(dbName, collName);
|
||||||
|
const collPath = this.getCollectionPath(dbName, collName);
|
||||||
|
const docs = await this.readJsonFile<any[]>(collPath, []);
|
||||||
|
|
||||||
|
const results: IStoredDocument[] = [];
|
||||||
|
const existingIds = new Set(docs.map(d => d._id?.toString?.() || d._id));
|
||||||
|
|
||||||
|
for (const doc of docsToInsert) {
|
||||||
|
const storedDoc: IStoredDocument = {
|
||||||
|
...doc,
|
||||||
|
_id: doc._id ? (doc._id instanceof plugins.bson.ObjectId ? doc._id : new plugins.bson.ObjectId(doc._id)) : new plugins.bson.ObjectId(),
|
||||||
|
};
|
||||||
|
|
||||||
|
const idStr = storedDoc._id.toHexString();
|
||||||
|
if (existingIds.has(idStr)) {
|
||||||
|
throw new Error(`Duplicate key error: _id ${idStr}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
existingIds.add(idStr);
|
||||||
|
// Add checksum if enabled
|
||||||
|
const docToStore = this.prepareDocumentForStorage(storedDoc);
|
||||||
|
docs.push(docToStore);
|
||||||
|
results.push(storedDoc);
|
||||||
|
}
|
||||||
|
|
||||||
|
await this.writeJsonFile(collPath, docs);
|
||||||
|
return results;
|
||||||
|
}
|
||||||
|
|
||||||
|
async findAll(dbName: string, collName: string): Promise<IStoredDocument[]> {
|
||||||
|
await this.createCollection(dbName, collName);
|
||||||
|
const collPath = this.getCollectionPath(dbName, collName);
|
||||||
|
const docs = await this.readJsonFile<any[]>(collPath, []);
|
||||||
|
return docs.map(doc => {
|
||||||
|
// Verify checksum if enabled
|
||||||
|
this.verifyDocumentChecksum(doc);
|
||||||
|
// Clean and return document without internal checksum field
|
||||||
|
return this.cleanDocumentForReturn(doc);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async findByIds(dbName: string, collName: string, ids: Set<string>): Promise<IStoredDocument[]> {
|
||||||
|
await this.createCollection(dbName, collName);
|
||||||
|
const collPath = this.getCollectionPath(dbName, collName);
|
||||||
|
const docs = await this.readJsonFile<any[]>(collPath, []);
|
||||||
|
const results: IStoredDocument[] = [];
|
||||||
|
for (const doc of docs) {
|
||||||
|
// Verify checksum if enabled
|
||||||
|
this.verifyDocumentChecksum(doc);
|
||||||
|
// Clean and restore document
|
||||||
|
const cleaned = this.cleanDocumentForReturn(doc);
|
||||||
|
if (ids.has(cleaned._id.toHexString())) {
|
||||||
|
results.push(cleaned);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return results;
|
||||||
|
}
|
||||||
|
|
||||||
|
async findById(dbName: string, collName: string, id: plugins.bson.ObjectId): Promise<IStoredDocument | null> {
|
||||||
|
// Use findAll which already handles checksum verification
|
||||||
|
const docs = await this.findAll(dbName, collName);
|
||||||
|
const idStr = id.toHexString();
|
||||||
|
return docs.find(d => d._id.toHexString() === idStr) || null;
|
||||||
|
}
|
||||||
|
|
||||||
|
async updateById(dbName: string, collName: string, id: plugins.bson.ObjectId, doc: IStoredDocument): Promise<boolean> {
|
||||||
|
const collPath = this.getCollectionPath(dbName, collName);
|
||||||
|
const docs = await this.readJsonFile<any[]>(collPath, []);
|
||||||
|
const idStr = id.toHexString();
|
||||||
|
|
||||||
|
const idx = docs.findIndex(d => {
|
||||||
|
const docId = d._id?.toHexString?.() || d._id?.toString?.() || d._id;
|
||||||
|
return docId === idStr;
|
||||||
|
});
|
||||||
|
|
||||||
|
if (idx === -1) return false;
|
||||||
|
|
||||||
|
// Add checksum if enabled
|
||||||
|
const docToStore = this.prepareDocumentForStorage(doc);
|
||||||
|
docs[idx] = docToStore;
|
||||||
|
await this.writeJsonFile(collPath, docs);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
async deleteById(dbName: string, collName: string, id: plugins.bson.ObjectId): Promise<boolean> {
|
||||||
|
const collPath = this.getCollectionPath(dbName, collName);
|
||||||
|
const docs = await this.readJsonFile<any[]>(collPath, []);
|
||||||
|
const idStr = id.toHexString();
|
||||||
|
|
||||||
|
const idx = docs.findIndex(d => {
|
||||||
|
const docId = d._id?.toHexString?.() || d._id?.toString?.() || d._id;
|
||||||
|
return docId === idStr;
|
||||||
|
});
|
||||||
|
|
||||||
|
if (idx === -1) return false;
|
||||||
|
|
||||||
|
docs.splice(idx, 1);
|
||||||
|
await this.writeJsonFile(collPath, docs);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
async deleteByIds(dbName: string, collName: string, ids: plugins.bson.ObjectId[]): Promise<number> {
|
||||||
|
const collPath = this.getCollectionPath(dbName, collName);
|
||||||
|
const docs = await this.readJsonFile<any[]>(collPath, []);
|
||||||
|
const idStrs = new Set(ids.map(id => id.toHexString()));
|
||||||
|
|
||||||
|
const originalLength = docs.length;
|
||||||
|
const filtered = docs.filter(d => {
|
||||||
|
const docId = d._id?.toHexString?.() || d._id?.toString?.() || d._id;
|
||||||
|
return !idStrs.has(docId);
|
||||||
|
});
|
||||||
|
|
||||||
|
await this.writeJsonFile(collPath, filtered);
|
||||||
|
return originalLength - filtered.length;
|
||||||
|
}
|
||||||
|
|
||||||
|
async count(dbName: string, collName: string): Promise<number> {
|
||||||
|
const collPath = this.getCollectionPath(dbName, collName);
|
||||||
|
const docs = await this.readJsonFile<any[]>(collPath, []);
|
||||||
|
return docs.length;
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Index Operations
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
async saveIndex(
|
||||||
|
dbName: string,
|
||||||
|
collName: string,
|
||||||
|
indexName: string,
|
||||||
|
indexSpec: { key: Record<string, any>; unique?: boolean; sparse?: boolean; expireAfterSeconds?: number }
|
||||||
|
): Promise<void> {
|
||||||
|
await this.createCollection(dbName, collName);
|
||||||
|
const indexPath = this.getIndexPath(dbName, collName);
|
||||||
|
const indexes = await this.readJsonFile<any[]>(indexPath, [
|
||||||
|
{ name: '_id_', key: { _id: 1 }, unique: true }
|
||||||
|
]);
|
||||||
|
|
||||||
|
const existingIdx = indexes.findIndex(i => i.name === indexName);
|
||||||
|
if (existingIdx >= 0) {
|
||||||
|
indexes[existingIdx] = { name: indexName, ...indexSpec };
|
||||||
|
} else {
|
||||||
|
indexes.push({ name: indexName, ...indexSpec });
|
||||||
|
}
|
||||||
|
|
||||||
|
await this.writeJsonFile(indexPath, indexes);
|
||||||
|
}
|
||||||
|
|
||||||
|
async getIndexes(dbName: string, collName: string): Promise<Array<{
|
||||||
|
name: string;
|
||||||
|
key: Record<string, any>;
|
||||||
|
unique?: boolean;
|
||||||
|
sparse?: boolean;
|
||||||
|
expireAfterSeconds?: number;
|
||||||
|
}>> {
|
||||||
|
const indexPath = this.getIndexPath(dbName, collName);
|
||||||
|
return this.readJsonFile(indexPath, [{ name: '_id_', key: { _id: 1 }, unique: true }]);
|
||||||
|
}
|
||||||
|
|
||||||
|
async dropIndex(dbName: string, collName: string, indexName: string): Promise<boolean> {
|
||||||
|
if (indexName === '_id_') {
|
||||||
|
throw new Error('Cannot drop _id index');
|
||||||
|
}
|
||||||
|
|
||||||
|
const indexPath = this.getIndexPath(dbName, collName);
|
||||||
|
const indexes = await this.readJsonFile<any[]>(indexPath, []);
|
||||||
|
|
||||||
|
const idx = indexes.findIndex(i => i.name === indexName);
|
||||||
|
if (idx >= 0) {
|
||||||
|
indexes.splice(idx, 1);
|
||||||
|
await this.writeJsonFile(indexPath, indexes);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// OpLog Operations
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
async appendOpLog(entry: IOpLogEntry): Promise<void> {
|
||||||
|
const opLogPath = this.getOpLogPath();
|
||||||
|
const opLog = await this.readJsonFile<IOpLogEntry[]>(opLogPath, []);
|
||||||
|
opLog.push(entry);
|
||||||
|
|
||||||
|
// Trim oplog if it gets too large
|
||||||
|
if (opLog.length > 10000) {
|
||||||
|
opLog.splice(0, opLog.length - 10000);
|
||||||
|
}
|
||||||
|
|
||||||
|
await this.writeJsonFile(opLogPath, opLog);
|
||||||
|
}
|
||||||
|
|
||||||
|
async getOpLogAfter(ts: plugins.bson.Timestamp, limit: number = 1000): Promise<IOpLogEntry[]> {
|
||||||
|
const opLogPath = this.getOpLogPath();
|
||||||
|
const opLog = await this.readJsonFile<any[]>(opLogPath, []);
|
||||||
|
const tsValue = ts.toNumber();
|
||||||
|
|
||||||
|
const entries = opLog.filter(e => {
|
||||||
|
const entryTs = e.ts.toNumber ? e.ts.toNumber() : (e.ts.t * 4294967296 + e.ts.i);
|
||||||
|
return entryTs > tsValue;
|
||||||
|
});
|
||||||
|
|
||||||
|
return entries.slice(0, limit);
|
||||||
|
}
|
||||||
|
|
||||||
|
async getLatestOpLogTimestamp(): Promise<plugins.bson.Timestamp | null> {
|
||||||
|
const opLogPath = this.getOpLogPath();
|
||||||
|
const opLog = await this.readJsonFile<any[]>(opLogPath, []);
|
||||||
|
if (opLog.length === 0) return null;
|
||||||
|
|
||||||
|
const last = opLog[opLog.length - 1];
|
||||||
|
if (last.ts instanceof plugins.bson.Timestamp) {
|
||||||
|
return last.ts;
|
||||||
|
}
|
||||||
|
return new plugins.bson.Timestamp({ t: last.ts.t, i: last.ts.i });
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Produce a BSON Timestamp for oplog entries: current UNIX seconds plus an
 * ever-incrementing per-adapter ordinal so successive calls never collide.
 */
generateTimestamp(): plugins.bson.Timestamp {
  const seconds = Math.floor(Date.now() / 1000);
  const ordinal = ++this.opLogCounter;
  return new plugins.bson.Timestamp({ t: seconds, i: ordinal });
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Transaction Support
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
async createSnapshot(dbName: string, collName: string): Promise<IStoredDocument[]> {
|
||||||
|
const docs = await this.findAll(dbName, collName);
|
||||||
|
return docs.map(doc => JSON.parse(JSON.stringify(doc)));
|
||||||
|
}
|
||||||
|
|
||||||
|
async hasConflicts(
|
||||||
|
dbName: string,
|
||||||
|
collName: string,
|
||||||
|
ids: plugins.bson.ObjectId[],
|
||||||
|
snapshotTime: plugins.bson.Timestamp
|
||||||
|
): Promise<boolean> {
|
||||||
|
const opLogPath = this.getOpLogPath();
|
||||||
|
const opLog = await this.readJsonFile<any[]>(opLogPath, []);
|
||||||
|
const ns = `${dbName}.${collName}`;
|
||||||
|
const snapshotTs = snapshotTime.toNumber();
|
||||||
|
const modifiedIds = new Set<string>();
|
||||||
|
|
||||||
|
for (const entry of opLog) {
|
||||||
|
const entryTs = entry.ts.toNumber ? entry.ts.toNumber() : (entry.ts.t * 4294967296 + entry.ts.i);
|
||||||
|
if (entryTs > snapshotTs && entry.ns === ns) {
|
||||||
|
if (entry.o._id) {
|
||||||
|
modifiedIds.add(entry.o._id.toString());
|
||||||
|
}
|
||||||
|
if (entry.o2?._id) {
|
||||||
|
modifiedIds.add(entry.o2._id.toString());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const id of ids) {
|
||||||
|
if (modifiedIds.has(id.toString())) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
208
ts/ts_tsmdb/storage/IStorageAdapter.ts
Normal file
208
ts/ts_tsmdb/storage/IStorageAdapter.ts
Normal file
@@ -0,0 +1,208 @@
|
|||||||
|
import type * as plugins from '../plugins.js';
|
||||||
|
import type { IStoredDocument, IOpLogEntry, Document } from '../types/interfaces.js';
|
||||||
|
|
||||||
|
/**
 * Storage adapter interface for TsmDB.
 * Implementations can provide different storage backends (memory, file, etc.);
 * the rest of TsmDB talks to storage exclusively through this contract, so any
 * implementation must honor the per-method notes below.
 */
export interface IStorageAdapter {
  /**
   * Initialize the storage adapter (e.g. open resources, restore persisted state).
   */
  initialize(): Promise<void>;

  /**
   * Close the storage adapter and release resources.
   */
  close(): Promise<void>;

  // ============================================================================
  // Database Operations
  // ============================================================================

  /**
   * List all database names
   */
  listDatabases(): Promise<string[]>;

  /**
   * Create a database (typically lazy - just marks it as existing)
   */
  createDatabase(dbName: string): Promise<void>;

  /**
   * Drop a database and all its collections
   * @returns true when the database existed
   */
  dropDatabase(dbName: string): Promise<boolean>;

  /**
   * Check if a database exists
   */
  databaseExists(dbName: string): Promise<boolean>;

  // ============================================================================
  // Collection Operations
  // ============================================================================

  /**
   * List all collection names in a database
   */
  listCollections(dbName: string): Promise<string[]>;

  /**
   * Create a collection (and its database, if needed)
   */
  createCollection(dbName: string, collName: string): Promise<void>;

  /**
   * Drop a collection
   * @returns true when the collection existed
   */
  dropCollection(dbName: string, collName: string): Promise<boolean>;

  /**
   * Check if a collection exists
   */
  collectionExists(dbName: string, collName: string): Promise<boolean>;

  /**
   * Rename a collection
   * @throws when the source collection does not exist
   */
  renameCollection(dbName: string, oldName: string, newName: string): Promise<void>;

  // ============================================================================
  // Document Operations
  // ============================================================================

  /**
   * Insert a single document
   * @returns The inserted document with _id
   */
  insertOne(dbName: string, collName: string, doc: Document): Promise<IStoredDocument>;

  /**
   * Insert multiple documents
   * @returns Array of inserted documents with _ids
   */
  insertMany(dbName: string, collName: string, docs: Document[]): Promise<IStoredDocument[]>;

  /**
   * Find all documents in a collection
   */
  findAll(dbName: string, collName: string): Promise<IStoredDocument[]>;

  /**
   * Find documents by a set of _id strings (hex format)
   * Used for index-accelerated queries
   */
  findByIds(dbName: string, collName: string, ids: Set<string>): Promise<IStoredDocument[]>;

  /**
   * Find a document by _id
   */
  findById(dbName: string, collName: string, id: plugins.bson.ObjectId): Promise<IStoredDocument | null>;

  /**
   * Update (replace) a document by _id
   * @returns true if document was updated
   */
  updateById(dbName: string, collName: string, id: plugins.bson.ObjectId, doc: IStoredDocument): Promise<boolean>;

  /**
   * Delete a document by _id
   * @returns true if document was deleted
   */
  deleteById(dbName: string, collName: string, id: plugins.bson.ObjectId): Promise<boolean>;

  /**
   * Delete multiple documents by _id
   * @returns Number of deleted documents
   */
  deleteByIds(dbName: string, collName: string, ids: plugins.bson.ObjectId[]): Promise<number>;

  /**
   * Get the count of documents in a collection
   */
  count(dbName: string, collName: string): Promise<number>;

  // ============================================================================
  // Index Operations
  // ============================================================================

  /**
   * Store index metadata (create or replace by index name)
   */
  saveIndex(
    dbName: string,
    collName: string,
    indexName: string,
    indexSpec: { key: Record<string, any>; unique?: boolean; sparse?: boolean; expireAfterSeconds?: number }
  ): Promise<void>;

  /**
   * Get all index metadata for a collection
   */
  getIndexes(dbName: string, collName: string): Promise<Array<{
    name: string;
    key: Record<string, any>;
    unique?: boolean;
    sparse?: boolean;
    expireAfterSeconds?: number;
  }>>;

  /**
   * Drop an index
   * @returns true when the index existed and was removed
   */
  dropIndex(dbName: string, collName: string, indexName: string): Promise<boolean>;

  // ============================================================================
  // OpLog Operations (for change streams)
  // ============================================================================

  /**
   * Append an operation to the oplog
   */
  appendOpLog(entry: IOpLogEntry): Promise<void>;

  /**
   * Get oplog entries strictly after a timestamp
   */
  getOpLogAfter(ts: plugins.bson.Timestamp, limit?: number): Promise<IOpLogEntry[]>;

  /**
   * Get the latest oplog timestamp, or null when the oplog is empty
   */
  getLatestOpLogTimestamp(): Promise<plugins.bson.Timestamp | null>;

  // ============================================================================
  // Transaction Support
  // ============================================================================

  /**
   * Create a snapshot of current data for transaction isolation
   */
  createSnapshot(dbName: string, collName: string): Promise<IStoredDocument[]>;

  /**
   * Check if any of the given documents have been modified since the snapshot
   */
  hasConflicts(
    dbName: string,
    collName: string,
    ids: plugins.bson.ObjectId[],
    snapshotTime: plugins.bson.Timestamp
  ): Promise<boolean>;

  // ============================================================================
  // Persistence (optional, for MemoryStorageAdapter with file backup)
  // ============================================================================

  /**
   * Persist current state to disk (if supported)
   */
  persist?(): Promise<void>;

  /**
   * Load state from disk (if supported)
   */
  restore?(): Promise<void>;
}
|
||||||
455
ts/ts_tsmdb/storage/MemoryStorageAdapter.ts
Normal file
455
ts/ts_tsmdb/storage/MemoryStorageAdapter.ts
Normal file
@@ -0,0 +1,455 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import type { IStorageAdapter } from './IStorageAdapter.js';
|
||||||
|
import type { IStoredDocument, IOpLogEntry, Document } from '../types/interfaces.js';
|
||||||
|
|
||||||
|
/**
 * In-memory storage adapter for TsmDB.
 * All data lives in nested Maps keyed by database name -> collection name ->
 * _id hex string. Optionally supports persistence to a JSON file (written on
 * an interval and on close, restored on initialize).
 */
export class MemoryStorageAdapter implements IStorageAdapter {
  // Database -> Collection -> Documents (keyed by _id hex string)
  private databases: Map<string, Map<string, Map<string, IStoredDocument>>> = new Map();

  // Database -> Collection -> Indexes (metadata only; not used for lookups here)
  private indexes: Map<string, Map<string, Array<{
    name: string;
    key: Record<string, any>;
    unique?: boolean;
    sparse?: boolean;
    expireAfterSeconds?: number;
  }>>> = new Map();

  // OpLog entries (append-only, trimmed to the newest 10000)
  private opLog: IOpLogEntry[] = [];
  // Monotonic ordinal used as the `i` component of generated Timestamps
  private opLogCounter = 0;

  // Persistence settings
  private persistPath?: string;
  private persistInterval?: ReturnType<typeof setInterval>;
  private fs = new plugins.smartfs.SmartFs(new plugins.smartfs.SmartFsProviderNode());

  /**
   * @param options.persistPath      JSON file to persist/restore state to/from
   * @param options.persistIntervalMs periodic auto-persist interval; only
   *                                  active when persistPath is also set
   */
  constructor(options?: { persistPath?: string; persistIntervalMs?: number }) {
    this.persistPath = options?.persistPath;
    if (this.persistPath && options?.persistIntervalMs) {
      this.persistInterval = setInterval(() => {
        // Fire-and-forget; a failed background persist is logged, not thrown.
        this.persist().catch(console.error);
      }, options.persistIntervalMs);
    }
  }

  /** Restore persisted state (if a persistPath was configured). */
  async initialize(): Promise<void> {
    if (this.persistPath) {
      await this.restore();
    }
  }

  /** Stop the auto-persist timer and flush state to disk one final time. */
  async close(): Promise<void> {
    if (this.persistInterval) {
      clearInterval(this.persistInterval);
    }
    if (this.persistPath) {
      await this.persist();
    }
  }

  // ============================================================================
  // Database Operations
  // ============================================================================

  async listDatabases(): Promise<string[]> {
    return Array.from(this.databases.keys());
  }

  /** Idempotent: creating an existing database is a no-op. */
  async createDatabase(dbName: string): Promise<void> {
    if (!this.databases.has(dbName)) {
      this.databases.set(dbName, new Map());
      this.indexes.set(dbName, new Map());
    }
  }

  /** @returns true when the database existed before the drop. */
  async dropDatabase(dbName: string): Promise<boolean> {
    const existed = this.databases.has(dbName);
    this.databases.delete(dbName);
    this.indexes.delete(dbName);
    return existed;
  }

  async databaseExists(dbName: string): Promise<boolean> {
    return this.databases.has(dbName);
  }

  // ============================================================================
  // Collection Operations
  // ============================================================================

  async listCollections(dbName: string): Promise<string[]> {
    const db = this.databases.get(dbName);
    return db ? Array.from(db.keys()) : [];
  }

  /** Idempotent; also creates the database and seeds the implicit _id index. */
  async createCollection(dbName: string, collName: string): Promise<void> {
    await this.createDatabase(dbName);
    const db = this.databases.get(dbName)!;
    if (!db.has(collName)) {
      db.set(collName, new Map());
      // Initialize default _id index
      const dbIndexes = this.indexes.get(dbName)!;
      dbIndexes.set(collName, [{ name: '_id_', key: { _id: 1 }, unique: true }]);
    }
  }

  /** @returns true when the collection existed before the drop. */
  async dropCollection(dbName: string, collName: string): Promise<boolean> {
    const db = this.databases.get(dbName);
    if (!db) return false;
    const existed = db.has(collName);
    db.delete(collName);
    const dbIndexes = this.indexes.get(dbName);
    if (dbIndexes) {
      dbIndexes.delete(collName);
    }
    return existed;
  }

  async collectionExists(dbName: string, collName: string): Promise<boolean> {
    const db = this.databases.get(dbName);
    return db ? db.has(collName) : false;
  }

  /**
   * Rename a collection in place (document map and index metadata both move).
   * @throws Error when the source collection does not exist.
   */
  async renameCollection(dbName: string, oldName: string, newName: string): Promise<void> {
    const db = this.databases.get(dbName);
    if (!db || !db.has(oldName)) {
      throw new Error(`Collection ${oldName} not found`);
    }
    const collection = db.get(oldName)!;
    db.set(newName, collection);
    db.delete(oldName);

    // Also rename indexes
    const dbIndexes = this.indexes.get(dbName);
    if (dbIndexes && dbIndexes.has(oldName)) {
      const collIndexes = dbIndexes.get(oldName)!;
      dbIndexes.set(newName, collIndexes);
      dbIndexes.delete(oldName);
    }
  }

  // ============================================================================
  // Document Operations
  // ============================================================================

  // Strict lookup: throws when the database or collection is missing.
  private getCollection(dbName: string, collName: string): Map<string, IStoredDocument> {
    const db = this.databases.get(dbName);
    if (!db) {
      throw new Error(`Database ${dbName} not found`);
    }
    const collection = db.get(collName);
    if (!collection) {
      throw new Error(`Collection ${collName} not found`);
    }
    return collection;
  }

  // Lazy lookup: creates database, collection, and default _id index on demand.
  // Synchronous twin of createCollection, used by the document operations below.
  private ensureCollection(dbName: string, collName: string): Map<string, IStoredDocument> {
    if (!this.databases.has(dbName)) {
      this.databases.set(dbName, new Map());
      this.indexes.set(dbName, new Map());
    }
    const db = this.databases.get(dbName)!;
    if (!db.has(collName)) {
      db.set(collName, new Map());
      const dbIndexes = this.indexes.get(dbName)!;
      dbIndexes.set(collName, [{ name: '_id_', key: { _id: 1 }, unique: true }]);
    }
    return db.get(collName)!;
  }

  /**
   * Insert one document, coercing _id to an ObjectId (a fresh one is
   * generated by the ObjectId constructor when doc._id is absent).
   * @throws Error on a duplicate _id.
   */
  async insertOne(dbName: string, collName: string, doc: Document): Promise<IStoredDocument> {
    const collection = this.ensureCollection(dbName, collName);
    const storedDoc: IStoredDocument = {
      ...doc,
      _id: doc._id instanceof plugins.bson.ObjectId ? doc._id : new plugins.bson.ObjectId(doc._id),
    };

    // NOTE(review): the ternary above always yields an ObjectId, so this
    // fallback looks unreachable — confirm and consider removing.
    if (!storedDoc._id) {
      storedDoc._id = new plugins.bson.ObjectId();
    }

    const idStr = storedDoc._id.toHexString();
    if (collection.has(idStr)) {
      throw new Error(`Duplicate key error: _id ${idStr}`);
    }

    collection.set(idStr, storedDoc);
    return storedDoc;
  }

  /**
   * Insert documents sequentially via insertOne.
   * Not atomic: a duplicate-key failure leaves earlier inserts in place.
   */
  async insertMany(dbName: string, collName: string, docs: Document[]): Promise<IStoredDocument[]> {
    const results: IStoredDocument[] = [];
    for (const doc of docs) {
      results.push(await this.insertOne(dbName, collName, doc));
    }
    return results;
  }

  async findAll(dbName: string, collName: string): Promise<IStoredDocument[]> {
    const collection = this.ensureCollection(dbName, collName);
    return Array.from(collection.values());
  }

  /** Direct Map lookups by _id hex string; missing ids are silently skipped. */
  async findByIds(dbName: string, collName: string, ids: Set<string>): Promise<IStoredDocument[]> {
    const collection = this.ensureCollection(dbName, collName);
    const results: IStoredDocument[] = [];
    for (const id of ids) {
      const doc = collection.get(id);
      if (doc) {
        results.push(doc);
      }
    }
    return results;
  }

  async findById(dbName: string, collName: string, id: plugins.bson.ObjectId): Promise<IStoredDocument | null> {
    const collection = this.ensureCollection(dbName, collName);
    return collection.get(id.toHexString()) || null;
  }

  /** Full replacement of the stored document; @returns true when found. */
  async updateById(dbName: string, collName: string, id: plugins.bson.ObjectId, doc: IStoredDocument): Promise<boolean> {
    const collection = this.ensureCollection(dbName, collName);
    const idStr = id.toHexString();
    if (!collection.has(idStr)) {
      return false;
    }
    collection.set(idStr, doc);
    return true;
  }

  /** @returns true when a document was removed. */
  async deleteById(dbName: string, collName: string, id: plugins.bson.ObjectId): Promise<boolean> {
    const collection = this.ensureCollection(dbName, collName);
    return collection.delete(id.toHexString());
  }

  /** @returns the number of documents actually removed. */
  async deleteByIds(dbName: string, collName: string, ids: plugins.bson.ObjectId[]): Promise<number> {
    let count = 0;
    for (const id of ids) {
      if (await this.deleteById(dbName, collName, id)) {
        count++;
      }
    }
    return count;
  }

  async count(dbName: string, collName: string): Promise<number> {
    const collection = this.ensureCollection(dbName, collName);
    return collection.size;
  }

  // ============================================================================
  // Index Operations
  // ============================================================================

  /** Create or replace index metadata (by index name) for a collection. */
  async saveIndex(
    dbName: string,
    collName: string,
    indexName: string,
    indexSpec: { key: Record<string, any>; unique?: boolean; sparse?: boolean; expireAfterSeconds?: number }
  ): Promise<void> {
    await this.createCollection(dbName, collName);
    const dbIndexes = this.indexes.get(dbName)!;
    let collIndexes = dbIndexes.get(collName);
    if (!collIndexes) {
      collIndexes = [{ name: '_id_', key: { _id: 1 }, unique: true }];
      dbIndexes.set(collName, collIndexes);
    }

    // Check if index already exists
    const existingIndex = collIndexes.findIndex(i => i.name === indexName);
    if (existingIndex >= 0) {
      collIndexes[existingIndex] = { name: indexName, ...indexSpec };
    } else {
      collIndexes.push({ name: indexName, ...indexSpec });
    }
  }

  /** Falls back to the implicit _id index when no metadata is stored. */
  async getIndexes(dbName: string, collName: string): Promise<Array<{
    name: string;
    key: Record<string, any>;
    unique?: boolean;
    sparse?: boolean;
    expireAfterSeconds?: number;
  }>> {
    const dbIndexes = this.indexes.get(dbName);
    if (!dbIndexes) return [{ name: '_id_', key: { _id: 1 }, unique: true }];
    const collIndexes = dbIndexes.get(collName);
    return collIndexes || [{ name: '_id_', key: { _id: 1 }, unique: true }];
  }

  /**
   * Remove a named index. The implicit _id index cannot be dropped.
   * @returns true when the index existed and was removed.
   */
  async dropIndex(dbName: string, collName: string, indexName: string): Promise<boolean> {
    if (indexName === '_id_') {
      throw new Error('Cannot drop _id index');
    }
    const dbIndexes = this.indexes.get(dbName);
    if (!dbIndexes) return false;
    const collIndexes = dbIndexes.get(collName);
    if (!collIndexes) return false;

    const idx = collIndexes.findIndex(i => i.name === indexName);
    if (idx >= 0) {
      collIndexes.splice(idx, 1);
      return true;
    }
    return false;
  }

  // ============================================================================
  // OpLog Operations
  // ============================================================================

  async appendOpLog(entry: IOpLogEntry): Promise<void> {
    this.opLog.push(entry);
    // Trim oplog if it gets too large (keep last 10000 entries)
    if (this.opLog.length > 10000) {
      this.opLog = this.opLog.slice(-10000);
    }
  }

  /** Entries strictly newer than `ts`, capped at `limit`. */
  async getOpLogAfter(ts: plugins.bson.Timestamp, limit: number = 1000): Promise<IOpLogEntry[]> {
    const tsValue = ts.toNumber();
    const entries = this.opLog.filter(e => e.ts.toNumber() > tsValue);
    return entries.slice(0, limit);
  }

  async getLatestOpLogTimestamp(): Promise<plugins.bson.Timestamp | null> {
    if (this.opLog.length === 0) return null;
    return this.opLog[this.opLog.length - 1].ts;
  }

  /**
   * Generate a new timestamp for oplog entries: current UNIX seconds plus a
   * monotonically increasing ordinal.
   */
  generateTimestamp(): plugins.bson.Timestamp {
    this.opLogCounter++;
    return new plugins.bson.Timestamp({ t: Math.floor(Date.now() / 1000), i: this.opLogCounter });
  }

  // ============================================================================
  // Transaction Support
  // ============================================================================

  /**
   * Deep-copy the collection for snapshot isolation.
   * NOTE(review): the JSON round-trip turns ObjectId instances into plain
   * serialized data — confirm snapshot consumers only compare, never write back.
   */
  async createSnapshot(dbName: string, collName: string): Promise<IStoredDocument[]> {
    const docs = await this.findAll(dbName, collName);
    // Deep clone the documents for snapshot isolation
    return docs.map(doc => JSON.parse(JSON.stringify(doc)));
  }

  /**
   * Check whether any of the given document IDs were modified (per the oplog)
   * in namespace `dbName.collName` after snapshotTime.
   */
  async hasConflicts(
    dbName: string,
    collName: string,
    ids: plugins.bson.ObjectId[],
    snapshotTime: plugins.bson.Timestamp
  ): Promise<boolean> {
    // Check if any of the given document IDs have been modified after snapshotTime
    const ns = `${dbName}.${collName}`;
    const modifiedIds = new Set<string>();

    for (const entry of this.opLog) {
      if (entry.ts.greaterThan(snapshotTime) && entry.ns === ns) {
        if (entry.o._id) {
          modifiedIds.add(entry.o._id.toString());
        }
        if (entry.o2?._id) {
          modifiedIds.add(entry.o2._id.toString());
        }
      }
    }

    for (const id of ids) {
      if (modifiedIds.has(id.toString())) {
        return true;
      }
    }

    return false;
  }

  // ============================================================================
  // Persistence
  // ============================================================================

  /**
   * Serialize all databases, index metadata, and the oplog counter to the
   * configured JSON file. No-op when persistence is not configured.
   */
  async persist(): Promise<void> {
    if (!this.persistPath) return;

    const data = {
      databases: {} as Record<string, Record<string, IStoredDocument[]>>,
      indexes: {} as Record<string, Record<string, any[]>>,
      opLogCounter: this.opLogCounter,
    };

    for (const [dbName, collections] of this.databases) {
      data.databases[dbName] = {};
      for (const [collName, docs] of collections) {
        data.databases[dbName][collName] = Array.from(docs.values());
      }
    }

    for (const [dbName, collIndexes] of this.indexes) {
      data.indexes[dbName] = {};
      for (const [collName, indexes] of collIndexes) {
        data.indexes[dbName][collName] = indexes;
      }
    }

    // Ensure parent directory exists
    // NOTE(review): '/'-based split assumes POSIX-style paths — confirm
    // Windows paths are normalized before reaching this adapter.
    const dir = this.persistPath.substring(0, this.persistPath.lastIndexOf('/'));
    if (dir) {
      await this.fs.directory(dir).recursive().create();
    }
    await this.fs.file(this.persistPath).encoding('utf8').write(JSON.stringify(data, null, 2));
  }

  /**
   * Load previously persisted state, rebuilding the nested Maps and
   * rehydrating _id values into ObjectId instances. Any failure is logged
   * and the adapter starts empty instead of throwing.
   */
  async restore(): Promise<void> {
    if (!this.persistPath) return;

    try {
      const exists = await this.fs.file(this.persistPath).exists();
      if (!exists) return;

      const content = await this.fs.file(this.persistPath).encoding('utf8').read();
      const data = JSON.parse(content as string);

      this.databases.clear();
      this.indexes.clear();

      for (const [dbName, collections] of Object.entries(data.databases || {})) {
        const dbMap = new Map<string, Map<string, IStoredDocument>>();
        this.databases.set(dbName, dbMap);

        for (const [collName, docs] of Object.entries(collections as Record<string, any[]>)) {
          const collMap = new Map<string, IStoredDocument>();
          for (const doc of docs) {
            // Restore ObjectId from either a hex string or an EJSON {$oid} object
            if (doc._id && typeof doc._id === 'string') {
              doc._id = new plugins.bson.ObjectId(doc._id);
            } else if (doc._id && typeof doc._id === 'object' && doc._id.$oid) {
              doc._id = new plugins.bson.ObjectId(doc._id.$oid);
            }
            collMap.set(doc._id.toHexString(), doc);
          }
          dbMap.set(collName, collMap);
        }
      }

      for (const [dbName, collIndexes] of Object.entries(data.indexes || {})) {
        const indexMap = new Map<string, any[]>();
        this.indexes.set(dbName, indexMap);
        for (const [collName, indexes] of Object.entries(collIndexes as Record<string, any[]>)) {
          indexMap.set(collName, indexes);
        }
      }

      this.opLogCounter = data.opLogCounter || 0;
    } catch (error) {
      // If restore fails, start fresh
      console.warn('Failed to restore from persistence:', error);
    }
  }
}
|
||||||
282
ts/ts_tsmdb/storage/OpLog.ts
Normal file
282
ts/ts_tsmdb/storage/OpLog.ts
Normal file
@@ -0,0 +1,282 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import type { IStorageAdapter } from './IStorageAdapter.js';
|
||||||
|
import type { IOpLogEntry, Document, IResumeToken, ChangeStreamOperationType } from '../types/interfaces.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Operation Log for tracking all mutations
|
||||||
|
* Used primarily for change stream support
|
||||||
|
*/
|
||||||
|
export class OpLog {
|
||||||
|
private storage: IStorageAdapter;
|
||||||
|
private counter = 0;
|
||||||
|
private listeners: Array<(entry: IOpLogEntry) => void> = [];
|
||||||
|
|
||||||
|
constructor(storage: IStorageAdapter) {
|
||||||
|
this.storage = storage;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate a new timestamp for oplog entries
|
||||||
|
*/
|
||||||
|
generateTimestamp(): plugins.bson.Timestamp {
|
||||||
|
this.counter++;
|
||||||
|
return new plugins.bson.Timestamp({ t: Math.floor(Date.now() / 1000), i: this.counter });
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate a resume token from a timestamp
|
||||||
|
*/
|
||||||
|
generateResumeToken(ts: plugins.bson.Timestamp): IResumeToken {
|
||||||
|
// Create a resume token similar to MongoDB's format
|
||||||
|
// It's a base64-encoded BSON document containing the timestamp
|
||||||
|
const tokenData = {
|
||||||
|
_data: Buffer.from(JSON.stringify({
|
||||||
|
ts: { t: ts.high, i: ts.low },
|
||||||
|
version: 1,
|
||||||
|
})).toString('base64'),
|
||||||
|
};
|
||||||
|
return tokenData;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse a resume token to get the timestamp
|
||||||
|
*/
|
||||||
|
parseResumeToken(token: IResumeToken): plugins.bson.Timestamp {
|
||||||
|
try {
|
||||||
|
const data = JSON.parse(Buffer.from(token._data, 'base64').toString('utf-8'));
|
||||||
|
return new plugins.bson.Timestamp({ t: data.ts.t, i: data.ts.i });
|
||||||
|
} catch {
|
||||||
|
throw new Error('Invalid resume token');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Record an insert operation
|
||||||
|
*/
|
||||||
|
async recordInsert(
|
||||||
|
dbName: string,
|
||||||
|
collName: string,
|
||||||
|
document: Document,
|
||||||
|
txnInfo?: { txnNumber?: number; lsid?: { id: plugins.bson.Binary } }
|
||||||
|
): Promise<IOpLogEntry> {
|
||||||
|
const entry: IOpLogEntry = {
|
||||||
|
ts: this.generateTimestamp(),
|
||||||
|
op: 'i',
|
||||||
|
ns: `${dbName}.${collName}`,
|
||||||
|
o: document,
|
||||||
|
...txnInfo,
|
||||||
|
};
|
||||||
|
|
||||||
|
await this.storage.appendOpLog(entry);
|
||||||
|
this.notifyListeners(entry);
|
||||||
|
return entry;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Record an update operation
|
||||||
|
*/
|
||||||
|
async recordUpdate(
|
||||||
|
dbName: string,
|
||||||
|
collName: string,
|
||||||
|
filter: Document,
|
||||||
|
update: Document,
|
||||||
|
txnInfo?: { txnNumber?: number; lsid?: { id: plugins.bson.Binary } }
|
||||||
|
): Promise<IOpLogEntry> {
|
||||||
|
const entry: IOpLogEntry = {
|
||||||
|
ts: this.generateTimestamp(),
|
||||||
|
op: 'u',
|
||||||
|
ns: `${dbName}.${collName}`,
|
||||||
|
o: update,
|
||||||
|
o2: filter,
|
||||||
|
...txnInfo,
|
||||||
|
};
|
||||||
|
|
||||||
|
await this.storage.appendOpLog(entry);
|
||||||
|
this.notifyListeners(entry);
|
||||||
|
return entry;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Record a delete operation
|
||||||
|
*/
|
||||||
|
async recordDelete(
|
||||||
|
dbName: string,
|
||||||
|
collName: string,
|
||||||
|
filter: Document,
|
||||||
|
txnInfo?: { txnNumber?: number; lsid?: { id: plugins.bson.Binary } }
|
||||||
|
): Promise<IOpLogEntry> {
|
||||||
|
const entry: IOpLogEntry = {
|
||||||
|
ts: this.generateTimestamp(),
|
||||||
|
op: 'd',
|
||||||
|
ns: `${dbName}.${collName}`,
|
||||||
|
o: filter,
|
||||||
|
...txnInfo,
|
||||||
|
};
|
||||||
|
|
||||||
|
await this.storage.appendOpLog(entry);
|
||||||
|
this.notifyListeners(entry);
|
||||||
|
return entry;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Record a command (drop, rename, etc.)
|
||||||
|
*/
|
||||||
|
async recordCommand(
|
||||||
|
dbName: string,
|
||||||
|
command: Document
|
||||||
|
): Promise<IOpLogEntry> {
|
||||||
|
const entry: IOpLogEntry = {
|
||||||
|
ts: this.generateTimestamp(),
|
||||||
|
op: 'c',
|
||||||
|
ns: `${dbName}.$cmd`,
|
||||||
|
o: command,
|
||||||
|
};
|
||||||
|
|
||||||
|
await this.storage.appendOpLog(entry);
|
||||||
|
this.notifyListeners(entry);
|
||||||
|
return entry;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get oplog entries after a timestamp
|
||||||
|
*/
|
||||||
|
async getEntriesAfter(ts: plugins.bson.Timestamp, limit?: number): Promise<IOpLogEntry[]> {
|
||||||
|
return this.storage.getOpLogAfter(ts, limit);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the latest timestamp
|
||||||
|
*/
|
||||||
|
async getLatestTimestamp(): Promise<plugins.bson.Timestamp | null> {
|
||||||
|
return this.storage.getLatestOpLogTimestamp();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Subscribe to oplog changes (for change streams)
|
||||||
|
*/
|
||||||
|
subscribe(listener: (entry: IOpLogEntry) => void): () => void {
|
||||||
|
this.listeners.push(listener);
|
||||||
|
return () => {
|
||||||
|
const idx = this.listeners.indexOf(listener);
|
||||||
|
if (idx >= 0) {
|
||||||
|
this.listeners.splice(idx, 1);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Notify all listeners of a new entry
|
||||||
|
*/
|
||||||
|
private notifyListeners(entry: IOpLogEntry): void {
|
||||||
|
for (const listener of this.listeners) {
|
||||||
|
try {
|
||||||
|
listener(entry);
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error in oplog listener:', error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convert an oplog entry to a change stream document
|
||||||
|
*/
|
||||||
|
opLogEntryToChangeEvent(
|
||||||
|
entry: IOpLogEntry,
|
||||||
|
fullDocument?: Document,
|
||||||
|
fullDocumentBeforeChange?: Document
|
||||||
|
): {
|
||||||
|
_id: IResumeToken;
|
||||||
|
operationType: ChangeStreamOperationType;
|
||||||
|
fullDocument?: Document;
|
||||||
|
fullDocumentBeforeChange?: Document;
|
||||||
|
ns: { db: string; coll?: string };
|
||||||
|
documentKey?: { _id: plugins.bson.ObjectId };
|
||||||
|
updateDescription?: {
|
||||||
|
updatedFields?: Document;
|
||||||
|
removedFields?: string[];
|
||||||
|
};
|
||||||
|
clusterTime: plugins.bson.Timestamp;
|
||||||
|
} {
|
||||||
|
const [db, coll] = entry.ns.split('.');
|
||||||
|
const resumeToken = this.generateResumeToken(entry.ts);
|
||||||
|
|
||||||
|
const baseEvent = {
|
||||||
|
_id: resumeToken,
|
||||||
|
ns: { db, coll: coll === '$cmd' ? undefined : coll },
|
||||||
|
clusterTime: entry.ts,
|
||||||
|
};
|
||||||
|
|
||||||
|
switch (entry.op) {
|
||||||
|
case 'i':
|
||||||
|
return {
|
||||||
|
...baseEvent,
|
||||||
|
operationType: 'insert' as ChangeStreamOperationType,
|
||||||
|
fullDocument: fullDocument || entry.o,
|
||||||
|
documentKey: entry.o._id ? { _id: entry.o._id } : undefined,
|
||||||
|
};
|
||||||
|
|
||||||
|
case 'u':
|
||||||
|
const updateEvent: any = {
|
||||||
|
...baseEvent,
|
||||||
|
operationType: 'update' as ChangeStreamOperationType,
|
||||||
|
documentKey: entry.o2?._id ? { _id: entry.o2._id } : undefined,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (fullDocument) {
|
||||||
|
updateEvent.fullDocument = fullDocument;
|
||||||
|
}
|
||||||
|
if (fullDocumentBeforeChange) {
|
||||||
|
updateEvent.fullDocumentBeforeChange = fullDocumentBeforeChange;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse update description
|
||||||
|
if (entry.o.$set || entry.o.$unset) {
|
||||||
|
updateEvent.updateDescription = {
|
||||||
|
updatedFields: entry.o.$set || {},
|
||||||
|
removedFields: entry.o.$unset ? Object.keys(entry.o.$unset) : [],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return updateEvent;
|
||||||
|
|
||||||
|
case 'd':
|
||||||
|
return {
|
||||||
|
...baseEvent,
|
||||||
|
operationType: 'delete' as ChangeStreamOperationType,
|
||||||
|
documentKey: entry.o._id ? { _id: entry.o._id } : undefined,
|
||||||
|
fullDocumentBeforeChange,
|
||||||
|
};
|
||||||
|
|
||||||
|
case 'c':
|
||||||
|
if (entry.o.drop) {
|
||||||
|
return {
|
||||||
|
...baseEvent,
|
||||||
|
operationType: 'drop' as ChangeStreamOperationType,
|
||||||
|
ns: { db, coll: entry.o.drop },
|
||||||
|
};
|
||||||
|
}
|
||||||
|
if (entry.o.dropDatabase) {
|
||||||
|
return {
|
||||||
|
...baseEvent,
|
||||||
|
operationType: 'dropDatabase' as ChangeStreamOperationType,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
if (entry.o.renameCollection) {
|
||||||
|
return {
|
||||||
|
...baseEvent,
|
||||||
|
operationType: 'rename' as ChangeStreamOperationType,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
...baseEvent,
|
||||||
|
operationType: 'invalidate' as ChangeStreamOperationType,
|
||||||
|
};
|
||||||
|
|
||||||
|
default:
|
||||||
|
return {
|
||||||
|
...baseEvent,
|
||||||
|
operationType: 'invalidate' as ChangeStreamOperationType,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
375
ts/ts_tsmdb/storage/WAL.ts
Normal file
375
ts/ts_tsmdb/storage/WAL.ts
Normal file
@@ -0,0 +1,375 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import type { Document, IStoredDocument } from '../types/interfaces.js';
|
||||||
|
|
||||||
|
/**
 * WAL entry operation types
 * - insert/update/delete: data mutations
 * - checkpoint: marks a consistent truncation point in the log
 * - begin/commit/abort: transaction lifecycle markers
 */
export type TWalOperation = 'insert' | 'update' | 'delete' | 'checkpoint' | 'begin' | 'commit' | 'abort';

/**
 * WAL entry structure — serialized as one JSON object per line in the log file.
 */
export interface IWalEntry {
  /** Log Sequence Number - monotonically increasing */
  lsn: number;
  /** Timestamp of the operation */
  timestamp: number;
  /** Operation type */
  operation: TWalOperation;
  /** Database name */
  dbName: string;
  /** Collection name */
  collName: string;
  /** Document ID (hex string) */
  documentId: string;
  /** Document data (BSON serialized, base64 encoded) */
  data?: string;
  /** Previous document data for updates (for rollback) */
  previousData?: string;
  /** Transaction ID if part of a transaction */
  txnId?: string;
  /** CRC32 checksum of the entry (excluding this field) */
  checksum: number;
}

/**
 * Checkpoint record
 * NOTE(review): declared but not referenced anywhere in this file's visible
 * code — confirm whether it is still needed or can be removed.
 */
interface ICheckpointRecord {
  lsn: number;
  timestamp: number;
  lastCommittedLsn: number;
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Write-Ahead Log (WAL) for durability and crash recovery
|
||||||
|
*
|
||||||
|
* The WAL ensures durability by writing operations to a log file before
|
||||||
|
* they are applied to the main storage. On crash recovery, uncommitted
|
||||||
|
* operations can be replayed to restore the database to a consistent state.
|
||||||
|
*/
|
||||||
|
export class WAL {
|
||||||
|
private walPath: string;
|
||||||
|
private currentLsn: number = 0;
|
||||||
|
private lastCheckpointLsn: number = 0;
|
||||||
|
private entries: IWalEntry[] = [];
|
||||||
|
private isInitialized: boolean = false;
|
||||||
|
private fs = new plugins.smartfs.SmartFs(new plugins.smartfs.SmartFsProviderNode());
|
||||||
|
|
||||||
|
// In-memory uncommitted entries per transaction
|
||||||
|
private uncommittedTxns: Map<string, IWalEntry[]> = new Map();
|
||||||
|
|
||||||
|
// Checkpoint interval (number of entries between checkpoints)
|
||||||
|
private checkpointInterval: number = 1000;
|
||||||
|
|
||||||
|
constructor(walPath: string, options?: { checkpointInterval?: number }) {
|
||||||
|
this.walPath = walPath;
|
||||||
|
if (options?.checkpointInterval) {
|
||||||
|
this.checkpointInterval = options.checkpointInterval;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initialize the WAL, loading existing entries and recovering if needed
|
||||||
|
*/
|
||||||
|
async initialize(): Promise<{ recoveredEntries: IWalEntry[] }> {
|
||||||
|
if (this.isInitialized) {
|
||||||
|
return { recoveredEntries: [] };
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ensure WAL directory exists
|
||||||
|
const walDir = this.walPath.substring(0, this.walPath.lastIndexOf('/'));
|
||||||
|
if (walDir) {
|
||||||
|
await this.fs.directory(walDir).recursive().create();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Try to load existing WAL
|
||||||
|
const exists = await this.fs.file(this.walPath).exists();
|
||||||
|
if (exists) {
|
||||||
|
const content = await this.fs.file(this.walPath).encoding('utf8').read();
|
||||||
|
const lines = (content as string).split('\n').filter(line => line.trim());
|
||||||
|
|
||||||
|
for (const line of lines) {
|
||||||
|
try {
|
||||||
|
const entry = JSON.parse(line) as IWalEntry;
|
||||||
|
// Verify checksum
|
||||||
|
if (this.verifyChecksum(entry)) {
|
||||||
|
this.entries.push(entry);
|
||||||
|
if (entry.lsn > this.currentLsn) {
|
||||||
|
this.currentLsn = entry.lsn;
|
||||||
|
}
|
||||||
|
if (entry.operation === 'checkpoint') {
|
||||||
|
this.lastCheckpointLsn = entry.lsn;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// Skip corrupted entries
|
||||||
|
console.warn('Skipping corrupted WAL entry');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.isInitialized = true;
|
||||||
|
|
||||||
|
// Return entries after last checkpoint that need recovery
|
||||||
|
const recoveredEntries = this.entries.filter(
|
||||||
|
e => e.lsn > this.lastCheckpointLsn &&
|
||||||
|
(e.operation === 'insert' || e.operation === 'update' || e.operation === 'delete')
|
||||||
|
);
|
||||||
|
|
||||||
|
return { recoveredEntries };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Log an insert operation
|
||||||
|
*/
|
||||||
|
async logInsert(dbName: string, collName: string, doc: IStoredDocument, txnId?: string): Promise<number> {
|
||||||
|
return this.appendEntry({
|
||||||
|
operation: 'insert',
|
||||||
|
dbName,
|
||||||
|
collName,
|
||||||
|
documentId: doc._id.toHexString(),
|
||||||
|
data: this.serializeDocument(doc),
|
||||||
|
txnId,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Log an update operation
|
||||||
|
*/
|
||||||
|
async logUpdate(
|
||||||
|
dbName: string,
|
||||||
|
collName: string,
|
||||||
|
oldDoc: IStoredDocument,
|
||||||
|
newDoc: IStoredDocument,
|
||||||
|
txnId?: string
|
||||||
|
): Promise<number> {
|
||||||
|
return this.appendEntry({
|
||||||
|
operation: 'update',
|
||||||
|
dbName,
|
||||||
|
collName,
|
||||||
|
documentId: oldDoc._id.toHexString(),
|
||||||
|
data: this.serializeDocument(newDoc),
|
||||||
|
previousData: this.serializeDocument(oldDoc),
|
||||||
|
txnId,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Log a delete operation
|
||||||
|
*/
|
||||||
|
async logDelete(dbName: string, collName: string, doc: IStoredDocument, txnId?: string): Promise<number> {
|
||||||
|
return this.appendEntry({
|
||||||
|
operation: 'delete',
|
||||||
|
dbName,
|
||||||
|
collName,
|
||||||
|
documentId: doc._id.toHexString(),
|
||||||
|
previousData: this.serializeDocument(doc),
|
||||||
|
txnId,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Log transaction begin
|
||||||
|
*/
|
||||||
|
async logBeginTransaction(txnId: string): Promise<number> {
|
||||||
|
this.uncommittedTxns.set(txnId, []);
|
||||||
|
return this.appendEntry({
|
||||||
|
operation: 'begin',
|
||||||
|
dbName: '',
|
||||||
|
collName: '',
|
||||||
|
documentId: '',
|
||||||
|
txnId,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Log transaction commit
|
||||||
|
*/
|
||||||
|
async logCommitTransaction(txnId: string): Promise<number> {
|
||||||
|
this.uncommittedTxns.delete(txnId);
|
||||||
|
return this.appendEntry({
|
||||||
|
operation: 'commit',
|
||||||
|
dbName: '',
|
||||||
|
collName: '',
|
||||||
|
documentId: '',
|
||||||
|
txnId,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Log transaction abort
|
||||||
|
*/
|
||||||
|
async logAbortTransaction(txnId: string): Promise<number> {
|
||||||
|
this.uncommittedTxns.delete(txnId);
|
||||||
|
return this.appendEntry({
|
||||||
|
operation: 'abort',
|
||||||
|
dbName: '',
|
||||||
|
collName: '',
|
||||||
|
documentId: '',
|
||||||
|
txnId,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get entries to roll back for an aborted transaction
|
||||||
|
*/
|
||||||
|
getTransactionEntries(txnId: string): IWalEntry[] {
|
||||||
|
return this.entries.filter(e => e.txnId === txnId);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a checkpoint - marks a consistent point in the log
|
||||||
|
*/
|
||||||
|
async checkpoint(): Promise<number> {
|
||||||
|
const lsn = await this.appendEntry({
|
||||||
|
operation: 'checkpoint',
|
||||||
|
dbName: '',
|
||||||
|
collName: '',
|
||||||
|
documentId: '',
|
||||||
|
});
|
||||||
|
this.lastCheckpointLsn = lsn;
|
||||||
|
|
||||||
|
// Truncate old entries (keep only entries after checkpoint)
|
||||||
|
await this.truncate();
|
||||||
|
|
||||||
|
return lsn;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Truncate the WAL file, removing entries before the last checkpoint
|
||||||
|
*/
|
||||||
|
private async truncate(): Promise<void> {
|
||||||
|
// Keep entries after last checkpoint
|
||||||
|
const newEntries = this.entries.filter(e => e.lsn >= this.lastCheckpointLsn);
|
||||||
|
this.entries = newEntries;
|
||||||
|
|
||||||
|
// Rewrite the WAL file
|
||||||
|
const lines = this.entries.map(e => JSON.stringify(e)).join('\n');
|
||||||
|
await this.fs.file(this.walPath).encoding('utf8').write(lines);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get current LSN
|
||||||
|
*/
|
||||||
|
getCurrentLsn(): number {
|
||||||
|
return this.currentLsn;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get entries after a specific LSN (for recovery)
|
||||||
|
*/
|
||||||
|
getEntriesAfter(lsn: number): IWalEntry[] {
|
||||||
|
return this.entries.filter(e => e.lsn > lsn);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Close the WAL
|
||||||
|
*/
|
||||||
|
async close(): Promise<void> {
|
||||||
|
if (this.isInitialized) {
|
||||||
|
// Final checkpoint before close
|
||||||
|
await this.checkpoint();
|
||||||
|
}
|
||||||
|
this.isInitialized = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Private Methods
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
private async appendEntry(
|
||||||
|
partial: Omit<IWalEntry, 'lsn' | 'timestamp' | 'checksum'>
|
||||||
|
): Promise<number> {
|
||||||
|
await this.initialize();
|
||||||
|
|
||||||
|
this.currentLsn++;
|
||||||
|
const entry: IWalEntry = {
|
||||||
|
...partial,
|
||||||
|
lsn: this.currentLsn,
|
||||||
|
timestamp: Date.now(),
|
||||||
|
checksum: 0, // Will be calculated
|
||||||
|
};
|
||||||
|
|
||||||
|
// Calculate checksum
|
||||||
|
entry.checksum = this.calculateChecksum(entry);
|
||||||
|
|
||||||
|
// Track in transaction if applicable
|
||||||
|
if (partial.txnId && this.uncommittedTxns.has(partial.txnId)) {
|
||||||
|
this.uncommittedTxns.get(partial.txnId)!.push(entry);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add to in-memory log
|
||||||
|
this.entries.push(entry);
|
||||||
|
|
||||||
|
// Append to file (append mode for durability)
|
||||||
|
await this.fs.file(this.walPath).encoding('utf8').append(JSON.stringify(entry) + '\n');
|
||||||
|
|
||||||
|
// Check if we need a checkpoint
|
||||||
|
if (this.entries.length - this.lastCheckpointLsn >= this.checkpointInterval) {
|
||||||
|
await this.checkpoint();
|
||||||
|
}
|
||||||
|
|
||||||
|
return entry.lsn;
|
||||||
|
}
|
||||||
|
|
||||||
|
private serializeDocument(doc: Document): string {
|
||||||
|
// Serialize document to BSON and encode as base64
|
||||||
|
const bsonData = plugins.bson.serialize(doc);
|
||||||
|
return Buffer.from(bsonData).toString('base64');
|
||||||
|
}
|
||||||
|
|
||||||
|
private deserializeDocument(data: string): Document {
|
||||||
|
// Decode base64 and deserialize from BSON
|
||||||
|
const buffer = Buffer.from(data, 'base64');
|
||||||
|
return plugins.bson.deserialize(buffer);
|
||||||
|
}
|
||||||
|
|
||||||
|
private calculateChecksum(entry: IWalEntry): number {
|
||||||
|
// Simple CRC32-like checksum
|
||||||
|
const str = JSON.stringify({
|
||||||
|
lsn: entry.lsn,
|
||||||
|
timestamp: entry.timestamp,
|
||||||
|
operation: entry.operation,
|
||||||
|
dbName: entry.dbName,
|
||||||
|
collName: entry.collName,
|
||||||
|
documentId: entry.documentId,
|
||||||
|
data: entry.data,
|
||||||
|
previousData: entry.previousData,
|
||||||
|
txnId: entry.txnId,
|
||||||
|
});
|
||||||
|
|
||||||
|
let crc = 0xFFFFFFFF;
|
||||||
|
for (let i = 0; i < str.length; i++) {
|
||||||
|
crc ^= str.charCodeAt(i);
|
||||||
|
for (let j = 0; j < 8; j++) {
|
||||||
|
crc = (crc >>> 1) ^ (crc & 1 ? 0xEDB88320 : 0);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return (~crc) >>> 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
private verifyChecksum(entry: IWalEntry): boolean {
|
||||||
|
const savedChecksum = entry.checksum;
|
||||||
|
entry.checksum = 0;
|
||||||
|
const calculatedChecksum = this.calculateChecksum(entry);
|
||||||
|
entry.checksum = savedChecksum;
|
||||||
|
return calculatedChecksum === savedChecksum;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Recover document from WAL entry
|
||||||
|
*/
|
||||||
|
recoverDocument(entry: IWalEntry): IStoredDocument | null {
|
||||||
|
if (!entry.data) return null;
|
||||||
|
return this.deserializeDocument(entry.data) as IStoredDocument;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Recover previous document state from WAL entry (for rollback)
|
||||||
|
*/
|
||||||
|
recoverPreviousDocument(entry: IWalEntry): IStoredDocument | null {
|
||||||
|
if (!entry.previousData) return null;
|
||||||
|
return this.deserializeDocument(entry.previousData) as IStoredDocument;
|
||||||
|
}
|
||||||
|
}
|
||||||
433
ts/ts_tsmdb/types/interfaces.ts
Normal file
433
ts/ts_tsmdb/types/interfaces.ts
Normal file
@@ -0,0 +1,433 @@
|
|||||||
|
import type * as plugins from '../plugins.js';
|
||||||
|
|
||||||
|
// ============================================================================
// Document Types
// ============================================================================

// Loosely-typed document shape (any string-keyed object).
export type Document = Record<string, any>;

// Marks a document as carrying an `_id`.
// NOTE(review): the TSchema parameter is declared but unused — the official
// MongoDB driver's WithId<TSchema> intersects the schema
// (`Omit<TSchema, '_id'> & { _id: ObjectId }`). Confirm whether callers
// expect the schema's own fields to be visible through this type.
export interface WithId<TSchema> {
  _id: plugins.bson.ObjectId;
}
|
||||||
|
|
||||||
|
// ============================================================================
// Client Options
// ============================================================================

/**
 * Options for constructing a TsmDB client: which storage adapter to use and,
 * for the memory adapter, whether/where to persist snapshots.
 */
export interface ITsmdbClientOptions {
  /** Storage adapter type: 'memory' or 'file' */
  storageType?: 'memory' | 'file';
  /** Path for file-based storage */
  storagePath?: string;
  /** Enable persistence for memory adapter */
  persist?: boolean;
  /** Path for persistence file when using memory adapter */
  persistPath?: string;
}
|
||||||
|
|
||||||
|
// ============================================================================
// Connection String Parsing
// ============================================================================

/**
 * Result of parsing a `tsmdb://` connection string.
 * Option values are kept as raw strings (presumably lifted straight from the
 * query string); callers coerce them as needed.
 */
export interface IParsedConnectionString {
  protocol: 'tsmdb';
  storageType: 'memory' | 'file';
  options: {
    persist?: string;
    path?: string;
  };
}
|
||||||
|
|
||||||
|
// ============================================================================
// CRUD Operation Options
// ============================================================================

/** Options accepted by single-document inserts. */
export interface IInsertOneOptions {
  /** Session for transaction support */
  session?: IClientSession;
  /** Custom write concern */
  writeConcern?: IWriteConcern;
}

/** Options accepted by multi-document inserts. */
export interface IInsertManyOptions extends IInsertOneOptions {
  /** If true, inserts are ordered and stop on first error */
  ordered?: boolean;
}

/** Query shaping options for find operations. */
export interface IFindOptions<TSchema = Document> {
  /** Projection to apply */
  projection?: Partial<Record<keyof TSchema | string, 0 | 1 | boolean>>;
  /** Sort specification */
  sort?: ISortSpecification;
  /** Number of documents to skip */
  skip?: number;
  /** Maximum number of documents to return */
  limit?: number;
  /** Session for transaction support */
  session?: IClientSession;
  /** Hint for index usage */
  hint?: string | Document;
}

/** Options accepted by update operations. */
export interface IUpdateOptions {
  /** Create document if it doesn't exist */
  upsert?: boolean;
  /** Session for transaction support */
  session?: IClientSession;
  /** Array filters for positional updates */
  arrayFilters?: Document[];
  /** Custom write concern */
  writeConcern?: IWriteConcern;
  /** Hint for index usage */
  hint?: string | Document;
}

/** Replace accepts the same options as update. */
export interface IReplaceOptions extends IUpdateOptions {}

/** Options accepted by delete operations. */
export interface IDeleteOptions {
  /** Session for transaction support */
  session?: IClientSession;
  /** Custom write concern */
  writeConcern?: IWriteConcern;
  /** Hint for index usage */
  hint?: string | Document;
}

/** Options accepted by findOneAndUpdate. */
export interface IFindOneAndUpdateOptions extends IUpdateOptions {
  /** Return the document before or after the update */
  returnDocument?: 'before' | 'after';
  /** Projection to apply */
  projection?: Document;
  /** Sort specification to determine which document to modify */
  sort?: ISortSpecification;
}

/** findOneAndReplace accepts the same options as findOneAndUpdate. */
export interface IFindOneAndReplaceOptions extends IFindOneAndUpdateOptions {}

/** Options accepted by findOneAndDelete. */
export interface IFindOneAndDeleteOptions {
  /** Projection to apply */
  projection?: Document;
  /** Sort specification to determine which document to delete */
  sort?: ISortSpecification;
  /** Session for transaction support */
  session?: IClientSession;
}
|
||||||
|
|
||||||
|
// ============================================================================
// CRUD Results
// ============================================================================

/** Acknowledged single-insert result carrying the assigned id. */
export interface IInsertOneResult {
  acknowledged: boolean;
  insertedId: plugins.bson.ObjectId;
}

/** Multi-insert result; `insertedIds` maps input index to assigned id. */
export interface IInsertManyResult {
  acknowledged: boolean;
  insertedCount: number;
  insertedIds: Record<number, plugins.bson.ObjectId>;
}

/** Update result; `upsertedId` is null when no upsert occurred. */
export interface IUpdateResult {
  acknowledged: boolean;
  matchedCount: number;
  modifiedCount: number;
  upsertedCount: number;
  upsertedId: plugins.bson.ObjectId | null;
}

/** Delete result with the number of removed documents. */
export interface IDeleteResult {
  acknowledged: boolean;
  deletedCount: number;
}

/** findOneAnd* result; `value` is null when no document matched. */
export interface IModifyResult<TSchema> {
  value: TSchema | null;
  ok: 1 | 0;
  lastErrorObject?: {
    n: number;
    updatedExisting?: boolean;
    upserted?: plugins.bson.ObjectId;
  };
}
|
||||||
|
|
||||||
|
// ============================================================================
// Sort and Index Types
// ============================================================================

/** Accepted sort-direction spellings: 1/'asc'/'ascending' vs -1/'desc'/'descending'. */
export type ISortDirection = 1 | -1 | 'asc' | 'desc' | 'ascending' | 'descending';

/** Sort spec: a field→direction map, or an ordered list of [field, direction] pairs. */
export type ISortSpecification = Record<string, ISortDirection> | [string, ISortDirection][];

/** Full index definition: key pattern plus index options. */
export interface IIndexSpecification {
  key: Record<string, 1 | -1 | 'text' | '2dsphere'>;
  name?: string;
  unique?: boolean;
  sparse?: boolean;
  expireAfterSeconds?: number;
  background?: boolean;
  partialFilterExpression?: Document;
}

/** Index metadata (`v` is the index version number). */
export interface IIndexInfo {
  v: number;
  key: Record<string, 1 | -1 | string>;
  name: string;
  unique?: boolean;
  sparse?: boolean;
  expireAfterSeconds?: number;
}

/** Options accepted when creating an index. */
export interface ICreateIndexOptions {
  unique?: boolean;
  sparse?: boolean;
  expireAfterSeconds?: number;
  name?: string;
  background?: boolean;
  partialFilterExpression?: Document;
}
|
||||||
|
|
||||||
|
// ============================================================================
// Write Concern
// ============================================================================

/** MongoDB-style write concern: acknowledgement level, journaling, timeout. */
export interface IWriteConcern {
  w?: number | 'majority';
  j?: boolean;
  wtimeout?: number;
}
|
||||||
|
|
||||||
|
// ============================================================================
// Aggregation Types
// ============================================================================

/** Options accepted by aggregate pipelines. */
export interface IAggregateOptions {
  /** Allow disk use for large aggregations */
  allowDiskUse?: boolean;
  /** Maximum time in ms */
  maxTimeMS?: number;
  /** Session for transaction support */
  session?: IClientSession;
  /** Batch size for cursor */
  batchSize?: number;
  /** Collation settings */
  collation?: ICollation;
  /** Hint for index usage */
  hint?: string | Document;
  /** Comment for profiling */
  comment?: string;
}

/** Language-sensitive string comparison settings; only `locale` is required. */
export interface ICollation {
  locale: string;
  caseLevel?: boolean;
  caseFirst?: string;
  strength?: number;
  numericOrdering?: boolean;
  alternate?: string;
  maxVariable?: string;
  backwards?: boolean;
}
|
||||||
|
|
||||||
|
// ============================================================================
// Change Stream Types
// ============================================================================

/** Options controlling how a change stream is opened and resumed. */
export interface IChangeStreamOptions {
  /** Resume after this token */
  resumeAfter?: IResumeToken;
  /** Start at this operation time */
  startAtOperationTime?: plugins.bson.Timestamp;
  /** Start after this token */
  startAfter?: IResumeToken;
  /** Full document lookup mode */
  fullDocument?: 'default' | 'updateLookup' | 'whenAvailable' | 'required';
  /** Full document before change */
  fullDocumentBeforeChange?: 'off' | 'whenAvailable' | 'required';
  /** Batch size */
  batchSize?: number;
  /** Maximum await time in ms */
  maxAwaitTimeMS?: number;
}

/** Opaque token used to resume a change stream at a known position. */
export interface IResumeToken {
  _data: string;
}

/** Kinds of events a change stream can emit. */
export type ChangeStreamOperationType =
  | 'insert'
  | 'update'
  | 'replace'
  | 'delete'
  | 'drop'
  | 'rename'
  | 'dropDatabase'
  | 'invalidate';

/** A single change event delivered by a change stream. */
export interface IChangeStreamDocument<TSchema = Document> {
  /** Resume token identifying this event. */
  _id: IResumeToken;
  operationType: ChangeStreamOperationType;
  /** Presence depends on the fullDocument option and the operation type. */
  fullDocument?: TSchema;
  fullDocumentBeforeChange?: TSchema;
  /** Namespace the event applies to; coll is optional for database-level events. */
  ns: {
    db: string;
    coll?: string;
  };
  documentKey?: { _id: plugins.bson.ObjectId };
  /** For updates: which fields changed and which were removed. */
  updateDescription?: {
    updatedFields?: Document;
    removedFields?: string[];
    truncatedArrays?: Array<{ field: string; newSize: number }>;
  };
  clusterTime?: plugins.bson.Timestamp;
  /** Transaction number and logical session id, when the event occurred in a transaction. */
  txnNumber?: number;
  lsid?: { id: plugins.bson.Binary; uid: plugins.bson.Binary };
}
|
||||||
|
|
||||||
|
// ============================================================================
// Transaction Types
// ============================================================================

/** Client session handle providing transaction control. */
export interface IClientSession {
  /** Logical session id. */
  id: { id: plugins.bson.Binary };
  /** Whether a transaction is currently active on this session. */
  inTransaction(): boolean;
  startTransaction(options?: ITransactionOptions): void;
  commitTransaction(): Promise<void>;
  abortTransaction(): Promise<void>;
  /** Run fn inside a transaction; commit/abort handling is managed by the implementation. */
  withTransaction<T>(fn: () => Promise<T>, options?: ITransactionOptions): Promise<T>;
  endSession(): Promise<void>;
}

/** Options applied when starting a transaction. */
export interface ITransactionOptions {
  readConcern?: IReadConcern;
  writeConcern?: IWriteConcern;
  readPreference?: string;
  /** Time limit in ms for the commit to complete. */
  maxCommitTimeMS?: number;
}

/** Read isolation level. */
export interface IReadConcern {
  level: 'local' | 'available' | 'majority' | 'linearizable' | 'snapshot';
}
|
||||||
|
|
||||||
|
// ============================================================================
// Bulk Operation Types
// ============================================================================

/** Options for bulkWrite. */
export interface IBulkWriteOptions {
  /** Stop at the first error (true) or attempt all operations regardless (false). */
  ordered?: boolean;
  session?: IClientSession;
  writeConcern?: IWriteConcern;
}

/** A single bulk operation; exactly one of the properties should be set. */
export interface IBulkWriteOperation<TSchema = Document> {
  insertOne?: { document: TSchema };
  updateOne?: { filter: Document; update: Document; upsert?: boolean; arrayFilters?: Document[]; hint?: Document | string };
  updateMany?: { filter: Document; update: Document; upsert?: boolean; arrayFilters?: Document[]; hint?: Document | string };
  replaceOne?: { filter: Document; replacement: TSchema; upsert?: boolean; hint?: Document | string };
  deleteOne?: { filter: Document; hint?: Document | string };
  deleteMany?: { filter: Document; hint?: Document | string };
}

/** Aggregate result of a bulkWrite call. */
export interface IBulkWriteResult {
  acknowledged: boolean;
  insertedCount: number;
  matchedCount: number;
  modifiedCount: number;
  deletedCount: number;
  upsertedCount: number;
  /** Inserted _ids keyed by the operation's position in the request. */
  insertedIds: Record<number, plugins.bson.ObjectId>;
  /** Upserted _ids keyed by the operation's position in the request. */
  upsertedIds: Record<number, plugins.bson.ObjectId>;
}
|
||||||
|
|
||||||
|
// ============================================================================
// Storage Types
// ============================================================================

/** A document as persisted: always carries an ObjectId _id. */
export interface IStoredDocument extends Document {
  _id: plugins.bson.ObjectId;
}

/** One operation-log entry. */
export interface IOpLogEntry {
  /** Operation timestamp. */
  ts: plugins.bson.Timestamp;
  /** Operation type: insert, update, delete, command, or no-op. */
  op: 'i' | 'u' | 'd' | 'c' | 'n';
  /** Namespace ("db.collection") the operation applies to. */
  ns: string;
  /** Operation payload. */
  o: Document;
  /** Secondary payload (presumably the selector for updates — TODO confirm). */
  o2?: Document;
  /** Transaction number and session id, for transactional operations. */
  txnNumber?: number;
  lsid?: { id: plugins.bson.Binary };
}
|
||||||
|
|
||||||
|
// ============================================================================
// Admin Types
// ============================================================================

/** Entry returned by listDatabases. */
export interface IDatabaseInfo {
  name: string;
  /** Size on disk in bytes. */
  sizeOnDisk: number;
  empty: boolean;
}

/** Entry returned by listCollections. */
export interface ICollectionInfo {
  name: string;
  type: 'collection' | 'view';
  /** Creation options of the collection/view. */
  options: Document;
  info: {
    readOnly: boolean;
    uuid?: plugins.bson.Binary;
  };
  /** The default _id index, when present. */
  idIndex?: IIndexInfo;
}

/** Result shape of the serverStatus command. */
export interface IServerStatus {
  host: string;
  version: string;
  process: string;
  pid: number;
  /** Uptime in seconds. */
  uptime: number;
  uptimeMillis: number;
  uptimeEstimate: number;
  localTime: Date;
  /** Memory usage (resident/virtual; units not evident here — TODO confirm). */
  mem: {
    resident: number;
    virtual: number;
  };
  connections: {
    current: number;
    available: number;
    totalCreated: number;
  };
  ok: 1;
}

/** Result shape of the collStats command. */
export interface ICollectionStats {
  ns: string;
  /** Number of documents. */
  count: number;
  size: number;
  avgObjSize: number;
  storageSize: number;
  totalIndexSize: number;
  /** Per-index sizes keyed by index name. */
  indexSizes: Record<string, number>;
  /** Number of indexes. */
  nindexes: number;
  ok: 1;
}
|
||||||
|
|
||||||
|
// ============================================================================
// Count Types
// ============================================================================

/** Options for countDocuments. */
export interface ICountDocumentsOptions {
  skip?: number;
  limit?: number;
  session?: IClientSession;
  hint?: string | Document;
  maxTimeMS?: number;
}

/** Options for estimatedDocumentCount. */
export interface IEstimatedDocumentCountOptions {
  maxTimeMS?: number;
}

/** Options for distinct. */
export interface IDistinctOptions {
  session?: IClientSession;
  maxTimeMS?: number;
}
|
||||||
88
ts/ts_tsmdb/utils/checksum.ts
Normal file
88
ts/ts_tsmdb/utils/checksum.ts
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
/**
|
||||||
|
* CRC32 checksum utilities for data integrity
|
||||||
|
*/
|
||||||
|
|
||||||
|
// CRC32 lookup table
|
||||||
|
const CRC32_TABLE: number[] = [];
|
||||||
|
|
||||||
|
// Initialize the CRC32 table
|
||||||
|
function initCRC32Table(): void {
|
||||||
|
if (CRC32_TABLE.length > 0) return;
|
||||||
|
|
||||||
|
for (let i = 0; i < 256; i++) {
|
||||||
|
let crc = i;
|
||||||
|
for (let j = 0; j < 8; j++) {
|
||||||
|
crc = (crc & 1) ? (0xEDB88320 ^ (crc >>> 1)) : (crc >>> 1);
|
||||||
|
}
|
||||||
|
CRC32_TABLE[i] = crc >>> 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calculate CRC32 checksum for a string
|
||||||
|
*/
|
||||||
|
export function calculateCRC32(data: string): number {
|
||||||
|
initCRC32Table();
|
||||||
|
|
||||||
|
let crc = 0xFFFFFFFF;
|
||||||
|
for (let i = 0; i < data.length; i++) {
|
||||||
|
const byte = data.charCodeAt(i) & 0xFF;
|
||||||
|
crc = CRC32_TABLE[(crc ^ byte) & 0xFF] ^ (crc >>> 8);
|
||||||
|
}
|
||||||
|
return (~crc) >>> 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calculate CRC32 checksum for a Buffer
|
||||||
|
*/
|
||||||
|
export function calculateCRC32Buffer(data: Buffer): number {
|
||||||
|
initCRC32Table();
|
||||||
|
|
||||||
|
let crc = 0xFFFFFFFF;
|
||||||
|
for (let i = 0; i < data.length; i++) {
|
||||||
|
crc = CRC32_TABLE[(crc ^ data[i]) & 0xFF] ^ (crc >>> 8);
|
||||||
|
}
|
||||||
|
return (~crc) >>> 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calculate checksum for a document (serialized as JSON)
|
||||||
|
*/
|
||||||
|
export function calculateDocumentChecksum(doc: Record<string, any>): number {
|
||||||
|
// Exclude _checksum field from calculation
|
||||||
|
const { _checksum, ...docWithoutChecksum } = doc;
|
||||||
|
const json = JSON.stringify(docWithoutChecksum);
|
||||||
|
return calculateCRC32(json);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add checksum to a document
|
||||||
|
*/
|
||||||
|
export function addChecksum<T extends Record<string, any>>(doc: T): T & { _checksum: number } {
|
||||||
|
const checksum = calculateDocumentChecksum(doc);
|
||||||
|
return { ...doc, _checksum: checksum };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Verify checksum of a document
|
||||||
|
* Returns true if checksum is valid or if document has no checksum
|
||||||
|
*/
|
||||||
|
export function verifyChecksum(doc: Record<string, any>): boolean {
|
||||||
|
if (!('_checksum' in doc)) {
|
||||||
|
// No checksum to verify
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
const storedChecksum = doc._checksum;
|
||||||
|
const calculatedChecksum = calculateDocumentChecksum(doc);
|
||||||
|
|
||||||
|
return storedChecksum === calculatedChecksum;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remove checksum from a document
|
||||||
|
*/
|
||||||
|
export function removeChecksum<T extends Record<string, any>>(doc: T): Omit<T, '_checksum'> {
|
||||||
|
const { _checksum, ...docWithoutChecksum } = doc;
|
||||||
|
return docWithoutChecksum as Omit<T, '_checksum'>;
|
||||||
|
}
|
||||||
1
ts/ts_tsmdb/utils/index.ts
Normal file
1
ts/ts_tsmdb/utils/index.ts
Normal file
@@ -0,0 +1 @@
|
|||||||
|
// Barrel file: re-export all checksum helpers from this utils directory.
export * from './checksum.js';
|
||||||
@@ -1,14 +1,10 @@
|
|||||||
{
|
{
|
||||||
"compilerOptions": {
|
"compilerOptions": {
|
||||||
"experimentalDecorators": true,
|
|
||||||
"useDefineForClassFields": false,
|
|
||||||
"target": "ES2022",
|
"target": "ES2022",
|
||||||
"module": "NodeNext",
|
"module": "NodeNext",
|
||||||
"moduleResolution": "NodeNext",
|
"moduleResolution": "NodeNext",
|
||||||
"esModuleInterop": true,
|
"esModuleInterop": true,
|
||||||
"verbatimModuleSyntax": true,
|
"verbatimModuleSyntax": true
|
||||||
"baseUrl": ".",
|
|
||||||
"paths": {}
|
|
||||||
},
|
},
|
||||||
"exclude": [
|
"exclude": [
|
||||||
"dist_*/**/*.d.ts"
|
"dist_*/**/*.d.ts"
|
||||||
|
|||||||
Reference in New Issue
Block a user