feat(congodb): implement CongoDB MongoDB wire-protocol compatible in-memory server and APIs
This commit is contained in:
614
ts/congodb/server/handlers/AdminHandler.ts
Normal file
614
ts/congodb/server/handlers/AdminHandler.ts
Normal file
@@ -0,0 +1,614 @@
|
||||
import * as plugins from '../../congodb.plugins.js';
|
||||
import type { ICommandHandler, IHandlerContext } from '../CommandRouter.js';
|
||||
|
||||
/**
|
||||
* AdminHandler - Handles administrative commands
|
||||
*/
|
||||
export class AdminHandler implements ICommandHandler {
|
||||
async handle(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
const { command } = context;
|
||||
|
||||
// Determine which command to handle
|
||||
if (command.ping !== undefined) {
|
||||
return this.handlePing(context);
|
||||
} else if (command.listDatabases !== undefined) {
|
||||
return this.handleListDatabases(context);
|
||||
} else if (command.listCollections !== undefined) {
|
||||
return this.handleListCollections(context);
|
||||
} else if (command.drop !== undefined) {
|
||||
return this.handleDrop(context);
|
||||
} else if (command.dropDatabase !== undefined) {
|
||||
return this.handleDropDatabase(context);
|
||||
} else if (command.create !== undefined) {
|
||||
return this.handleCreate(context);
|
||||
} else if (command.serverStatus !== undefined) {
|
||||
return this.handleServerStatus(context);
|
||||
} else if (command.buildInfo !== undefined) {
|
||||
return this.handleBuildInfo(context);
|
||||
} else if (command.whatsmyuri !== undefined) {
|
||||
return this.handleWhatsMyUri(context);
|
||||
} else if (command.getLog !== undefined) {
|
||||
return this.handleGetLog(context);
|
||||
} else if (command.hostInfo !== undefined) {
|
||||
return this.handleHostInfo(context);
|
||||
} else if (command.replSetGetStatus !== undefined) {
|
||||
return this.handleReplSetGetStatus(context);
|
||||
} else if (command.saslStart !== undefined) {
|
||||
return this.handleSaslStart(context);
|
||||
} else if (command.saslContinue !== undefined) {
|
||||
return this.handleSaslContinue(context);
|
||||
} else if (command.endSessions !== undefined) {
|
||||
return this.handleEndSessions(context);
|
||||
} else if (command.abortTransaction !== undefined) {
|
||||
return this.handleAbortTransaction(context);
|
||||
} else if (command.commitTransaction !== undefined) {
|
||||
return this.handleCommitTransaction(context);
|
||||
} else if (command.collStats !== undefined) {
|
||||
return this.handleCollStats(context);
|
||||
} else if (command.dbStats !== undefined) {
|
||||
return this.handleDbStats(context);
|
||||
} else if (command.connectionStatus !== undefined) {
|
||||
return this.handleConnectionStatus(context);
|
||||
} else if (command.currentOp !== undefined) {
|
||||
return this.handleCurrentOp(context);
|
||||
} else if (command.collMod !== undefined) {
|
||||
return this.handleCollMod(context);
|
||||
} else if (command.renameCollection !== undefined) {
|
||||
return this.handleRenameCollection(context);
|
||||
}
|
||||
|
||||
return {
|
||||
ok: 0,
|
||||
errmsg: 'Unknown admin command',
|
||||
code: 59,
|
||||
codeName: 'CommandNotFound',
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle ping command
|
||||
*/
|
||||
private async handlePing(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
return { ok: 1 };
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle listDatabases command
|
||||
*/
|
||||
private async handleListDatabases(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
const { storage, command } = context;
|
||||
|
||||
const dbNames = await storage.listDatabases();
|
||||
const nameOnly = command.nameOnly || false;
|
||||
|
||||
if (nameOnly) {
|
||||
return {
|
||||
ok: 1,
|
||||
databases: dbNames.map(name => ({ name })),
|
||||
};
|
||||
}
|
||||
|
||||
// Build database list with sizes
|
||||
const databases: plugins.bson.Document[] = [];
|
||||
let totalSize = 0;
|
||||
|
||||
for (const name of dbNames) {
|
||||
const collections = await storage.listCollections(name);
|
||||
let dbSize = 0;
|
||||
|
||||
for (const collName of collections) {
|
||||
const docs = await storage.findAll(name, collName);
|
||||
// Estimate size (rough approximation)
|
||||
dbSize += docs.reduce((sum, doc) => sum + JSON.stringify(doc).length, 0);
|
||||
}
|
||||
|
||||
totalSize += dbSize;
|
||||
|
||||
databases.push({
|
||||
name,
|
||||
sizeOnDisk: dbSize,
|
||||
empty: dbSize === 0,
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
ok: 1,
|
||||
databases,
|
||||
totalSize,
|
||||
totalSizeMb: totalSize / (1024 * 1024),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle listCollections command
|
||||
*/
|
||||
private async handleListCollections(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
const { storage, database, command } = context;
|
||||
|
||||
const filter = command.filter || {};
|
||||
const nameOnly = command.nameOnly || false;
|
||||
const cursor = command.cursor || {};
|
||||
const batchSize = cursor.batchSize || 101;
|
||||
|
||||
const collNames = await storage.listCollections(database);
|
||||
|
||||
let collections: plugins.bson.Document[] = [];
|
||||
|
||||
for (const name of collNames) {
|
||||
// Apply name filter
|
||||
if (filter.name && filter.name !== name) {
|
||||
// Check regex
|
||||
if (filter.name.$regex) {
|
||||
const regex = new RegExp(filter.name.$regex, filter.name.$options);
|
||||
if (!regex.test(name)) continue;
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
if (nameOnly) {
|
||||
collections.push({ name });
|
||||
} else {
|
||||
collections.push({
|
||||
name,
|
||||
type: 'collection',
|
||||
options: {},
|
||||
info: {
|
||||
readOnly: false,
|
||||
uuid: new plugins.bson.UUID(),
|
||||
},
|
||||
idIndex: {
|
||||
v: 2,
|
||||
key: { _id: 1 },
|
||||
name: '_id_',
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
ok: 1,
|
||||
cursor: {
|
||||
id: plugins.bson.Long.fromNumber(0),
|
||||
ns: `${database}.$cmd.listCollections`,
|
||||
firstBatch: collections,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle drop command (drop collection)
|
||||
*/
|
||||
private async handleDrop(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
const { storage, database, command } = context;
|
||||
|
||||
const collection = command.drop;
|
||||
|
||||
const existed = await storage.dropCollection(database, collection);
|
||||
|
||||
if (!existed) {
|
||||
return {
|
||||
ok: 0,
|
||||
errmsg: `ns not found ${database}.${collection}`,
|
||||
code: 26,
|
||||
codeName: 'NamespaceNotFound',
|
||||
};
|
||||
}
|
||||
|
||||
return { ok: 1, ns: `${database}.${collection}` };
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle dropDatabase command
|
||||
*/
|
||||
private async handleDropDatabase(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
const { storage, database } = context;
|
||||
|
||||
await storage.dropDatabase(database);
|
||||
|
||||
return { ok: 1, dropped: database };
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle create command (create collection)
|
||||
*/
|
||||
private async handleCreate(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
const { storage, database, command } = context;
|
||||
|
||||
const collection = command.create;
|
||||
|
||||
// Check if already exists
|
||||
const exists = await storage.collectionExists(database, collection);
|
||||
if (exists) {
|
||||
return {
|
||||
ok: 0,
|
||||
errmsg: `Collection ${database}.${collection} already exists.`,
|
||||
code: 48,
|
||||
codeName: 'NamespaceExists',
|
||||
};
|
||||
}
|
||||
|
||||
await storage.createCollection(database, collection);
|
||||
|
||||
return { ok: 1 };
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle serverStatus command
|
||||
*/
|
||||
private async handleServerStatus(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
const { server } = context;
|
||||
|
||||
const uptime = server.getUptime();
|
||||
const connections = server.getConnectionCount();
|
||||
|
||||
return {
|
||||
ok: 1,
|
||||
host: `${server.host}:${server.port}`,
|
||||
version: '7.0.0',
|
||||
process: 'congodb',
|
||||
pid: process.pid,
|
||||
uptime,
|
||||
uptimeMillis: uptime * 1000,
|
||||
uptimeEstimate: uptime,
|
||||
localTime: new Date(),
|
||||
mem: {
|
||||
resident: Math.floor(process.memoryUsage().rss / (1024 * 1024)),
|
||||
virtual: Math.floor(process.memoryUsage().heapTotal / (1024 * 1024)),
|
||||
supported: true,
|
||||
},
|
||||
connections: {
|
||||
current: connections,
|
||||
available: 1000 - connections,
|
||||
totalCreated: connections,
|
||||
active: connections,
|
||||
},
|
||||
network: {
|
||||
bytesIn: 0,
|
||||
bytesOut: 0,
|
||||
numRequests: 0,
|
||||
},
|
||||
storageEngine: {
|
||||
name: 'congodb',
|
||||
supportsCommittedReads: true,
|
||||
persistent: false,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle buildInfo command
|
||||
*/
|
||||
private async handleBuildInfo(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
return {
|
||||
ok: 1,
|
||||
version: '7.0.0',
|
||||
gitVersion: 'congodb',
|
||||
modules: [],
|
||||
allocator: 'system',
|
||||
javascriptEngine: 'none',
|
||||
sysInfo: 'deprecated',
|
||||
versionArray: [7, 0, 0, 0],
|
||||
openssl: {
|
||||
running: 'disabled',
|
||||
compiled: 'disabled',
|
||||
},
|
||||
buildEnvironment: {
|
||||
distmod: 'congodb',
|
||||
distarch: process.arch,
|
||||
cc: '',
|
||||
ccflags: '',
|
||||
cxx: '',
|
||||
cxxflags: '',
|
||||
linkflags: '',
|
||||
target_arch: process.arch,
|
||||
target_os: process.platform,
|
||||
},
|
||||
bits: 64,
|
||||
debug: false,
|
||||
maxBsonObjectSize: 16777216,
|
||||
storageEngines: ['congodb'],
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle whatsmyuri command
|
||||
*/
|
||||
private async handleWhatsMyUri(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
const { server } = context;
|
||||
return {
|
||||
ok: 1,
|
||||
you: `127.0.0.1:${server.port}`,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle getLog command
|
||||
*/
|
||||
private async handleGetLog(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
const { command } = context;
|
||||
|
||||
if (command.getLog === '*') {
|
||||
return {
|
||||
ok: 1,
|
||||
names: ['global', 'startupWarnings'],
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
ok: 1,
|
||||
totalLinesWritten: 0,
|
||||
log: [],
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle hostInfo command
|
||||
*/
|
||||
private async handleHostInfo(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
return {
|
||||
ok: 1,
|
||||
system: {
|
||||
currentTime: new Date(),
|
||||
hostname: 'localhost',
|
||||
cpuAddrSize: 64,
|
||||
memSizeMB: Math.floor(process.memoryUsage().heapTotal / (1024 * 1024)),
|
||||
numCores: 1,
|
||||
cpuArch: process.arch,
|
||||
numaEnabled: false,
|
||||
},
|
||||
os: {
|
||||
type: process.platform,
|
||||
name: process.platform,
|
||||
version: process.version,
|
||||
},
|
||||
extra: {},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle replSetGetStatus command
|
||||
*/
|
||||
private async handleReplSetGetStatus(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
// We're standalone, not a replica set
|
||||
return {
|
||||
ok: 0,
|
||||
errmsg: 'not running with --replSet',
|
||||
code: 76,
|
||||
codeName: 'NoReplicationEnabled',
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle saslStart command (authentication)
|
||||
*/
|
||||
private async handleSaslStart(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
// We don't require authentication, but we need to respond properly
|
||||
// to let drivers know auth is "successful"
|
||||
return {
|
||||
ok: 1,
|
||||
conversationId: 1,
|
||||
done: true,
|
||||
payload: Buffer.from([]),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle saslContinue command
|
||||
*/
|
||||
private async handleSaslContinue(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
return {
|
||||
ok: 1,
|
||||
conversationId: 1,
|
||||
done: true,
|
||||
payload: Buffer.from([]),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle endSessions command
|
||||
*/
|
||||
private async handleEndSessions(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
return { ok: 1 };
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle abortTransaction command
|
||||
*/
|
||||
private async handleAbortTransaction(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
// Transactions are not fully supported, but acknowledge the command
|
||||
return { ok: 1 };
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle commitTransaction command
|
||||
*/
|
||||
private async handleCommitTransaction(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
// Transactions are not fully supported, but acknowledge the command
|
||||
return { ok: 1 };
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle collStats command
|
||||
*/
|
||||
private async handleCollStats(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
const { storage, database, command } = context;
|
||||
|
||||
const collection = command.collStats;
|
||||
|
||||
const exists = await storage.collectionExists(database, collection);
|
||||
if (!exists) {
|
||||
return {
|
||||
ok: 0,
|
||||
errmsg: `ns not found ${database}.${collection}`,
|
||||
code: 26,
|
||||
codeName: 'NamespaceNotFound',
|
||||
};
|
||||
}
|
||||
|
||||
const docs = await storage.findAll(database, collection);
|
||||
const size = docs.reduce((sum, doc) => sum + JSON.stringify(doc).length, 0);
|
||||
const count = docs.length;
|
||||
const avgObjSize = count > 0 ? size / count : 0;
|
||||
|
||||
const indexes = await storage.getIndexes(database, collection);
|
||||
|
||||
return {
|
||||
ok: 1,
|
||||
ns: `${database}.${collection}`,
|
||||
count,
|
||||
size,
|
||||
avgObjSize,
|
||||
storageSize: size,
|
||||
totalIndexSize: 0,
|
||||
indexSizes: indexes.reduce((acc: any, idx: any) => {
|
||||
acc[idx.name] = 0;
|
||||
return acc;
|
||||
}, {}),
|
||||
nindexes: indexes.length,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle dbStats command
|
||||
*/
|
||||
private async handleDbStats(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
const { storage, database } = context;
|
||||
|
||||
const collections = await storage.listCollections(database);
|
||||
let totalSize = 0;
|
||||
let totalObjects = 0;
|
||||
|
||||
for (const collName of collections) {
|
||||
const docs = await storage.findAll(database, collName);
|
||||
totalObjects += docs.length;
|
||||
totalSize += docs.reduce((sum, doc) => sum + JSON.stringify(doc).length, 0);
|
||||
}
|
||||
|
||||
return {
|
||||
ok: 1,
|
||||
db: database,
|
||||
collections: collections.length,
|
||||
views: 0,
|
||||
objects: totalObjects,
|
||||
avgObjSize: totalObjects > 0 ? totalSize / totalObjects : 0,
|
||||
dataSize: totalSize,
|
||||
storageSize: totalSize,
|
||||
indexes: collections.length, // At least _id index per collection
|
||||
indexSize: 0,
|
||||
totalSize,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle connectionStatus command
|
||||
*/
|
||||
private async handleConnectionStatus(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
return {
|
||||
ok: 1,
|
||||
authInfo: {
|
||||
authenticatedUsers: [],
|
||||
authenticatedUserRoles: [],
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle currentOp command
|
||||
*/
|
||||
private async handleCurrentOp(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
return {
|
||||
ok: 1,
|
||||
inprog: [],
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle collMod command
|
||||
*/
|
||||
private async handleCollMod(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
// We don't support modifying collection options, but acknowledge the command
|
||||
return { ok: 1 };
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle renameCollection command
|
||||
*/
|
||||
private async handleRenameCollection(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
const { storage, command } = context;
|
||||
|
||||
const from = command.renameCollection;
|
||||
const to = command.to;
|
||||
const dropTarget = command.dropTarget || false;
|
||||
|
||||
if (!from || !to) {
|
||||
return {
|
||||
ok: 0,
|
||||
errmsg: 'renameCollection requires both source and target',
|
||||
code: 2,
|
||||
codeName: 'BadValue',
|
||||
};
|
||||
}
|
||||
|
||||
// Parse namespace (format: "db.collection")
|
||||
const fromParts = from.split('.');
|
||||
const toParts = to.split('.');
|
||||
|
||||
if (fromParts.length < 2 || toParts.length < 2) {
|
||||
return {
|
||||
ok: 0,
|
||||
errmsg: 'Invalid namespace format',
|
||||
code: 73,
|
||||
codeName: 'InvalidNamespace',
|
||||
};
|
||||
}
|
||||
|
||||
const fromDb = fromParts[0];
|
||||
const fromColl = fromParts.slice(1).join('.');
|
||||
const toDb = toParts[0];
|
||||
const toColl = toParts.slice(1).join('.');
|
||||
|
||||
// Check if source exists
|
||||
const sourceExists = await storage.collectionExists(fromDb, fromColl);
|
||||
if (!sourceExists) {
|
||||
return {
|
||||
ok: 0,
|
||||
errmsg: `source namespace ${from} does not exist`,
|
||||
code: 26,
|
||||
codeName: 'NamespaceNotFound',
|
||||
};
|
||||
}
|
||||
|
||||
// Check if target exists
|
||||
const targetExists = await storage.collectionExists(toDb, toColl);
|
||||
if (targetExists) {
|
||||
if (dropTarget) {
|
||||
await storage.dropCollection(toDb, toColl);
|
||||
} else {
|
||||
return {
|
||||
ok: 0,
|
||||
errmsg: `target namespace ${to} already exists`,
|
||||
code: 48,
|
||||
codeName: 'NamespaceExists',
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// Same database rename
|
||||
if (fromDb === toDb) {
|
||||
await storage.renameCollection(fromDb, fromColl, toColl);
|
||||
} else {
|
||||
// Cross-database rename: copy documents then drop source
|
||||
await storage.createCollection(toDb, toColl);
|
||||
const docs = await storage.findAll(fromDb, fromColl);
|
||||
|
||||
for (const doc of docs) {
|
||||
await storage.insertOne(toDb, toColl, doc);
|
||||
}
|
||||
|
||||
await storage.dropCollection(fromDb, fromColl);
|
||||
}
|
||||
|
||||
return { ok: 1 };
|
||||
}
|
||||
}
|
||||
342
ts/congodb/server/handlers/AggregateHandler.ts
Normal file
342
ts/congodb/server/handlers/AggregateHandler.ts
Normal file
@@ -0,0 +1,342 @@
|
||||
import * as plugins from '../../congodb.plugins.js';
|
||||
import type { ICommandHandler, IHandlerContext, ICursorState } from '../CommandRouter.js';
|
||||
import { AggregationEngine } from '../../engine/AggregationEngine.js';
|
||||
|
||||
/**
|
||||
* AggregateHandler - Handles aggregate command
|
||||
*/
|
||||
export class AggregateHandler implements ICommandHandler {
|
||||
private cursors: Map<bigint, ICursorState>;
|
||||
private nextCursorId: () => bigint;
|
||||
|
||||
constructor(
|
||||
cursors: Map<bigint, ICursorState>,
|
||||
nextCursorId: () => bigint
|
||||
) {
|
||||
this.cursors = cursors;
|
||||
this.nextCursorId = nextCursorId;
|
||||
}
|
||||
|
||||
async handle(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
const { storage, database, command } = context;
|
||||
|
||||
const collection = command.aggregate;
|
||||
const pipeline = command.pipeline || [];
|
||||
const cursor = command.cursor || {};
|
||||
const batchSize = cursor.batchSize || 101;
|
||||
|
||||
// Validate
|
||||
if (typeof collection !== 'string' && collection !== 1) {
|
||||
return {
|
||||
ok: 0,
|
||||
errmsg: 'aggregate command requires a collection name or 1',
|
||||
code: 2,
|
||||
codeName: 'BadValue',
|
||||
};
|
||||
}
|
||||
|
||||
if (!Array.isArray(pipeline)) {
|
||||
return {
|
||||
ok: 0,
|
||||
errmsg: 'pipeline must be an array',
|
||||
code: 2,
|
||||
codeName: 'BadValue',
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
// Get source documents
|
||||
let documents: plugins.bson.Document[] = [];
|
||||
|
||||
if (collection === 1 || collection === '1') {
|
||||
// Database-level aggregation (e.g., $listLocalSessions)
|
||||
documents = [];
|
||||
} else {
|
||||
// Collection-level aggregation
|
||||
const exists = await storage.collectionExists(database, collection);
|
||||
if (exists) {
|
||||
documents = await storage.findAll(database, collection);
|
||||
}
|
||||
}
|
||||
|
||||
// Handle $lookup and $graphLookup stages that reference other collections
|
||||
const processedPipeline = await this.preprocessPipeline(
|
||||
storage,
|
||||
database,
|
||||
pipeline,
|
||||
documents
|
||||
);
|
||||
|
||||
// Run aggregation
|
||||
let results: plugins.bson.Document[];
|
||||
|
||||
// Check for special stages that we handle manually
|
||||
if (this.hasSpecialStages(pipeline)) {
|
||||
results = await this.executeWithSpecialStages(
|
||||
storage,
|
||||
database,
|
||||
documents,
|
||||
pipeline
|
||||
);
|
||||
} else {
|
||||
results = AggregationEngine.aggregate(documents as any, processedPipeline);
|
||||
}
|
||||
|
||||
// Handle $out and $merge stages
|
||||
const lastStage = pipeline[pipeline.length - 1];
|
||||
if (lastStage && lastStage.$out) {
|
||||
await this.handleOut(storage, database, results, lastStage.$out);
|
||||
return { ok: 1, cursor: { id: plugins.bson.Long.fromNumber(0), ns: `${database}.${collection}`, firstBatch: [] } };
|
||||
}
|
||||
|
||||
if (lastStage && lastStage.$merge) {
|
||||
await this.handleMerge(storage, database, results, lastStage.$merge);
|
||||
return { ok: 1, cursor: { id: plugins.bson.Long.fromNumber(0), ns: `${database}.${collection}`, firstBatch: [] } };
|
||||
}
|
||||
|
||||
// Build cursor response
|
||||
const effectiveBatchSize = Math.min(batchSize, results.length);
|
||||
const firstBatch = results.slice(0, effectiveBatchSize);
|
||||
const remaining = results.slice(effectiveBatchSize);
|
||||
|
||||
let cursorId = BigInt(0);
|
||||
if (remaining.length > 0) {
|
||||
cursorId = this.nextCursorId();
|
||||
this.cursors.set(cursorId, {
|
||||
id: cursorId,
|
||||
database,
|
||||
collection: typeof collection === 'string' ? collection : '$cmd.aggregate',
|
||||
documents: remaining,
|
||||
position: 0,
|
||||
batchSize,
|
||||
createdAt: new Date(),
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
ok: 1,
|
||||
cursor: {
|
||||
id: plugins.bson.Long.fromBigInt(cursorId),
|
||||
ns: `${database}.${typeof collection === 'string' ? collection : '$cmd.aggregate'}`,
|
||||
firstBatch,
|
||||
},
|
||||
};
|
||||
} catch (error: any) {
|
||||
return {
|
||||
ok: 0,
|
||||
errmsg: error.message || 'Aggregation failed',
|
||||
code: 1,
|
||||
codeName: 'InternalError',
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Preprocess pipeline to handle cross-collection lookups
|
||||
*/
|
||||
private async preprocessPipeline(
|
||||
storage: any,
|
||||
database: string,
|
||||
pipeline: plugins.bson.Document[],
|
||||
documents: plugins.bson.Document[]
|
||||
): Promise<plugins.bson.Document[]> {
|
||||
// For now, return the pipeline as-is
|
||||
// Cross-collection lookups are handled in executeWithSpecialStages
|
||||
return pipeline;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if pipeline has stages that need special handling
|
||||
*/
|
||||
private hasSpecialStages(pipeline: plugins.bson.Document[]): boolean {
|
||||
return pipeline.some(stage =>
|
||||
stage.$lookup ||
|
||||
stage.$graphLookup ||
|
||||
stage.$unionWith
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute pipeline with special stage handling
|
||||
*/
|
||||
private async executeWithSpecialStages(
|
||||
storage: any,
|
||||
database: string,
|
||||
documents: plugins.bson.Document[],
|
||||
pipeline: plugins.bson.Document[]
|
||||
): Promise<plugins.bson.Document[]> {
|
||||
let results: plugins.bson.Document[] = [...documents];
|
||||
|
||||
for (const stage of pipeline) {
|
||||
if (stage.$lookup) {
|
||||
const lookupSpec = stage.$lookup;
|
||||
const fromCollection = lookupSpec.from;
|
||||
|
||||
// Get foreign collection documents
|
||||
const foreignExists = await storage.collectionExists(database, fromCollection);
|
||||
const foreignDocs = foreignExists
|
||||
? await storage.findAll(database, fromCollection)
|
||||
: [];
|
||||
|
||||
results = AggregationEngine.executeLookup(results as any, lookupSpec, foreignDocs);
|
||||
} else if (stage.$graphLookup) {
|
||||
const graphLookupSpec = stage.$graphLookup;
|
||||
const fromCollection = graphLookupSpec.from;
|
||||
|
||||
const foreignExists = await storage.collectionExists(database, fromCollection);
|
||||
const foreignDocs = foreignExists
|
||||
? await storage.findAll(database, fromCollection)
|
||||
: [];
|
||||
|
||||
results = AggregationEngine.executeGraphLookup(results as any, graphLookupSpec, foreignDocs);
|
||||
} else if (stage.$unionWith) {
|
||||
let unionSpec = stage.$unionWith;
|
||||
let unionColl: string;
|
||||
let unionPipeline: plugins.bson.Document[] | undefined;
|
||||
|
||||
if (typeof unionSpec === 'string') {
|
||||
unionColl = unionSpec;
|
||||
} else {
|
||||
unionColl = unionSpec.coll;
|
||||
unionPipeline = unionSpec.pipeline;
|
||||
}
|
||||
|
||||
const unionExists = await storage.collectionExists(database, unionColl);
|
||||
const unionDocs = unionExists
|
||||
? await storage.findAll(database, unionColl)
|
||||
: [];
|
||||
|
||||
results = AggregationEngine.executeUnionWith(results as any, unionDocs, unionPipeline);
|
||||
} else if (stage.$facet) {
|
||||
// Execute each facet pipeline separately
|
||||
const facetResults: plugins.bson.Document = {};
|
||||
|
||||
for (const [facetName, facetPipeline] of Object.entries(stage.$facet)) {
|
||||
const facetDocs = await this.executeWithSpecialStages(
|
||||
storage,
|
||||
database,
|
||||
results,
|
||||
facetPipeline as plugins.bson.Document[]
|
||||
);
|
||||
facetResults[facetName] = facetDocs;
|
||||
}
|
||||
|
||||
results = [facetResults];
|
||||
} else {
|
||||
// Regular stage - pass to mingo
|
||||
results = AggregationEngine.aggregate(results as any, [stage]);
|
||||
}
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle $out stage - write results to a collection
|
||||
*/
|
||||
private async handleOut(
|
||||
storage: any,
|
||||
database: string,
|
||||
results: plugins.bson.Document[],
|
||||
outSpec: string | { db?: string; coll: string }
|
||||
): Promise<void> {
|
||||
let targetDb = database;
|
||||
let targetColl: string;
|
||||
|
||||
if (typeof outSpec === 'string') {
|
||||
targetColl = outSpec;
|
||||
} else {
|
||||
targetDb = outSpec.db || database;
|
||||
targetColl = outSpec.coll;
|
||||
}
|
||||
|
||||
// Drop existing collection
|
||||
await storage.dropCollection(targetDb, targetColl);
|
||||
|
||||
// Create new collection and insert results
|
||||
await storage.createCollection(targetDb, targetColl);
|
||||
|
||||
for (const doc of results) {
|
||||
if (!doc._id) {
|
||||
doc._id = new plugins.bson.ObjectId();
|
||||
}
|
||||
await storage.insertOne(targetDb, targetColl, doc);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle $merge stage - merge results into a collection
|
||||
*/
|
||||
private async handleMerge(
|
||||
storage: any,
|
||||
database: string,
|
||||
results: plugins.bson.Document[],
|
||||
mergeSpec: any
|
||||
): Promise<void> {
|
||||
let targetDb = database;
|
||||
let targetColl: string;
|
||||
|
||||
if (typeof mergeSpec === 'string') {
|
||||
targetColl = mergeSpec;
|
||||
} else if (typeof mergeSpec.into === 'string') {
|
||||
targetColl = mergeSpec.into;
|
||||
} else {
|
||||
targetDb = mergeSpec.into.db || database;
|
||||
targetColl = mergeSpec.into.coll;
|
||||
}
|
||||
|
||||
const on = mergeSpec.on || '_id';
|
||||
const whenMatched = mergeSpec.whenMatched || 'merge';
|
||||
const whenNotMatched = mergeSpec.whenNotMatched || 'insert';
|
||||
|
||||
// Ensure target collection exists
|
||||
await storage.createCollection(targetDb, targetColl);
|
||||
|
||||
for (const doc of results) {
|
||||
// Find matching document
|
||||
const existingDocs = await storage.findAll(targetDb, targetColl);
|
||||
const onFields = Array.isArray(on) ? on : [on];
|
||||
|
||||
let matchingDoc = null;
|
||||
for (const existing of existingDocs) {
|
||||
let matches = true;
|
||||
for (const field of onFields) {
|
||||
if (JSON.stringify(existing[field]) !== JSON.stringify(doc[field])) {
|
||||
matches = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (matches) {
|
||||
matchingDoc = existing;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (matchingDoc) {
|
||||
// Handle whenMatched
|
||||
if (whenMatched === 'replace') {
|
||||
await storage.updateById(targetDb, targetColl, matchingDoc._id, doc);
|
||||
} else if (whenMatched === 'keepExisting') {
|
||||
// Do nothing
|
||||
} else if (whenMatched === 'merge') {
|
||||
const merged = { ...matchingDoc, ...doc };
|
||||
await storage.updateById(targetDb, targetColl, matchingDoc._id, merged);
|
||||
} else if (whenMatched === 'fail') {
|
||||
throw new Error('Document matched but whenMatched is fail');
|
||||
}
|
||||
} else {
|
||||
// Handle whenNotMatched
|
||||
if (whenNotMatched === 'insert') {
|
||||
if (!doc._id) {
|
||||
doc._id = new plugins.bson.ObjectId();
|
||||
}
|
||||
await storage.insertOne(targetDb, targetColl, doc);
|
||||
} else if (whenNotMatched === 'discard') {
|
||||
// Do nothing
|
||||
} else if (whenNotMatched === 'fail') {
|
||||
throw new Error('Document not matched but whenNotMatched is fail');
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
100
ts/congodb/server/handlers/DeleteHandler.ts
Normal file
100
ts/congodb/server/handlers/DeleteHandler.ts
Normal file
@@ -0,0 +1,100 @@
|
||||
import * as plugins from '../../congodb.plugins.js';
|
||||
import type { ICommandHandler, IHandlerContext } from '../CommandRouter.js';
|
||||
import { QueryEngine } from '../../engine/QueryEngine.js';
|
||||
|
||||
/**
|
||||
* DeleteHandler - Handles delete commands
|
||||
*/
|
||||
export class DeleteHandler implements ICommandHandler {
|
||||
async handle(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
const { storage, database, command, documentSequences } = context;
|
||||
|
||||
const collection = command.delete;
|
||||
if (typeof collection !== 'string') {
|
||||
return {
|
||||
ok: 0,
|
||||
errmsg: 'delete command requires a collection name',
|
||||
code: 2,
|
||||
codeName: 'BadValue',
|
||||
};
|
||||
}
|
||||
|
||||
// Get deletes from command or document sequences
|
||||
let deletes: plugins.bson.Document[] = command.deletes || [];
|
||||
|
||||
// Check for OP_MSG document sequences
|
||||
if (documentSequences && documentSequences.has('deletes')) {
|
||||
deletes = documentSequences.get('deletes')!;
|
||||
}
|
||||
|
||||
if (!Array.isArray(deletes) || deletes.length === 0) {
|
||||
return {
|
||||
ok: 0,
|
||||
errmsg: 'delete command requires deletes array',
|
||||
code: 2,
|
||||
codeName: 'BadValue',
|
||||
};
|
||||
}
|
||||
|
||||
const ordered = command.ordered !== false;
|
||||
const writeErrors: plugins.bson.Document[] = [];
|
||||
let totalDeleted = 0;
|
||||
|
||||
// Check if collection exists
|
||||
const exists = await storage.collectionExists(database, collection);
|
||||
if (!exists) {
|
||||
// Collection doesn't exist, return success with 0 deleted
|
||||
return { ok: 1, n: 0 };
|
||||
}
|
||||
|
||||
for (let i = 0; i < deletes.length; i++) {
|
||||
const deleteSpec = deletes[i];
|
||||
const filter = deleteSpec.q || deleteSpec.filter || {};
|
||||
const limit = deleteSpec.limit;
|
||||
|
||||
// limit: 0 means delete all matching, limit: 1 means delete one
|
||||
const deleteAll = limit === 0;
|
||||
|
||||
try {
|
||||
// Get all documents
|
||||
const documents = await storage.findAll(database, collection);
|
||||
|
||||
// Apply filter
|
||||
const matchingDocs = QueryEngine.filter(documents, filter);
|
||||
|
||||
if (matchingDocs.length === 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Determine which documents to delete
|
||||
const docsToDelete = deleteAll ? matchingDocs : matchingDocs.slice(0, 1);
|
||||
|
||||
// Delete the documents
|
||||
const idsToDelete = docsToDelete.map(doc => doc._id);
|
||||
const deleted = await storage.deleteByIds(database, collection, idsToDelete);
|
||||
totalDeleted += deleted;
|
||||
} catch (error: any) {
|
||||
writeErrors.push({
|
||||
index: i,
|
||||
code: error.code || 1,
|
||||
errmsg: error.message || 'Delete failed',
|
||||
});
|
||||
|
||||
if (ordered) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const response: plugins.bson.Document = {
|
||||
ok: 1,
|
||||
n: totalDeleted,
|
||||
};
|
||||
|
||||
if (writeErrors.length > 0) {
|
||||
response.writeErrors = writeErrors;
|
||||
}
|
||||
|
||||
return response;
|
||||
}
|
||||
}
|
||||
301
ts/congodb/server/handlers/FindHandler.ts
Normal file
301
ts/congodb/server/handlers/FindHandler.ts
Normal file
@@ -0,0 +1,301 @@
|
||||
import * as plugins from '../../congodb.plugins.js';
|
||||
import type { ICommandHandler, IHandlerContext, ICursorState } from '../CommandRouter.js';
|
||||
import { QueryEngine } from '../../engine/QueryEngine.js';
|
||||
|
||||
/**
|
||||
* FindHandler - Handles find, getMore, killCursors, count, distinct commands
|
||||
*/
|
||||
export class FindHandler implements ICommandHandler {
|
||||
private cursors: Map<bigint, ICursorState>;
|
||||
private nextCursorId: () => bigint;
|
||||
|
||||
constructor(
|
||||
cursors: Map<bigint, ICursorState>,
|
||||
nextCursorId: () => bigint
|
||||
) {
|
||||
this.cursors = cursors;
|
||||
this.nextCursorId = nextCursorId;
|
||||
}
|
||||
|
||||
async handle(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
const { command } = context;
|
||||
|
||||
// Determine which operation to perform
|
||||
if (command.find) {
|
||||
return this.handleFind(context);
|
||||
} else if (command.getMore !== undefined) {
|
||||
return this.handleGetMore(context);
|
||||
} else if (command.killCursors) {
|
||||
return this.handleKillCursors(context);
|
||||
} else if (command.count) {
|
||||
return this.handleCount(context);
|
||||
} else if (command.distinct) {
|
||||
return this.handleDistinct(context);
|
||||
}
|
||||
|
||||
return {
|
||||
ok: 0,
|
||||
errmsg: 'Unknown find-related command',
|
||||
code: 59,
|
||||
codeName: 'CommandNotFound',
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle find command
|
||||
*/
|
||||
private async handleFind(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
const { storage, database, command } = context;
|
||||
|
||||
const collection = command.find;
|
||||
const filter = command.filter || {};
|
||||
const projection = command.projection;
|
||||
const sort = command.sort;
|
||||
const skip = command.skip || 0;
|
||||
const limit = command.limit || 0;
|
||||
const batchSize = command.batchSize || 101;
|
||||
const singleBatch = command.singleBatch || false;
|
||||
|
||||
// Ensure collection exists
|
||||
const exists = await storage.collectionExists(database, collection);
|
||||
if (!exists) {
|
||||
// Return empty cursor for non-existent collection
|
||||
return {
|
||||
ok: 1,
|
||||
cursor: {
|
||||
id: plugins.bson.Long.fromNumber(0),
|
||||
ns: `${database}.${collection}`,
|
||||
firstBatch: [],
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// Get all documents
|
||||
let documents = await storage.findAll(database, collection);
|
||||
|
||||
// Apply filter
|
||||
documents = QueryEngine.filter(documents, filter);
|
||||
|
||||
// Apply sort
|
||||
if (sort) {
|
||||
documents = QueryEngine.sort(documents, sort);
|
||||
}
|
||||
|
||||
// Apply skip
|
||||
if (skip > 0) {
|
||||
documents = documents.slice(skip);
|
||||
}
|
||||
|
||||
// Apply limit
|
||||
if (limit > 0) {
|
||||
documents = documents.slice(0, limit);
|
||||
}
|
||||
|
||||
// Apply projection
|
||||
if (projection) {
|
||||
documents = QueryEngine.project(documents, projection) as any[];
|
||||
}
|
||||
|
||||
// Determine how many documents to return in first batch
|
||||
const effectiveBatchSize = Math.min(batchSize, documents.length);
|
||||
const firstBatch = documents.slice(0, effectiveBatchSize);
|
||||
const remaining = documents.slice(effectiveBatchSize);
|
||||
|
||||
// Create cursor if there are more documents
|
||||
let cursorId = BigInt(0);
|
||||
if (remaining.length > 0 && !singleBatch) {
|
||||
cursorId = this.nextCursorId();
|
||||
this.cursors.set(cursorId, {
|
||||
id: cursorId,
|
||||
database,
|
||||
collection,
|
||||
documents: remaining,
|
||||
position: 0,
|
||||
batchSize,
|
||||
createdAt: new Date(),
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
ok: 1,
|
||||
cursor: {
|
||||
id: plugins.bson.Long.fromBigInt(cursorId),
|
||||
ns: `${database}.${collection}`,
|
||||
firstBatch,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle getMore command
|
||||
*/
|
||||
private async handleGetMore(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
const { database, command } = context;
|
||||
|
||||
const cursorIdInput = command.getMore;
|
||||
const collection = command.collection;
|
||||
const batchSize = command.batchSize || 101;
|
||||
|
||||
// Convert cursorId to bigint
|
||||
let cursorId: bigint;
|
||||
if (typeof cursorIdInput === 'bigint') {
|
||||
cursorId = cursorIdInput;
|
||||
} else if (cursorIdInput instanceof plugins.bson.Long) {
|
||||
cursorId = cursorIdInput.toBigInt();
|
||||
} else {
|
||||
cursorId = BigInt(cursorIdInput);
|
||||
}
|
||||
|
||||
const cursor = this.cursors.get(cursorId);
|
||||
if (!cursor) {
|
||||
return {
|
||||
ok: 0,
|
||||
errmsg: `cursor id ${cursorId} not found`,
|
||||
code: 43,
|
||||
codeName: 'CursorNotFound',
|
||||
};
|
||||
}
|
||||
|
||||
// Verify namespace
|
||||
if (cursor.database !== database || cursor.collection !== collection) {
|
||||
return {
|
||||
ok: 0,
|
||||
errmsg: 'cursor namespace mismatch',
|
||||
code: 43,
|
||||
codeName: 'CursorNotFound',
|
||||
};
|
||||
}
|
||||
|
||||
// Get next batch
|
||||
const start = cursor.position;
|
||||
const end = Math.min(start + batchSize, cursor.documents.length);
|
||||
const nextBatch = cursor.documents.slice(start, end);
|
||||
cursor.position = end;
|
||||
|
||||
// Check if cursor is exhausted
|
||||
let returnCursorId = cursorId;
|
||||
if (cursor.position >= cursor.documents.length) {
|
||||
this.cursors.delete(cursorId);
|
||||
returnCursorId = BigInt(0);
|
||||
}
|
||||
|
||||
return {
|
||||
ok: 1,
|
||||
cursor: {
|
||||
id: plugins.bson.Long.fromBigInt(returnCursorId),
|
||||
ns: `${database}.${collection}`,
|
||||
nextBatch,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle killCursors command
|
||||
*/
|
||||
private async handleKillCursors(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
const { command } = context;
|
||||
|
||||
const collection = command.killCursors;
|
||||
const cursorIds = command.cursors || [];
|
||||
|
||||
const cursorsKilled: plugins.bson.Long[] = [];
|
||||
const cursorsNotFound: plugins.bson.Long[] = [];
|
||||
const cursorsUnknown: plugins.bson.Long[] = [];
|
||||
|
||||
for (const idInput of cursorIds) {
|
||||
let cursorId: bigint;
|
||||
if (typeof idInput === 'bigint') {
|
||||
cursorId = idInput;
|
||||
} else if (idInput instanceof plugins.bson.Long) {
|
||||
cursorId = idInput.toBigInt();
|
||||
} else {
|
||||
cursorId = BigInt(idInput);
|
||||
}
|
||||
|
||||
if (this.cursors.has(cursorId)) {
|
||||
this.cursors.delete(cursorId);
|
||||
cursorsKilled.push(plugins.bson.Long.fromBigInt(cursorId));
|
||||
} else {
|
||||
cursorsNotFound.push(plugins.bson.Long.fromBigInt(cursorId));
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
ok: 1,
|
||||
cursorsKilled,
|
||||
cursorsNotFound,
|
||||
cursorsUnknown,
|
||||
cursorsAlive: [],
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle count command
|
||||
*/
|
||||
private async handleCount(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
const { storage, database, command } = context;
|
||||
|
||||
const collection = command.count;
|
||||
const query = command.query || {};
|
||||
const skip = command.skip || 0;
|
||||
const limit = command.limit || 0;
|
||||
|
||||
// Check if collection exists
|
||||
const exists = await storage.collectionExists(database, collection);
|
||||
if (!exists) {
|
||||
return { ok: 1, n: 0 };
|
||||
}
|
||||
|
||||
// Get all documents
|
||||
let documents = await storage.findAll(database, collection);
|
||||
|
||||
// Apply filter
|
||||
documents = QueryEngine.filter(documents, query);
|
||||
|
||||
// Apply skip
|
||||
if (skip > 0) {
|
||||
documents = documents.slice(skip);
|
||||
}
|
||||
|
||||
// Apply limit
|
||||
if (limit > 0) {
|
||||
documents = documents.slice(0, limit);
|
||||
}
|
||||
|
||||
return { ok: 1, n: documents.length };
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle distinct command
|
||||
*/
|
||||
private async handleDistinct(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
const { storage, database, command } = context;
|
||||
|
||||
const collection = command.distinct;
|
||||
const key = command.key;
|
||||
const query = command.query || {};
|
||||
|
||||
if (!key) {
|
||||
return {
|
||||
ok: 0,
|
||||
errmsg: 'distinct requires a key',
|
||||
code: 2,
|
||||
codeName: 'BadValue',
|
||||
};
|
||||
}
|
||||
|
||||
// Check if collection exists
|
||||
const exists = await storage.collectionExists(database, collection);
|
||||
if (!exists) {
|
||||
return { ok: 1, values: [] };
|
||||
}
|
||||
|
||||
// Get all documents
|
||||
const documents = await storage.findAll(database, collection);
|
||||
|
||||
// Get distinct values
|
||||
const values = QueryEngine.distinct(documents, key, query);
|
||||
|
||||
return { ok: 1, values };
|
||||
}
|
||||
}
|
||||
78
ts/congodb/server/handlers/HelloHandler.ts
Normal file
78
ts/congodb/server/handlers/HelloHandler.ts
Normal file
@@ -0,0 +1,78 @@
|
||||
import * as plugins from '../../congodb.plugins.js';
|
||||
import type { ICommandHandler, IHandlerContext } from '../CommandRouter.js';
|
||||
|
||||
/**
|
||||
* HelloHandler - Handles hello/isMaster handshake commands
|
||||
*
|
||||
* This is the first command sent by MongoDB drivers to establish a connection.
|
||||
* It returns server capabilities and configuration.
|
||||
*/
|
||||
export class HelloHandler implements ICommandHandler {
|
||||
async handle(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
const { command, server } = context;
|
||||
|
||||
// Build response with server capabilities
|
||||
const response: plugins.bson.Document = {
|
||||
ismaster: true,
|
||||
ok: 1,
|
||||
|
||||
// Maximum sizes
|
||||
maxBsonObjectSize: 16777216, // 16 MB
|
||||
maxMessageSizeBytes: 48000000, // 48 MB
|
||||
maxWriteBatchSize: 100000, // 100k documents per batch
|
||||
|
||||
// Timestamps
|
||||
localTime: new Date(),
|
||||
|
||||
// Session support
|
||||
logicalSessionTimeoutMinutes: 30,
|
||||
|
||||
// Connection info
|
||||
connectionId: 1,
|
||||
|
||||
// Wire protocol versions (support MongoDB 3.6 through 7.0)
|
||||
minWireVersion: 0,
|
||||
maxWireVersion: 21,
|
||||
|
||||
// Server mode
|
||||
readOnly: false,
|
||||
|
||||
// Topology info (standalone mode)
|
||||
isWritablePrimary: true,
|
||||
|
||||
// Additional info
|
||||
topologyVersion: {
|
||||
processId: new plugins.bson.ObjectId(),
|
||||
counter: plugins.bson.Long.fromNumber(0),
|
||||
},
|
||||
};
|
||||
|
||||
// Handle hello-specific fields
|
||||
if (command.hello || command.hello === 1) {
|
||||
response.helloOk = true;
|
||||
}
|
||||
|
||||
// Handle client metadata
|
||||
if (command.client) {
|
||||
// Client is providing metadata about itself
|
||||
// We just acknowledge it - no need to do anything special
|
||||
}
|
||||
|
||||
// Handle SASL mechanisms query
|
||||
if (command.saslSupportedMechs) {
|
||||
response.saslSupportedMechs = [
|
||||
// We don't actually support auth, but the driver needs to see this
|
||||
];
|
||||
}
|
||||
|
||||
// Compression support (none for now)
|
||||
if (command.compression) {
|
||||
response.compression = [];
|
||||
}
|
||||
|
||||
// Server version info
|
||||
response.version = '7.0.0';
|
||||
|
||||
return response;
|
||||
}
|
||||
}
|
||||
207
ts/congodb/server/handlers/IndexHandler.ts
Normal file
207
ts/congodb/server/handlers/IndexHandler.ts
Normal file
@@ -0,0 +1,207 @@
|
||||
import * as plugins from '../../congodb.plugins.js';
|
||||
import type { ICommandHandler, IHandlerContext } from '../CommandRouter.js';
|
||||
import { IndexEngine } from '../../engine/IndexEngine.js';
|
||||
|
||||
// Cache of index engines per collection
|
||||
const indexEngines: Map<string, IndexEngine> = new Map();
|
||||
|
||||
/**
|
||||
* Get or create an IndexEngine for a collection
|
||||
*/
|
||||
function getIndexEngine(storage: any, database: string, collection: string): IndexEngine {
|
||||
const key = `${database}.${collection}`;
|
||||
let engine = indexEngines.get(key);
|
||||
|
||||
if (!engine) {
|
||||
engine = new IndexEngine(database, collection, storage);
|
||||
indexEngines.set(key, engine);
|
||||
}
|
||||
|
||||
return engine;
|
||||
}
|
||||
|
||||
/**
|
||||
* IndexHandler - Handles createIndexes, dropIndexes, listIndexes commands
|
||||
*/
|
||||
export class IndexHandler implements ICommandHandler {
|
||||
async handle(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
const { command } = context;
|
||||
|
||||
if (command.createIndexes) {
|
||||
return this.handleCreateIndexes(context);
|
||||
} else if (command.dropIndexes) {
|
||||
return this.handleDropIndexes(context);
|
||||
} else if (command.listIndexes) {
|
||||
return this.handleListIndexes(context);
|
||||
}
|
||||
|
||||
return {
|
||||
ok: 0,
|
||||
errmsg: 'Unknown index command',
|
||||
code: 59,
|
||||
codeName: 'CommandNotFound',
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle createIndexes command
|
||||
*/
|
||||
private async handleCreateIndexes(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
const { storage, database, command } = context;
|
||||
|
||||
const collection = command.createIndexes;
|
||||
const indexes = command.indexes || [];
|
||||
|
||||
if (!Array.isArray(indexes)) {
|
||||
return {
|
||||
ok: 0,
|
||||
errmsg: 'indexes must be an array',
|
||||
code: 2,
|
||||
codeName: 'BadValue',
|
||||
};
|
||||
}
|
||||
|
||||
// Ensure collection exists
|
||||
await storage.createCollection(database, collection);
|
||||
|
||||
const indexEngine = getIndexEngine(storage, database, collection);
|
||||
const createdNames: string[] = [];
|
||||
let numIndexesBefore = 0;
|
||||
let numIndexesAfter = 0;
|
||||
|
||||
try {
|
||||
const existingIndexes = await indexEngine.listIndexes();
|
||||
numIndexesBefore = existingIndexes.length;
|
||||
|
||||
for (const indexSpec of indexes) {
|
||||
const key = indexSpec.key;
|
||||
const options = {
|
||||
name: indexSpec.name,
|
||||
unique: indexSpec.unique,
|
||||
sparse: indexSpec.sparse,
|
||||
expireAfterSeconds: indexSpec.expireAfterSeconds,
|
||||
background: indexSpec.background,
|
||||
partialFilterExpression: indexSpec.partialFilterExpression,
|
||||
};
|
||||
|
||||
const name = await indexEngine.createIndex(key, options);
|
||||
createdNames.push(name);
|
||||
}
|
||||
|
||||
const finalIndexes = await indexEngine.listIndexes();
|
||||
numIndexesAfter = finalIndexes.length;
|
||||
} catch (error: any) {
|
||||
return {
|
||||
ok: 0,
|
||||
errmsg: error.message || 'Failed to create index',
|
||||
code: error.code || 1,
|
||||
codeName: error.codeName || 'InternalError',
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
ok: 1,
|
||||
numIndexesBefore,
|
||||
numIndexesAfter,
|
||||
createdCollectionAutomatically: false,
|
||||
commitQuorum: 'votingMembers',
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle dropIndexes command
|
||||
*/
|
||||
private async handleDropIndexes(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
const { storage, database, command } = context;
|
||||
|
||||
const collection = command.dropIndexes;
|
||||
const indexName = command.index;
|
||||
|
||||
// Check if collection exists
|
||||
const exists = await storage.collectionExists(database, collection);
|
||||
if (!exists) {
|
||||
return {
|
||||
ok: 0,
|
||||
errmsg: `ns not found ${database}.${collection}`,
|
||||
code: 26,
|
||||
codeName: 'NamespaceNotFound',
|
||||
};
|
||||
}
|
||||
|
||||
const indexEngine = getIndexEngine(storage, database, collection);
|
||||
|
||||
try {
|
||||
if (indexName === '*') {
|
||||
// Drop all indexes except _id
|
||||
await indexEngine.dropAllIndexes();
|
||||
} else if (typeof indexName === 'string') {
|
||||
// Drop specific index by name
|
||||
await indexEngine.dropIndex(indexName);
|
||||
} else if (typeof indexName === 'object') {
|
||||
// Drop index by key specification
|
||||
const indexes = await indexEngine.listIndexes();
|
||||
const keyStr = JSON.stringify(indexName);
|
||||
|
||||
for (const idx of indexes) {
|
||||
if (JSON.stringify(idx.key) === keyStr) {
|
||||
await indexEngine.dropIndex(idx.name);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { ok: 1, nIndexesWas: 1 };
|
||||
} catch (error: any) {
|
||||
return {
|
||||
ok: 0,
|
||||
errmsg: error.message || 'Failed to drop index',
|
||||
code: error.code || 27,
|
||||
codeName: error.codeName || 'IndexNotFound',
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle listIndexes command
|
||||
*/
|
||||
private async handleListIndexes(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
const { storage, database, command } = context;
|
||||
|
||||
const collection = command.listIndexes;
|
||||
const cursor = command.cursor || {};
|
||||
const batchSize = cursor.batchSize || 101;
|
||||
|
||||
// Check if collection exists
|
||||
const exists = await storage.collectionExists(database, collection);
|
||||
if (!exists) {
|
||||
return {
|
||||
ok: 0,
|
||||
errmsg: `ns not found ${database}.${collection}`,
|
||||
code: 26,
|
||||
codeName: 'NamespaceNotFound',
|
||||
};
|
||||
}
|
||||
|
||||
const indexEngine = getIndexEngine(storage, database, collection);
|
||||
const indexes = await indexEngine.listIndexes();
|
||||
|
||||
// Format indexes for response
|
||||
const indexDocs = indexes.map(idx => ({
|
||||
v: idx.v || 2,
|
||||
key: idx.key,
|
||||
name: idx.name,
|
||||
...(idx.unique ? { unique: idx.unique } : {}),
|
||||
...(idx.sparse ? { sparse: idx.sparse } : {}),
|
||||
...(idx.expireAfterSeconds !== undefined ? { expireAfterSeconds: idx.expireAfterSeconds } : {}),
|
||||
}));
|
||||
|
||||
return {
|
||||
ok: 1,
|
||||
cursor: {
|
||||
id: plugins.bson.Long.fromNumber(0),
|
||||
ns: `${database}.${collection}`,
|
||||
firstBatch: indexDocs,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
91
ts/congodb/server/handlers/InsertHandler.ts
Normal file
91
ts/congodb/server/handlers/InsertHandler.ts
Normal file
@@ -0,0 +1,91 @@
|
||||
import * as plugins from '../../congodb.plugins.js';
|
||||
import type { ICommandHandler, IHandlerContext } from '../CommandRouter.js';
|
||||
|
||||
/**
|
||||
* InsertHandler - Handles insert commands
|
||||
*/
|
||||
export class InsertHandler implements ICommandHandler {
|
||||
async handle(context: IHandlerContext): Promise<plugins.bson.Document> {
|
||||
const { storage, database, command, documentSequences } = context;
|
||||
|
||||
const collection = command.insert;
|
||||
if (typeof collection !== 'string') {
|
||||
return {
|
||||
ok: 0,
|
||||
errmsg: 'insert command requires a collection name',
|
||||
code: 2,
|
||||
codeName: 'BadValue',
|
||||
};
|
||||
}
|
||||
|
||||
// Get documents from command or document sequences
|
||||
let documents: plugins.bson.Document[] = command.documents || [];
|
||||
|
||||
// Check for OP_MSG document sequences (for bulk inserts)
|
||||
if (documentSequences && documentSequences.has('documents')) {
|
||||
documents = documentSequences.get('documents')!;
|
||||
}
|
||||
|
||||
if (!Array.isArray(documents) || documents.length === 0) {
|
||||
return {
|
||||
ok: 0,
|
||||
errmsg: 'insert command requires documents array',
|
||||
code: 2,
|
||||
codeName: 'BadValue',
|
||||
};
|
||||
}
|
||||
|
||||
const ordered = command.ordered !== false;
|
||||
const writeErrors: plugins.bson.Document[] = [];
|
||||
let insertedCount = 0;
|
||||
|
||||
// Ensure collection exists
|
||||
await storage.createCollection(database, collection);
|
||||
|
||||
// Insert documents
|
||||
for (let i = 0; i < documents.length; i++) {
|
||||
const doc = documents[i];
|
||||
|
||||
try {
|
||||
// Ensure _id exists
|
||||
if (!doc._id) {
|
||||
doc._id = new plugins.bson.ObjectId();
|
||||
}
|
||||
|
||||
await storage.insertOne(database, collection, doc);
|
||||
insertedCount++;
|
||||
} catch (error: any) {
|
||||
const writeError: plugins.bson.Document = {
|
||||
index: i,
|
||||
code: error.code || 11000,
|
||||
errmsg: error.message || 'Insert failed',
|
||||
};
|
||||
|
||||
// Check for duplicate key error
|
||||
if (error.message?.includes('Duplicate key')) {
|
||||
writeError.code = 11000;
|
||||
writeError.keyPattern = { _id: 1 };
|
||||
writeError.keyValue = { _id: doc._id };
|
||||
}
|
||||
|
||||
writeErrors.push(writeError);
|
||||
|
||||
if (ordered) {
|
||||
// Stop on first error for ordered inserts
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const response: plugins.bson.Document = {
|
||||
ok: 1,
|
||||
n: insertedCount,
|
||||
};
|
||||
|
||||
if (writeErrors.length > 0) {
|
||||
response.writeErrors = writeErrors;
|
||||
}
|
||||
|
||||
return response;
|
||||
}
|
||||
}
|
||||
315
ts/congodb/server/handlers/UpdateHandler.ts
Normal file
315
ts/congodb/server/handlers/UpdateHandler.ts
Normal file
@@ -0,0 +1,315 @@
|
||||
import * as plugins from '../../congodb.plugins.js';
|
||||
import type { ICommandHandler, IHandlerContext } from '../CommandRouter.js';
|
||||
import { QueryEngine } from '../../engine/QueryEngine.js';
|
||||
import { UpdateEngine } from '../../engine/UpdateEngine.js';
|
||||
|
||||
/**
 * UpdateHandler - Handles update, findAndModify commands
 *
 * Both operations load the full collection, match with QueryEngine and
 * apply modifications via UpdateEngine. Upserts synthesize a new document
 * from the filter's equality conditions before applying the update.
 */
export class UpdateHandler implements ICommandHandler {
  /**
   * Route to the right operation. findAndModify is checked first because
   * it also carries an 'update' field; for the plain update command,
   * 'update' holds the collection name (a string), which disambiguates.
   */
  async handle(context: IHandlerContext): Promise<plugins.bson.Document> {
    const { command } = context;

    // Check findAndModify first since it also has an 'update' field
    if (command.findAndModify) {
      return this.handleFindAndModify(context);
    } else if (command.update && typeof command.update === 'string') {
      // 'update' command has collection name as the value
      return this.handleUpdate(context);
    }

    return {
      ok: 0,
      errmsg: 'Unknown update-related command',
      code: 59,
      codeName: 'CommandNotFound',
    };
  }

  /**
   * Handle the update command.
   *
   * Each update spec is processed independently: matching documents are
   * rewritten via UpdateEngine.applyUpdate; when nothing matches and
   * upsert is set, a new document is synthesized from the filter's
   * equality fields and inserted. The response reports
   * n = matched + upserted and nModified = documents actually changed.
   * Ordered updates stop at the first failing spec.
   */
  private async handleUpdate(context: IHandlerContext): Promise<plugins.bson.Document> {
    const { storage, database, command, documentSequences } = context;

    const collection = command.update;
    if (typeof collection !== 'string') {
      return {
        ok: 0,
        errmsg: 'update command requires a collection name',
        code: 2,
        codeName: 'BadValue',
      };
    }

    // Get updates from command or document sequences
    let updates: plugins.bson.Document[] = command.updates || [];

    // Check for OP_MSG document sequences
    if (documentSequences && documentSequences.has('updates')) {
      updates = documentSequences.get('updates')!;
    }

    if (!Array.isArray(updates) || updates.length === 0) {
      return {
        ok: 0,
        errmsg: 'update command requires updates array',
        code: 2,
        codeName: 'BadValue',
      };
    }

    const ordered = command.ordered !== false;
    const writeErrors: plugins.bson.Document[] = [];
    let totalMatched = 0;
    let totalModified = 0;
    let totalUpserted = 0;
    const upserted: plugins.bson.Document[] = [];

    // Ensure collection exists
    await storage.createCollection(database, collection);

    for (let i = 0; i < updates.length; i++) {
      const updateSpec = updates[i];
      // Accept both wire-protocol field names (q/u) and their long forms.
      const filter = updateSpec.q || updateSpec.filter || {};
      const update = updateSpec.u || updateSpec.update || {};
      const multi = updateSpec.multi || false;
      const upsert = updateSpec.upsert || false;
      const arrayFilters = updateSpec.arrayFilters;

      try {
        // Get all documents
        let documents = await storage.findAll(database, collection);

        // Apply filter
        let matchingDocs = QueryEngine.filter(documents, filter);

        if (matchingDocs.length === 0 && upsert) {
          // Upsert: create new document
          const newDoc: plugins.bson.Document = { _id: new plugins.bson.ObjectId() };

          // Apply filter fields to the new document
          this.applyFilterToDoc(newDoc, filter);

          // Apply update
          const updatedDoc = UpdateEngine.applyUpdate(newDoc as any, update, arrayFilters);

          // Handle $setOnInsert
          // NOTE(review): applied after applyUpdate - assumes applyUpdate
          // itself does not process $setOnInsert; confirm against
          // UpdateEngine to rule out double application.
          if (update.$setOnInsert) {
            Object.assign(updatedDoc, update.$setOnInsert);
          }

          await storage.insertOne(database, collection, updatedDoc);
          totalUpserted++;
          upserted.push({ index: i, _id: updatedDoc._id });
        } else {
          // Update existing documents
          // multi: false updates only the first match.
          const docsToUpdate = multi ? matchingDocs : matchingDocs.slice(0, 1);
          totalMatched += docsToUpdate.length;

          for (const doc of docsToUpdate) {
            const updatedDoc = UpdateEngine.applyUpdate(doc, update, arrayFilters);

            // Check if document actually changed
            // NOTE(review): JSON.stringify comparison is key-order
            // sensitive and lossy for BSON types (ObjectId, Date) -
            // may over- or under-report nModified in edge cases.
            const changed = JSON.stringify(doc) !== JSON.stringify(updatedDoc);
            if (changed) {
              await storage.updateById(database, collection, doc._id, updatedDoc);
              totalModified++;
            }
          }
        }
      } catch (error: any) {
        writeErrors.push({
          index: i,
          code: error.code || 1,
          errmsg: error.message || 'Update failed',
        });

        // Ordered updates abort on the first failing spec.
        if (ordered) {
          break;
        }
      }
    }

    // Per MongoDB semantics, n counts matched plus upserted documents.
    const response: plugins.bson.Document = {
      ok: 1,
      n: totalMatched + totalUpserted,
      nModified: totalModified,
    };

    if (upserted.length > 0) {
      response.upserted = upserted;
    }

    if (writeErrors.length > 0) {
      response.writeErrors = writeErrors;
    }

    return response;
  }

  /**
   * Handle the findAndModify command.
   *
   * Atomically (within this single-threaded server) finds the first
   * document matching `query` (after optional sort) and either removes it
   * or applies `update` to it, optionally upserting. Returns the original
   * or updated document in `value` depending on `new`, projected through
   * `fields` when given.
   */
  private async handleFindAndModify(context: IHandlerContext): Promise<plugins.bson.Document> {
    const { storage, database, command } = context;

    const collection = command.findAndModify;
    const query = command.query || {};
    const update = command.update;
    const remove = command.remove || false;
    const returnNew = command.new || false;
    const upsert = command.upsert || false;
    const sort = command.sort;
    const fields = command.fields;
    const arrayFilters = command.arrayFilters;

    // Validate - either update or remove, not both
    if (update && remove) {
      return {
        ok: 0,
        errmsg: 'cannot specify both update and remove',
        code: 2,
        codeName: 'BadValue',
      };
    }

    if (!update && !remove) {
      return {
        ok: 0,
        errmsg: 'either update or remove is required',
        code: 2,
        codeName: 'BadValue',
      };
    }

    // Ensure collection exists
    await storage.createCollection(database, collection);

    // Get matching documents
    let documents = await storage.findAll(database, collection);
    let matchingDocs = QueryEngine.filter(documents, query);

    // Apply sort if specified
    if (sort) {
      matchingDocs = QueryEngine.sort(matchingDocs, sort);
    }

    // Only the first match (after sorting) is modified.
    const doc = matchingDocs[0];

    if (remove) {
      // Delete operation
      if (!doc) {
        return { ok: 1, value: null };
      }

      await storage.deleteById(database, collection, doc._id);

      let result = doc;
      if (fields) {
        result = QueryEngine.project([doc], fields)[0] as any;
      }

      return {
        ok: 1,
        value: result,
        lastErrorObject: {
          n: 1,
        },
      };
    } else {
      // Update operation
      if (!doc && !upsert) {
        return { ok: 1, value: null };
      }

      let resultDoc: plugins.bson.Document;
      let originalDoc: plugins.bson.Document | null = null;
      let isUpsert = false;

      if (doc) {
        // Update existing
        // NOTE(review): shallow copy - if applyUpdate mutates nested
        // objects of doc in place, originalDoc's nested values would be
        // affected too; confirm applyUpdate returns a fresh document.
        originalDoc = { ...doc };
        resultDoc = UpdateEngine.applyUpdate(doc, update, arrayFilters);
        await storage.updateById(database, collection, doc._id, resultDoc as any);
      } else {
        // Upsert
        isUpsert = true;
        const newDoc: plugins.bson.Document = { _id: new plugins.bson.ObjectId() };
        this.applyFilterToDoc(newDoc, query);
        resultDoc = UpdateEngine.applyUpdate(newDoc as any, update, arrayFilters);

        if (update.$setOnInsert) {
          Object.assign(resultDoc, update.$setOnInsert);
        }

        await storage.insertOne(database, collection, resultDoc);
      }

      // Apply projection
      // 'new: true' returns the post-update document, else the original.
      let returnValue = returnNew ? resultDoc : (originalDoc || null);
      if (returnValue && fields) {
        returnValue = QueryEngine.project([returnValue as any], fields)[0];
      }

      const response: plugins.bson.Document = {
        ok: 1,
        value: returnValue,
        lastErrorObject: {
          n: 1,
          updatedExisting: !isUpsert && doc !== undefined,
        },
      };

      if (isUpsert) {
        response.lastErrorObject.upserted = resultDoc._id;
      }

      return response;
    }
  }

  /**
   * Apply filter equality conditions to a new document (for upsert).
   *
   * Copies plain `field: value` pairs (and `field: {$eq: value}`) from the
   * filter onto the document so the upserted document matches the query.
   * Top-level operators ($and, $or, ...) and non-$eq operator objects are
   * skipped.
   */
  private applyFilterToDoc(doc: plugins.bson.Document, filter: plugins.bson.Document): void {
    for (const [key, value] of Object.entries(filter)) {
      // Skip operators
      if (key.startsWith('$')) continue;

      // Handle nested paths
      if (typeof value === 'object' && value !== null) {
        // Check if it's an operator
        const valueKeys = Object.keys(value);
        if (valueKeys.some(k => k.startsWith('$'))) {
          // Extract equality value from $eq if present
          if ('$eq' in value) {
            this.setNestedValue(doc, key, value.$eq);
          }
          continue;
        }
      }

      // Direct value assignment
      this.setNestedValue(doc, key, value);
    }
  }

  /**
   * Set a nested value using dot notation, creating intermediate objects
   * along the path as needed (e.g. 'a.b.c' -> {a: {b: {c: value}}}).
   */
  private setNestedValue(obj: plugins.bson.Document, path: string, value: any): void {
    const parts = path.split('.');
    let current = obj;

    for (let i = 0; i < parts.length - 1; i++) {
      const part = parts[i];
      if (!(part in current)) {
        current[part] = {};
      }
      current = current[part];
    }

    current[parts[parts.length - 1]] = value;
  }
}
|
||||
10
ts/congodb/server/handlers/index.ts
Normal file
10
ts/congodb/server/handlers/index.ts
Normal file
@@ -0,0 +1,10 @@
|
||||
// Export all command handlers
|
||||
|
||||
export { HelloHandler } from './HelloHandler.js';
|
||||
export { InsertHandler } from './InsertHandler.js';
|
||||
export { FindHandler } from './FindHandler.js';
|
||||
export { UpdateHandler } from './UpdateHandler.js';
|
||||
export { DeleteHandler } from './DeleteHandler.js';
|
||||
export { AggregateHandler } from './AggregateHandler.js';
|
||||
export { IndexHandler } from './IndexHandler.js';
|
||||
export { AdminHandler } from './AdminHandler.js';
|
||||
Reference in New Issue
Block a user