import * as plugins from '../tsmdb.plugins.js';
import type { IStorageAdapter } from './IStorageAdapter.js';
import type { IStoredDocument, IOpLogEntry, Document } from '../types/interfaces.js';
/**
 * File-based storage adapter for TsmDB
 * Stores data in JSON files on disk for persistence
 */
export class FileStorageAdapter implements IStorageAdapter {
|
|
|
|
|
private basePath: string;
|
|
|
|
|
private opLogCounter = 0;
|
|
|
|
|
private initialized = false;
|
|
|
|
|
private fs = new plugins.smartfs.SmartFs(new plugins.smartfs.SmartFsProviderNode());
|
|
|
|
|
|
|
|
|
|
constructor(basePath: string) {
|
|
|
|
|
this.basePath = basePath;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// ============================================================================
|
|
|
|
|
// Helper Methods
|
|
|
|
|
// ============================================================================
|
|
|
|
|
|
|
|
|
|
private getDbPath(dbName: string): string {
|
|
|
|
|
return plugins.smartpath.join(this.basePath, dbName);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
private getCollectionPath(dbName: string, collName: string): string {
|
|
|
|
|
return plugins.smartpath.join(this.basePath, dbName, `${collName}.json`);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
private getIndexPath(dbName: string, collName: string): string {
|
|
|
|
|
return plugins.smartpath.join(this.basePath, dbName, `${collName}.indexes.json`);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
private getOpLogPath(): string {
|
|
|
|
|
return plugins.smartpath.join(this.basePath, '_oplog.json');
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
private getMetaPath(): string {
|
|
|
|
|
return plugins.smartpath.join(this.basePath, '_meta.json');
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
private async readJsonFile<T>(filePath: string, defaultValue: T): Promise<T> {
|
|
|
|
|
try {
|
|
|
|
|
const exists = await this.fs.file(filePath).exists();
|
|
|
|
|
if (!exists) return defaultValue;
|
|
|
|
|
const content = await this.fs.file(filePath).encoding('utf8').read();
|
|
|
|
|
return JSON.parse(content as string);
|
|
|
|
|
} catch {
|
|
|
|
|
return defaultValue;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
private async writeJsonFile(filePath: string, data: any): Promise<void> {
|
|
|
|
|
const dir = filePath.substring(0, filePath.lastIndexOf('/'));
|
|
|
|
|
await this.fs.directory(dir).recursive().create();
|
|
|
|
|
await this.fs.file(filePath).encoding('utf8').write(JSON.stringify(data, null, 2));
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
private restoreObjectIds(doc: any): IStoredDocument {
|
|
|
|
|
if (doc._id) {
|
|
|
|
|
if (typeof doc._id === 'string') {
|
|
|
|
|
doc._id = new plugins.bson.ObjectId(doc._id);
|
|
|
|
|
} else if (typeof doc._id === 'object' && doc._id.$oid) {
|
|
|
|
|
doc._id = new plugins.bson.ObjectId(doc._id.$oid);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return doc;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// ============================================================================
|
|
|
|
|
// Initialization
|
|
|
|
|
// ============================================================================
|
|
|
|
|
|
|
|
|
|
async initialize(): Promise<void> {
|
|
|
|
|
if (this.initialized) return;
|
|
|
|
|
|
|
|
|
|
await this.fs.directory(this.basePath).recursive().create();
|
|
|
|
|
|
|
|
|
|
// Load metadata
|
|
|
|
|
const meta = await this.readJsonFile(this.getMetaPath(), { opLogCounter: 0 });
|
|
|
|
|
this.opLogCounter = meta.opLogCounter || 0;
|
|
|
|
|
|
|
|
|
|
this.initialized = true;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
async close(): Promise<void> {
|
|
|
|
|
// Save metadata
|
|
|
|
|
await this.writeJsonFile(this.getMetaPath(), { opLogCounter: this.opLogCounter });
|
|
|
|
|
this.initialized = false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// ============================================================================
|
|
|
|
|
// Database Operations
|
|
|
|
|
// ============================================================================
|
|
|
|
|
|
|
|
|
|
async listDatabases(): Promise<string[]> {
|
|
|
|
|
await this.initialize();
|
|
|
|
|
try {
|
|
|
|
|
const entries = await this.fs.directory(this.basePath).list();
|
|
|
|
|
return entries
|
|
|
|
|
.filter(entry => entry.isDirectory && !entry.name.startsWith('_'))
|
|
|
|
|
.map(entry => entry.name);
|
|
|
|
|
} catch {
|
|
|
|
|
return [];
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
async createDatabase(dbName: string): Promise<void> {
|
|
|
|
|
await this.initialize();
|
|
|
|
|
const dbPath = this.getDbPath(dbName);
|
|
|
|
|
await this.fs.directory(dbPath).recursive().create();
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
async dropDatabase(dbName: string): Promise<boolean> {
|
|
|
|
|
await this.initialize();
|
|
|
|
|
const dbPath = this.getDbPath(dbName);
|
|
|
|
|
try {
|
|
|
|
|
const exists = await this.fs.directory(dbPath).exists();
|
|
|
|
|
if (exists) {
|
|
|
|
|
await this.fs.directory(dbPath).recursive().delete();
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
return false;
|
|
|
|
|
} catch {
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
async databaseExists(dbName: string): Promise<boolean> {
|
|
|
|
|
await this.initialize();
|
|
|
|
|
const dbPath = this.getDbPath(dbName);
|
|
|
|
|
return this.fs.directory(dbPath).exists();
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// ============================================================================
|
|
|
|
|
// Collection Operations
|
|
|
|
|
// ============================================================================
|
|
|
|
|
|
|
|
|
|
async listCollections(dbName: string): Promise<string[]> {
|
|
|
|
|
await this.initialize();
|
|
|
|
|
const dbPath = this.getDbPath(dbName);
|
|
|
|
|
try {
|
|
|
|
|
const entries = await this.fs.directory(dbPath).list();
|
|
|
|
|
return entries
|
|
|
|
|
.filter(entry => entry.isFile && entry.name.endsWith('.json') && !entry.name.endsWith('.indexes.json'))
|
|
|
|
|
.map(entry => entry.name.replace('.json', ''));
|
|
|
|
|
} catch {
|
|
|
|
|
return [];
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
async createCollection(dbName: string, collName: string): Promise<void> {
|
|
|
|
|
await this.createDatabase(dbName);
|
|
|
|
|
const collPath = this.getCollectionPath(dbName, collName);
|
|
|
|
|
const exists = await this.fs.file(collPath).exists();
|
|
|
|
|
if (!exists) {
|
|
|
|
|
await this.writeJsonFile(collPath, []);
|
|
|
|
|
// Create default _id index
|
|
|
|
|
await this.writeJsonFile(this.getIndexPath(dbName, collName), [
|
|
|
|
|
{ name: '_id_', key: { _id: 1 }, unique: true }
|
|
|
|
|
]);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
async dropCollection(dbName: string, collName: string): Promise<boolean> {
|
|
|
|
|
await this.initialize();
|
|
|
|
|
const collPath = this.getCollectionPath(dbName, collName);
|
|
|
|
|
const indexPath = this.getIndexPath(dbName, collName);
|
|
|
|
|
try {
|
|
|
|
|
const exists = await this.fs.file(collPath).exists();
|
|
|
|
|
if (exists) {
|
|
|
|
|
await this.fs.file(collPath).delete();
|
|
|
|
|
try {
|
|
|
|
|
await this.fs.file(indexPath).delete();
|
|
|
|
|
} catch {}
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
return false;
|
|
|
|
|
} catch {
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
async collectionExists(dbName: string, collName: string): Promise<boolean> {
|
|
|
|
|
await this.initialize();
|
|
|
|
|
const collPath = this.getCollectionPath(dbName, collName);
|
|
|
|
|
return this.fs.file(collPath).exists();
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
async renameCollection(dbName: string, oldName: string, newName: string): Promise<void> {
|
|
|
|
|
await this.initialize();
|
|
|
|
|
const oldPath = this.getCollectionPath(dbName, oldName);
|
|
|
|
|
const newPath = this.getCollectionPath(dbName, newName);
|
|
|
|
|
const oldIndexPath = this.getIndexPath(dbName, oldName);
|
|
|
|
|
const newIndexPath = this.getIndexPath(dbName, newName);
|
|
|
|
|
|
|
|
|
|
const exists = await this.fs.file(oldPath).exists();
|
|
|
|
|
if (!exists) {
|
|
|
|
|
throw new Error(`Collection ${oldName} not found`);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Read, write to new, delete old
|
|
|
|
|
const docs = await this.readJsonFile<any[]>(oldPath, []);
|
|
|
|
|
await this.writeJsonFile(newPath, docs);
|
|
|
|
|
await this.fs.file(oldPath).delete();
|
|
|
|
|
|
|
|
|
|
// Handle indexes
|
|
|
|
|
const indexes = await this.readJsonFile<any[]>(oldIndexPath, []);
|
|
|
|
|
await this.writeJsonFile(newIndexPath, indexes);
|
|
|
|
|
try {
|
|
|
|
|
await this.fs.file(oldIndexPath).delete();
|
|
|
|
|
} catch {}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// ============================================================================
|
|
|
|
|
// Document Operations
|
|
|
|
|
// ============================================================================
|
|
|
|
|
|
|
|
|
|
async insertOne(dbName: string, collName: string, doc: Document): Promise<IStoredDocument> {
|
|
|
|
|
await this.createCollection(dbName, collName);
|
|
|
|
|
const collPath = this.getCollectionPath(dbName, collName);
|
|
|
|
|
const docs = await this.readJsonFile<any[]>(collPath, []);
|
|
|
|
|
|
|
|
|
|
const storedDoc: IStoredDocument = {
|
|
|
|
|
...doc,
|
|
|
|
|
_id: doc._id ? (doc._id instanceof plugins.bson.ObjectId ? doc._id : new plugins.bson.ObjectId(doc._id)) : new plugins.bson.ObjectId(),
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
// Check for duplicate
|
|
|
|
|
const idStr = storedDoc._id.toHexString();
|
|
|
|
|
if (docs.some(d => d._id === idStr || (d._id && d._id.toString() === idStr))) {
|
|
|
|
|
throw new Error(`Duplicate key error: _id ${idStr}`);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
docs.push(storedDoc);
|
|
|
|
|
await this.writeJsonFile(collPath, docs);
|
|
|
|
|
return storedDoc;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
async insertMany(dbName: string, collName: string, docsToInsert: Document[]): Promise<IStoredDocument[]> {
|
|
|
|
|
await this.createCollection(dbName, collName);
|
|
|
|
|
const collPath = this.getCollectionPath(dbName, collName);
|
|
|
|
|
const docs = await this.readJsonFile<any[]>(collPath, []);
|
|
|
|
|
|
|
|
|
|
const results: IStoredDocument[] = [];
|
|
|
|
|
const existingIds = new Set(docs.map(d => d._id?.toString?.() || d._id));
|
|
|
|
|
|
|
|
|
|
for (const doc of docsToInsert) {
|
|
|
|
|
const storedDoc: IStoredDocument = {
|
|
|
|
|
...doc,
|
|
|
|
|
_id: doc._id ? (doc._id instanceof plugins.bson.ObjectId ? doc._id : new plugins.bson.ObjectId(doc._id)) : new plugins.bson.ObjectId(),
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
const idStr = storedDoc._id.toHexString();
|
|
|
|
|
if (existingIds.has(idStr)) {
|
|
|
|
|
throw new Error(`Duplicate key error: _id ${idStr}`);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
existingIds.add(idStr);
|
|
|
|
|
docs.push(storedDoc);
|
|
|
|
|
results.push(storedDoc);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
await this.writeJsonFile(collPath, docs);
|
|
|
|
|
return results;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
async findAll(dbName: string, collName: string): Promise<IStoredDocument[]> {
|
|
|
|
|
await this.createCollection(dbName, collName);
|
|
|
|
|
const collPath = this.getCollectionPath(dbName, collName);
|
|
|
|
|
const docs = await this.readJsonFile<any[]>(collPath, []);
|
|
|
|
|
return docs.map(doc => this.restoreObjectIds(doc));
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
async findById(dbName: string, collName: string, id: plugins.bson.ObjectId): Promise<IStoredDocument | null> {
|
|
|
|
|
const docs = await this.findAll(dbName, collName);
|
|
|
|
|
const idStr = id.toHexString();
|
|
|
|
|
return docs.find(d => d._id.toHexString() === idStr) || null;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
async updateById(dbName: string, collName: string, id: plugins.bson.ObjectId, doc: IStoredDocument): Promise<boolean> {
|
|
|
|
|
const collPath = this.getCollectionPath(dbName, collName);
|
|
|
|
|
const docs = await this.readJsonFile<any[]>(collPath, []);
|
|
|
|
|
const idStr = id.toHexString();
|
|
|
|
|
|
|
|
|
|
const idx = docs.findIndex(d => {
|
|
|
|
|
const docId = d._id?.toHexString?.() || d._id?.toString?.() || d._id;
|
|
|
|
|
return docId === idStr;
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
if (idx === -1) return false;
|
|
|
|
|
|
|
|
|
|
docs[idx] = doc;
|
|
|
|
|
await this.writeJsonFile(collPath, docs);
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
async deleteById(dbName: string, collName: string, id: plugins.bson.ObjectId): Promise<boolean> {
|
|
|
|
|
const collPath = this.getCollectionPath(dbName, collName);
|
|
|
|
|
const docs = await this.readJsonFile<any[]>(collPath, []);
|
|
|
|
|
const idStr = id.toHexString();
|
|
|
|
|
|
|
|
|
|
const idx = docs.findIndex(d => {
|
|
|
|
|
const docId = d._id?.toHexString?.() || d._id?.toString?.() || d._id;
|
|
|
|
|
return docId === idStr;
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
if (idx === -1) return false;
|
|
|
|
|
|
|
|
|
|
docs.splice(idx, 1);
|
|
|
|
|
await this.writeJsonFile(collPath, docs);
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
async deleteByIds(dbName: string, collName: string, ids: plugins.bson.ObjectId[]): Promise<number> {
|
|
|
|
|
const collPath = this.getCollectionPath(dbName, collName);
|
|
|
|
|
const docs = await this.readJsonFile<any[]>(collPath, []);
|
|
|
|
|
const idStrs = new Set(ids.map(id => id.toHexString()));
|
|
|
|
|
|
|
|
|
|
const originalLength = docs.length;
|
|
|
|
|
const filtered = docs.filter(d => {
|
|
|
|
|
const docId = d._id?.toHexString?.() || d._id?.toString?.() || d._id;
|
|
|
|
|
return !idStrs.has(docId);
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
await this.writeJsonFile(collPath, filtered);
|
|
|
|
|
return originalLength - filtered.length;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
async count(dbName: string, collName: string): Promise<number> {
|
|
|
|
|
const collPath = this.getCollectionPath(dbName, collName);
|
|
|
|
|
const docs = await this.readJsonFile<any[]>(collPath, []);
|
|
|
|
|
return docs.length;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// ============================================================================
|
|
|
|
|
// Index Operations
|
|
|
|
|
// ============================================================================
|
|
|
|
|
|
|
|
|
|
async saveIndex(
|
|
|
|
|
dbName: string,
|
|
|
|
|
collName: string,
|
|
|
|
|
indexName: string,
|
|
|
|
|
indexSpec: { key: Record<string, any>; unique?: boolean; sparse?: boolean; expireAfterSeconds?: number }
|
|
|
|
|
): Promise<void> {
|
|
|
|
|
await this.createCollection(dbName, collName);
|
|
|
|
|
const indexPath = this.getIndexPath(dbName, collName);
|
|
|
|
|
const indexes = await this.readJsonFile<any[]>(indexPath, [
|
|
|
|
|
{ name: '_id_', key: { _id: 1 }, unique: true }
|
|
|
|
|
]);
|
|
|
|
|
|
|
|
|
|
const existingIdx = indexes.findIndex(i => i.name === indexName);
|
|
|
|
|
if (existingIdx >= 0) {
|
|
|
|
|
indexes[existingIdx] = { name: indexName, ...indexSpec };
|
|
|
|
|
} else {
|
|
|
|
|
indexes.push({ name: indexName, ...indexSpec });
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
await this.writeJsonFile(indexPath, indexes);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
async getIndexes(dbName: string, collName: string): Promise<Array<{
|
|
|
|
|
name: string;
|
|
|
|
|
key: Record<string, any>;
|
|
|
|
|
unique?: boolean;
|
|
|
|
|
sparse?: boolean;
|
|
|
|
|
expireAfterSeconds?: number;
|
|
|
|
|
}>> {
|
|
|
|
|
const indexPath = this.getIndexPath(dbName, collName);
|
|
|
|
|
return this.readJsonFile(indexPath, [{ name: '_id_', key: { _id: 1 }, unique: true }]);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
async dropIndex(dbName: string, collName: string, indexName: string): Promise<boolean> {
|
|
|
|
|
if (indexName === '_id_') {
|
|
|
|
|
throw new Error('Cannot drop _id index');
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
const indexPath = this.getIndexPath(dbName, collName);
|
|
|
|
|
const indexes = await this.readJsonFile<any[]>(indexPath, []);
|
|
|
|
|
|
|
|
|
|
const idx = indexes.findIndex(i => i.name === indexName);
|
|
|
|
|
if (idx >= 0) {
|
|
|
|
|
indexes.splice(idx, 1);
|
|
|
|
|
await this.writeJsonFile(indexPath, indexes);
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// ============================================================================
|
|
|
|
|
// OpLog Operations
|
|
|
|
|
// ============================================================================
|
|
|
|
|
|
|
|
|
|
async appendOpLog(entry: IOpLogEntry): Promise<void> {
|
|
|
|
|
const opLogPath = this.getOpLogPath();
|
|
|
|
|
const opLog = await this.readJsonFile<IOpLogEntry[]>(opLogPath, []);
|
|
|
|
|
opLog.push(entry);
|
|
|
|
|
|
|
|
|
|
// Trim oplog if it gets too large
|
|
|
|
|
if (opLog.length > 10000) {
|
|
|
|
|
opLog.splice(0, opLog.length - 10000);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
await this.writeJsonFile(opLogPath, opLog);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
async getOpLogAfter(ts: plugins.bson.Timestamp, limit: number = 1000): Promise<IOpLogEntry[]> {
|
|
|
|
|
const opLogPath = this.getOpLogPath();
|
|
|
|
|
const opLog = await this.readJsonFile<any[]>(opLogPath, []);
|
|
|
|
|
const tsValue = ts.toNumber();
|
|
|
|
|
|
|
|
|
|
const entries = opLog.filter(e => {
|
|
|
|
|
const entryTs = e.ts.toNumber ? e.ts.toNumber() : (e.ts.t * 4294967296 + e.ts.i);
|
|
|
|
|
return entryTs > tsValue;
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
return entries.slice(0, limit);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
async getLatestOpLogTimestamp(): Promise<plugins.bson.Timestamp | null> {
|
|
|
|
|
const opLogPath = this.getOpLogPath();
|
|
|
|
|
const opLog = await this.readJsonFile<any[]>(opLogPath, []);
|
|
|
|
|
if (opLog.length === 0) return null;
|
|
|
|
|
|
|
|
|
|
const last = opLog[opLog.length - 1];
|
|
|
|
|
if (last.ts instanceof plugins.bson.Timestamp) {
|
|
|
|
|
return last.ts;
|
|
|
|
|
}
|
|
|
|
|
return new plugins.bson.Timestamp({ t: last.ts.t, i: last.ts.i });
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
generateTimestamp(): plugins.bson.Timestamp {
|
|
|
|
|
this.opLogCounter++;
|
|
|
|
|
return new plugins.bson.Timestamp({ t: Math.floor(Date.now() / 1000), i: this.opLogCounter });
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// ============================================================================
|
|
|
|
|
// Transaction Support
|
|
|
|
|
// ============================================================================
|
|
|
|
|
|
|
|
|
|
async createSnapshot(dbName: string, collName: string): Promise<IStoredDocument[]> {
|
|
|
|
|
const docs = await this.findAll(dbName, collName);
|
|
|
|
|
return docs.map(doc => JSON.parse(JSON.stringify(doc)));
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
async hasConflicts(
|
|
|
|
|
dbName: string,
|
|
|
|
|
collName: string,
|
|
|
|
|
ids: plugins.bson.ObjectId[],
|
|
|
|
|
snapshotTime: plugins.bson.Timestamp
|
|
|
|
|
): Promise<boolean> {
|
|
|
|
|
const opLogPath = this.getOpLogPath();
|
|
|
|
|
const opLog = await this.readJsonFile<any[]>(opLogPath, []);
|
|
|
|
|
const ns = `${dbName}.${collName}`;
|
|
|
|
|
const snapshotTs = snapshotTime.toNumber();
|
|
|
|
|
const modifiedIds = new Set<string>();
|
|
|
|
|
|
|
|
|
|
for (const entry of opLog) {
|
|
|
|
|
const entryTs = entry.ts.toNumber ? entry.ts.toNumber() : (entry.ts.t * 4294967296 + entry.ts.i);
|
|
|
|
|
if (entryTs > snapshotTs && entry.ns === ns) {
|
|
|
|
|
if (entry.o._id) {
|
|
|
|
|
modifiedIds.add(entry.o._id.toString());
|
|
|
|
|
}
|
|
|
|
|
if (entry.o2?._id) {
|
|
|
|
|
modifiedIds.add(entry.o2._id.toString());
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
for (const id of ids) {
|
|
|
|
|
if (modifiedIds.has(id.toString())) {
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
}
|