feat(smartmigration): add initial smartmigration package with MongoDB and S3 migration runner
This commit is contained in:
41
ts/ledgers/classes.ledger.ts
Normal file
41
ts/ledgers/classes.ledger.ts
Normal file
@@ -0,0 +1,41 @@
|
||||
import type {
|
||||
ISmartMigrationLedgerData,
|
||||
} from '../interfaces.js';
|
||||
|
||||
/**
 * Abstract ledger interface — both MongoLedger and S3Ledger implement this.
 *
 * Lifecycle:
 * 1. `init()` — open the underlying store, create empty document if needed
 * 2. `read()` — return the current ledger data
 * 3. `write(data)` — overwrite the ledger with the given data
 * 4. `acquireLock(holderId, ttlMs)` — best-effort lock; returns true on success
 * 5. `releaseLock(holderId)` — clear the lock if we still hold it
 * 6. `close()` — release any resources
 *
 * The ledger data is a single self-contained JSON-serializable object.
 * Both backends store it as a single document (mongo via EasyStore, s3 via
 * a single sidecar object).
 */
export abstract class Ledger {
  /** Open the underlying store and seed an empty document if none exists yet. */
  public abstract init(): Promise<void>;
  /** Return the current ledger data, filled out to the full canonical shape. */
  public abstract read(): Promise<ISmartMigrationLedgerData>;
  /** Overwrite the ledger with `data` — a full replace, not a merge. */
  public abstract write(data: ISmartMigrationLedgerData): Promise<void>;
  /**
   * Best-effort attempt to take the migration lock for `holderId`, expiring
   * after `ttlMs` milliseconds. Resolves true when the lock was won.
   */
  public abstract acquireLock(holderId: string, ttlMs: number): Promise<boolean>;
  /** Clear the lock, but only if `holderId` still holds it (no-op otherwise). */
  public abstract releaseLock(holderId: string): Promise<void>;
  /** Release any resources held by the backend implementation. */
  public abstract close(): Promise<void>;
}
|
||||
|
||||
/** Build a fresh, empty ledger document. */
|
||||
export function emptyLedgerData(): ISmartMigrationLedgerData {
|
||||
return {
|
||||
currentVersion: null,
|
||||
steps: {},
|
||||
lock: {
|
||||
holder: null,
|
||||
acquiredAt: null,
|
||||
expiresAt: null,
|
||||
},
|
||||
checkpoints: {},
|
||||
};
|
||||
}
|
||||
106
ts/ledgers/classes.mongoledger.ts
Normal file
106
ts/ledgers/classes.mongoledger.ts
Normal file
@@ -0,0 +1,106 @@
|
||||
import type * as plugins from '../plugins.js';
|
||||
import type { ISmartMigrationLedgerData } from '../interfaces.js';
|
||||
import { Ledger, emptyLedgerData } from './classes.ledger.js';
|
||||
|
||||
/**
|
||||
* Mongo-backed ledger that persists `ISmartMigrationLedgerData` as a single
|
||||
* document via smartdata's `EasyStore`. The EasyStore's nameId is
|
||||
* `smartmigration:<ledgerName>`, scoping multiple migration ledgers in the
|
||||
* same database.
|
||||
*/
|
||||
export class MongoLedger extends Ledger {
|
||||
private db: plugins.smartdata.SmartdataDb;
|
||||
private ledgerName: string;
|
||||
private easyStore: any | null = null; // EasyStore<ISmartMigrationLedgerData> — typed loosely because the peer type may not be present at compile time
|
||||
|
||||
constructor(db: plugins.smartdata.SmartdataDb, ledgerName: string) {
|
||||
super();
|
||||
this.db = db;
|
||||
this.ledgerName = ledgerName;
|
||||
}
|
||||
|
||||
public async init(): Promise<void> {
|
||||
this.easyStore = await this.db.createEasyStore(`smartmigration:${this.ledgerName}`);
|
||||
// EasyStore creates an empty `data: {}` on first read. Hydrate it to the
|
||||
// canonical empty shape so subsequent reads always return all fields.
|
||||
const existing = (await this.easyStore.readAll()) as Partial<ISmartMigrationLedgerData>;
|
||||
if (
|
||||
existing.currentVersion === undefined ||
|
||||
existing.steps === undefined ||
|
||||
existing.lock === undefined ||
|
||||
existing.checkpoints === undefined
|
||||
) {
|
||||
await this.easyStore.writeAll(emptyLedgerData());
|
||||
}
|
||||
}
|
||||
|
||||
public async read(): Promise<ISmartMigrationLedgerData> {
|
||||
if (!this.easyStore) {
|
||||
throw new Error('MongoLedger.read() called before init()');
|
||||
}
|
||||
const data = (await this.easyStore.readAll()) as ISmartMigrationLedgerData;
|
||||
return this.normalize(data);
|
||||
}
|
||||
|
||||
public async write(data: ISmartMigrationLedgerData): Promise<void> {
|
||||
if (!this.easyStore) {
|
||||
throw new Error('MongoLedger.write() called before init()');
|
||||
}
|
||||
// Use EasyStore.replace (added in @push.rocks/smartdata 7.1.7) for true
|
||||
// overwrite semantics. This lets us actually delete keys from
|
||||
// checkpoints / steps when the in-memory ledger drops them — writeAll
|
||||
// would merge and silently retain them.
|
||||
await this.easyStore.replace(data);
|
||||
}
|
||||
|
||||
public async acquireLock(holderId: string, ttlMs: number): Promise<boolean> {
|
||||
const data = await this.read();
|
||||
const now = new Date();
|
||||
const lockHeld = data.lock.holder !== null;
|
||||
const lockExpired =
|
||||
data.lock.expiresAt !== null && new Date(data.lock.expiresAt).getTime() < now.getTime();
|
||||
|
||||
if (lockHeld && !lockExpired) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const expiresAt = new Date(now.getTime() + ttlMs);
|
||||
data.lock = {
|
||||
holder: holderId,
|
||||
acquiredAt: now.toISOString(),
|
||||
expiresAt: expiresAt.toISOString(),
|
||||
};
|
||||
await this.write(data);
|
||||
|
||||
// Re-read to confirm we won the race. EasyStore is last-writer-wins so
|
||||
// this is a probabilistic CAS, not a true atomic CAS — adequate for v1.
|
||||
const verify = await this.read();
|
||||
return verify.lock.holder === holderId;
|
||||
}
|
||||
|
||||
public async releaseLock(holderId: string): Promise<void> {
|
||||
const data = await this.read();
|
||||
if (data.lock.holder !== holderId) {
|
||||
// Lock was stolen or never held — nothing to release.
|
||||
return;
|
||||
}
|
||||
data.lock = { holder: null, acquiredAt: null, expiresAt: null };
|
||||
await this.write(data);
|
||||
}
|
||||
|
||||
public async close(): Promise<void> {
|
||||
// EasyStore has no explicit close — it just dereferences when the parent
|
||||
// SmartdataDb closes.
|
||||
this.easyStore = null;
|
||||
}
|
||||
|
||||
/** Fill in any missing top-level fields with their defaults. */
|
||||
private normalize(data: Partial<ISmartMigrationLedgerData>): ISmartMigrationLedgerData {
|
||||
return {
|
||||
currentVersion: data.currentVersion ?? null,
|
||||
steps: data.steps ?? {},
|
||||
lock: data.lock ?? { holder: null, acquiredAt: null, expiresAt: null },
|
||||
checkpoints: data.checkpoints ?? {},
|
||||
};
|
||||
}
|
||||
}
|
||||
92
ts/ledgers/classes.s3ledger.ts
Normal file
92
ts/ledgers/classes.s3ledger.ts
Normal file
@@ -0,0 +1,92 @@
|
||||
import type * as plugins from '../plugins.js';
|
||||
import type { ISmartMigrationLedgerData } from '../interfaces.js';
|
||||
import { Ledger, emptyLedgerData } from './classes.ledger.js';
|
||||
|
||||
/**
|
||||
* S3-backed ledger that persists `ISmartMigrationLedgerData` as a single
|
||||
* JSON object at `<bucket>/.smartmigration/<ledgerName>.json`.
|
||||
*
|
||||
* Locking is best-effort: S3 has no conditional writes (without versioning
|
||||
* + a separate index). Single-instance SaaS deployments are fine; multi-
|
||||
* instance deployments should use the mongo ledger or provide external
|
||||
* coordination.
|
||||
*/
|
||||
export class S3Ledger extends Ledger {
|
||||
private bucket: plugins.smartbucket.Bucket;
|
||||
private path: string;
|
||||
|
||||
constructor(bucket: plugins.smartbucket.Bucket, ledgerName: string) {
|
||||
super();
|
||||
this.bucket = bucket;
|
||||
this.path = `.smartmigration/${ledgerName}.json`;
|
||||
}
|
||||
|
||||
public async init(): Promise<void> {
|
||||
const exists = await (this.bucket as any).fastExists({ path: this.path });
|
||||
if (!exists) {
|
||||
await this.write(emptyLedgerData());
|
||||
}
|
||||
}
|
||||
|
||||
public async read(): Promise<ISmartMigrationLedgerData> {
|
||||
const buffer = await (this.bucket as any).fastGet({ path: this.path });
|
||||
const data = JSON.parse(buffer.toString('utf8')) as Partial<ISmartMigrationLedgerData>;
|
||||
return this.normalize(data);
|
||||
}
|
||||
|
||||
public async write(data: ISmartMigrationLedgerData): Promise<void> {
|
||||
const json = JSON.stringify(data, null, 2);
|
||||
await (this.bucket as any).fastPut({
|
||||
path: this.path,
|
||||
contents: json,
|
||||
overwrite: true,
|
||||
});
|
||||
}
|
||||
|
||||
public async acquireLock(holderId: string, ttlMs: number): Promise<boolean> {
|
||||
const data = await this.read();
|
||||
const now = new Date();
|
||||
const lockHeld = data.lock.holder !== null;
|
||||
const lockExpired =
|
||||
data.lock.expiresAt !== null && new Date(data.lock.expiresAt).getTime() < now.getTime();
|
||||
|
||||
if (lockHeld && !lockExpired) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const expiresAt = new Date(now.getTime() + ttlMs);
|
||||
data.lock = {
|
||||
holder: holderId,
|
||||
acquiredAt: now.toISOString(),
|
||||
expiresAt: expiresAt.toISOString(),
|
||||
};
|
||||
await this.write(data);
|
||||
|
||||
// Re-read to detect races. Best-effort only.
|
||||
const verify = await this.read();
|
||||
return verify.lock.holder === holderId;
|
||||
}
|
||||
|
||||
public async releaseLock(holderId: string): Promise<void> {
|
||||
const data = await this.read();
|
||||
if (data.lock.holder !== holderId) {
|
||||
return;
|
||||
}
|
||||
data.lock = { holder: null, acquiredAt: null, expiresAt: null };
|
||||
await this.write(data);
|
||||
}
|
||||
|
||||
public async close(): Promise<void> {
|
||||
// No persistent connection to release; the smartbucket Bucket lives on
|
||||
// the user's SmartBucket instance.
|
||||
}
|
||||
|
||||
private normalize(data: Partial<ISmartMigrationLedgerData>): ISmartMigrationLedgerData {
|
||||
return {
|
||||
currentVersion: data.currentVersion ?? null,
|
||||
steps: data.steps ?? {},
|
||||
lock: data.lock ?? { holder: null, acquiredAt: null, expiresAt: null },
|
||||
checkpoints: data.checkpoints ?? {},
|
||||
};
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user