// feat: add backup replication targets
import * as plugins from '../plugins.js';
|
||||
|
||||
// Manifest entry describing one archived object: its remote path plus the
// size / sha256 pair used for end-to-end integrity verification.
type TArchiveObject = {
  path: string;    // remote path of the object, relative to the target root
  size: number;    // expected byte length of the object's contents
  sha256: string;  // expected hex-encoded SHA-256 digest of the contents
};
// Supported replication backends.
type TTargetType = 's3' | 'smb';
|
||||
|
||||
/**
 * Contract for a backup replication target that can store, probe, and read
 * back archive objects addressed by a remote path.
 */
export interface IBackupTargetWriter {
  // Discriminates which backend this writer talks to ('s3' | 'smb').
  targetType: TTargetType;
  /** Resolves true when an object matching objectArg's size/sha256 already exists at pathArg. */
  hasObject(pathArg: string, objectArg: TArchiveObject): Promise<boolean>;
  /** Writes contentsArg to pathArg; implementations verify integrity against objectArg. */
  putObject(pathArg: string, objectArg: TArchiveObject, contentsArg: Buffer): Promise<void>;
  /** Reads the raw bytes stored at pathArg. */
  readObject(pathArg: string): Promise<Buffer>;
}
|
||||
|
||||
const requiredEnv = (nameArg: string) => {
|
||||
const value = process.env[nameArg];
|
||||
if (!value) {
|
||||
throw new Error(`Missing required backup target env ${nameArg}`);
|
||||
}
|
||||
return value;
|
||||
};
|
||||
|
||||
const normalizeRemotePath = (pathArg: string) => {
|
||||
const normalized = plugins.path.posix
|
||||
.normalize(String(pathArg || '').replace(/\\/g, '/').trim())
|
||||
.replace(/^\/+/, '');
|
||||
if (!normalized || normalized === '.' || normalized.startsWith('../') || normalized.includes('/../')) {
|
||||
throw new Error(`Invalid backup target path ${pathArg}`);
|
||||
}
|
||||
return normalized;
|
||||
};
|
||||
|
||||
const getBufferSha256 = (contentsArg: Buffer) => {
|
||||
return plugins.crypto.createHash('sha256').update(contentsArg).digest('hex');
|
||||
};
|
||||
|
||||
const assertObjectMatches = (objectArg: TArchiveObject, contentsArg: Buffer, labelArg: string) => {
|
||||
const sha256 = getBufferSha256(contentsArg);
|
||||
if (contentsArg.length !== objectArg.size || sha256 !== objectArg.sha256) {
|
||||
throw new Error(`Backup target checksum mismatch for ${labelArg}`);
|
||||
}
|
||||
};
|
||||
|
||||
const objectMatches = (objectArg: TArchiveObject, contentsArg: Buffer) => {
|
||||
return contentsArg.length === objectArg.size && getBufferSha256(contentsArg) === objectArg.sha256;
|
||||
};
|
||||
|
||||
/**
 * Backup target writer that replicates archive objects into an
 * S3-compatible bucket via `plugins.smartbucket`. All connection settings
 * come from CLOUDLY_BACKUP_S3_* environment variables.
 */
class S3BackupTargetWriter implements IBackupTargetWriter {
  public targetType: TTargetType = 's3';
  // Memoized bucket handle; the promise is cached so concurrent callers
  // share a single connection/lookup attempt.
  private bucketPromise?: Promise<any>;

  /**
   * Lazily connects to the configured S3 endpoint and resolves the backup
   * bucket, falling back to createBucket when the lookup returns nothing.
   */
  private async getBucket() {
    if (!this.bucketPromise) {
      this.bucketPromise = (async () => {
        const smartBucket = new plugins.smartbucket.SmartBucket({
          endpoint: requiredEnv('CLOUDLY_BACKUP_S3_ENDPOINT'),
          accessKey: requiredEnv('CLOUDLY_BACKUP_S3_ACCESS_KEY'),
          accessSecret: requiredEnv('CLOUDLY_BACKUP_S3_SECRET_KEY'),
          region: process.env.CLOUDLY_BACKUP_S3_REGION || 'us-east-1',
          // Optional overrides are only passed when the env var is set, so
          // the client's own defaults apply otherwise.
          ...(process.env.CLOUDLY_BACKUP_S3_PORT
            ? { port: Number(process.env.CLOUDLY_BACKUP_S3_PORT) }
            : {}),
          // Any non-empty value enables the override; only the literal
          // string 'false' turns SSL off.
          ...(process.env.CLOUDLY_BACKUP_S3_USE_SSL
            ? { useSsl: process.env.CLOUDLY_BACKUP_S3_USE_SSL !== 'false' }
            : {}),
        } as any);
        const bucketName = requiredEnv('CLOUDLY_BACKUP_S3_BUCKET');
        return await smartBucket.getBucketByName(bucketName) || await smartBucket.createBucket(bucketName);
      })();
    }
    return await this.bucketPromise;
  }

  /**
   * Checks whether an object with the expected size/sha256 already exists
   * at pathArg. Any read error (missing object, connection failure) is
   * treated as "not present" so callers simply re-upload.
   */
  public async hasObject(pathArg: string, objectArg: TArchiveObject) {
    try {
      return objectMatches(objectArg, await this.readObject(pathArg));
    } catch {
      return false;
    }
  }

  /**
   * Uploads an object with write-then-verify semantics: contents are checked
   * against the manifest locally, staged under a unique temp key, read back
   * and verified, moved to the final key, and verified once more. The temp
   * key is best-effort removed in all cases.
   */
  public async putObject(pathArg: string, objectArg: TArchiveObject, contentsArg: Buffer) {
    const targetPath = normalizeRemotePath(pathArg);
    assertObjectMatches(objectArg, contentsArg, targetPath);
    const bucket = await this.getBucket();
    // Unique staging key avoids clobbering a concurrent upload of the same path.
    const tempPath = `${targetPath}.upload-${Date.now()}-${plugins.smartunique.shortId()}.tmp`;
    try {
      await bucket.fastPut({ path: tempPath, contents: contentsArg, overwrite: true });
      // Read-back verification of the staged copy before it becomes visible.
      assertObjectMatches(objectArg, await bucket.fastGet({ path: tempPath }), tempPath);
      await bucket.fastMove({ sourcePath: tempPath, destinationPath: targetPath, overwrite: true });
      // Final verification at the destination key.
      assertObjectMatches(objectArg, await bucket.fastGet({ path: targetPath }), targetPath);
    } finally {
      // Best-effort cleanup; after a successful move the temp key should
      // already be gone, so failures here are ignored.
      await bucket.fastRemove({ path: tempPath }).catch(() => {});
    }
  }

  /** Reads the object at the (normalized) path and returns its raw bytes. */
  public async readObject(pathArg: string) {
    const bucket = await this.getBucket();
    return await bucket.fastGet({ path: normalizeRemotePath(pathArg) });
  }
}
|
||||
|
||||
/**
 * Backup target writer that replicates archive objects onto an SMB share
 * via `plugins.smartsamba`. All connection settings come from
 * CLOUDLY_BACKUP_SMB_* environment variables.
 */
class SmbBackupTargetWriter implements IBackupTargetWriter {
  public targetType: TTargetType = 'smb';
  // Memoized, started SMB client; the promise is cached so concurrent
  // callers share a single connection attempt.
  private clientPromise?: Promise<plugins.smartsamba.SambaClient>;

  /**
   * Lazily builds and starts the SMB client. Port and auth fields are only
   * passed when the corresponding env vars are set, so the client's own
   * defaults apply otherwise.
   */
  private async getClient() {
    if (!this.clientPromise) {
      this.clientPromise = (async () => {
        const client = new plugins.smartsamba.SambaClient({
          host: requiredEnv('CLOUDLY_BACKUP_SMB_HOST'),
          ...(process.env.CLOUDLY_BACKUP_SMB_PORT
            ? { port: Number(process.env.CLOUDLY_BACKUP_SMB_PORT) }
            : {}),
          auth: {
            ...(process.env.CLOUDLY_BACKUP_SMB_USERNAME
              ? { username: process.env.CLOUDLY_BACKUP_SMB_USERNAME }
              : {}),
            ...(process.env.CLOUDLY_BACKUP_SMB_PASSWORD
              ? { password: process.env.CLOUDLY_BACKUP_SMB_PASSWORD }
              : {}),
            ...(process.env.CLOUDLY_BACKUP_SMB_DOMAIN
              ? { domain: process.env.CLOUDLY_BACKUP_SMB_DOMAIN }
              : {}),
          },
        });
        await client.start();
        return client;
      })();
    }
    return await this.clientPromise;
  }

  /** Returns the mandatory share name; throws if unset. */
  private getShare() {
    return requiredEnv('CLOUDLY_BACKUP_SMB_SHARE');
  }

  /**
   * Creates every directory level above pathArg on the share, one segment
   * at a time. "Already exists" and other create errors are ignored; a
   * genuinely missing parent will surface on the subsequent write instead.
   */
  private async ensureParentDirectory(pathArg: string) {
    const client = await this.getClient();
    const parent = plugins.path.posix.dirname(pathArg);
    if (!parent || parent === '.') {
      return;
    }
    const parts = parent.split('/').filter(Boolean);
    let current = '';
    for (const part of parts) {
      current = current ? `${current}/${part}` : part;
      await client.createDirectory(this.getShare(), current).catch(() => {});
    }
  }

  /**
   * Checks whether a file with the expected size/sha256 already exists at
   * pathArg. Any read error is treated as "not present".
   */
  public async hasObject(pathArg: string, objectArg: TArchiveObject) {
    try {
      return objectMatches(objectArg, await this.readObject(pathArg));
    } catch {
      return false;
    }
  }

  /**
   * Uploads an object with write-then-verify semantics: contents are checked
   * locally, written to a unique temp name, read back and verified, renamed
   * onto the final path, and verified once more. The temp file is
   * best-effort removed in all cases.
   *
   * NOTE(review): deleteFile(target) followed by rename(temp, target) is not
   * atomic — a reader can observe a missing target in between. Presumably
   * acceptable for backup replication; confirm the SMB client offers no
   * overwrite-on-rename alternative.
   */
  public async putObject(pathArg: string, objectArg: TArchiveObject, contentsArg: Buffer) {
    const targetPath = normalizeRemotePath(pathArg);
    assertObjectMatches(objectArg, contentsArg, targetPath);
    const client = await this.getClient();
    const share = this.getShare();
    // Unique staging name avoids clobbering a concurrent upload of the same path.
    const tempPath = `${targetPath}.upload-${Date.now()}-${plugins.smartunique.shortId()}.tmp`;
    await this.ensureParentDirectory(targetPath);
    try {
      await client.writeFile(share, tempPath, contentsArg);
      // Read-back verification of the staged copy before it becomes visible.
      assertObjectMatches(objectArg, await client.readFile(share, tempPath), tempPath);
      // Best-effort removal of any existing target so the rename can land.
      await client.deleteFile(share, targetPath).catch(() => {});
      await client.rename(share, tempPath, targetPath);
      // Final verification at the destination path.
      assertObjectMatches(objectArg, await client.readFile(share, targetPath), targetPath);
    } finally {
      // Best-effort cleanup; after a successful rename the temp file is
      // already gone, so failures here are ignored.
      await client.deleteFile(share, tempPath).catch(() => {});
    }
  }

  /** Reads the file at the (normalized) path on the share and returns its raw bytes. */
  public async readObject(pathArg: string) {
    return await (await this.getClient()).readFile(this.getShare(), normalizeRemotePath(pathArg));
  }
}
|
||||
|
||||
export const createBackupTargetWriterFromEnv = (): IBackupTargetWriter => {
|
||||
const targetType = process.env.CLOUDLY_BACKUP_TARGET_TYPE as TTargetType | undefined;
|
||||
if (targetType === 's3') {
|
||||
return new S3BackupTargetWriter();
|
||||
}
|
||||
if (targetType === 'smb') {
|
||||
return new SmbBackupTargetWriter();
|
||||
}
|
||||
throw new Error('No remote backup target configured. Set CLOUDLY_BACKUP_TARGET_TYPE to s3 or smb.');
|
||||
};
|
||||