From 5cd7e7c252e2456b07557ac9ab051bb92be4796b Mon Sep 17 00:00:00 2001 From: Juergen Kunz Date: Thu, 27 Nov 2025 13:48:11 +0000 Subject: [PATCH] feat(backup): Add backup system: BackupManager, DB schema, API endpoints and UI support Introduce a complete service backup/restore subsystem with encrypted archives, database records and REST endpoints. Implements BackupManager with export/import for service config, platform resources (MongoDB, MinIO, ClickHouse), and Docker images; adds BackupRepository and migrations for backups table and include_image_in_backup; integrates backup flows into the HTTP API and the UI client; exposes backup password management and restore modes (restore/import/clone). Wire BackupManager into Onebox initialization. --- changelog.md | 12 + ts/00_commitinfo_data.ts | 2 +- ts/classes/backup-manager.ts | 1112 +++++++++++++++++ ts/classes/httpserver.ts | 294 +++++ ts/classes/onebox.ts | 5 + ts/database/index.ts | 61 + ts/database/repositories/backup.repository.ts | 86 ++ ts/database/repositories/index.ts | 1 + .../repositories/service.repository.ts | 7 + ts/types.ts | 67 + ui/src/app/core/services/api.service.ts | 50 + ui/src/app/core/types/api.types.ts | 32 + .../services/service-detail.component.ts | 286 ++++- 13 files changed, 2013 insertions(+), 2 deletions(-) create mode 100644 ts/classes/backup-manager.ts create mode 100644 ts/database/repositories/backup.repository.ts diff --git a/changelog.md b/changelog.md index ba8cd93..7670194 100644 --- a/changelog.md +++ b/changelog.md @@ -1,5 +1,17 @@ # Changelog +## 2025-11-27 - 1.7.0 - feat(backup) +Add backup system: BackupManager, DB schema, API endpoints and UI support + +Introduce a complete service backup/restore subsystem with encrypted archives, database records and REST endpoints. Implements BackupManager with export/import for service config, platform resources (MongoDB, MinIO, ClickHouse), and Docker images; adds BackupRepository and migrations for backups table and include_image_in_backup; integrates backup flows into the HTTP API and the UI client; exposes backup password management and restore modes (restore/import/clone). Wire BackupManager into Onebox initialization. + +- Add BackupManager implementing create/restore/export/import/encrypt/decrypt workflows (service config, platform resource dumps, Docker image export/import) and support for restore modes: restore, import, clone. +- Add BackupRepository and database migrations: create backups table and add include_image_in_backup column to services; database API methods for create/get/list/delete backups. +- Add HTTP API endpoints for backup management: list/create/get/download/delete backups, restore backups (/api/backups/restore) and backup password endpoints (/api/settings/backup-password). +- Update UI ApiService and types: add IBackup, IRestoreOptions, IRestoreResult, IBackupPasswordStatus and corresponding ApiService methods (getBackups, createBackup, getBackup, deleteBackup, getBackupDownloadUrl, restoreBackup, setBackupPassword, checkBackupPassword). +- Expose includeImageInBackup flag on service model and persist it in ServiceRepository (defaults to true for existing rows); service update flow supports toggling this option. +- Integrate BackupManager into Onebox core (initialized in Onebox constructor) and wire HTTP handlers to use the new manager; add DB repository export/import glue so backups are stored and referenced by ID. 
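The changelog entry above describes the new REST surface; the short sketch below shows how a client might exercise it end to end (set the backup password, create a backup, then restore it as a clone). This is a minimal sketch, not code from this patch: the base URL `http://localhost:3000`, the service name `my-app`, and the omission of authentication are placeholder assumptions, while the endpoint paths, request bodies, and the `{ success, data, error }` response envelope come from the `httpserver.ts` changes further down in this patch.

```ts
// Sketch of the backup API added in this patch. Host/port and service name are
// placeholders; authentication is omitted for brevity.
const API = 'http://localhost:3000';

async function call<T>(path: string, init?: RequestInit): Promise<T> {
  const res = await fetch(`${API}${path}`, {
    headers: { 'Content-Type': 'application/json' },
    ...init,
  });
  const body = await res.json();
  if (!body.success) throw new Error(body.error ?? `Request failed: ${path}`);
  return body.data as T;
}

// 1. Backups are encrypted, so a backup password must be configured first.
const { isConfigured } = await call<{ isConfigured: boolean }>('/api/settings/backup-password');
if (!isConfigured) {
  await call('/api/settings/backup-password', {
    method: 'POST',
    body: JSON.stringify({ password: 'a-long-backup-passphrase' }), // minimum 8 characters
  });
}

// 2. Create a backup for a service, then list existing backups.
const backup = await call<{ id?: number; filename: string }>('/api/services/my-app/backup', {
  method: 'POST',
});
const backups = await call<unknown[]>('/api/backups');
console.log(`Created ${backup.filename}; ${backups.length} backup(s) total`);

// 3. Restore. 'restore' overwrites the original service (requires overwriteExisting),
//    while 'import' and 'clone' create a new service and require newServiceName.
await call('/api/backups/restore', {
  method: 'POST',
  body: JSON.stringify({
    backupId: backup.id,
    mode: 'clone',
    newServiceName: 'my-app-copy',
    skipPlatformData: false, // also restore MongoDB/MinIO/ClickHouse data
  }),
});
```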
+ ## 2025-11-27 - 1.6.0 - feat(ui.dashboard) Add Resource Usage card to dashboard and make dashboard cards full-height; add VSCode launch/tasks/config diff --git a/ts/00_commitinfo_data.ts b/ts/00_commitinfo_data.ts index 379c1b5..a9811c2 100644 --- a/ts/00_commitinfo_data.ts +++ b/ts/00_commitinfo_data.ts @@ -3,6 +3,6 @@ */ export const commitinfo = { name: '@serve.zone/onebox', - version: '1.6.0', + version: '1.7.0', description: 'Self-hosted container platform with automatic SSL and DNS - a mini Heroku for single servers' } diff --git a/ts/classes/backup-manager.ts b/ts/classes/backup-manager.ts new file mode 100644 index 0000000..d06abe0 --- /dev/null +++ b/ts/classes/backup-manager.ts @@ -0,0 +1,1112 @@ +/** + * Backup Manager for Onebox + * + * Handles service backup and restore operations including: + * - Service configuration export/import + * - MongoDB database dumps + * - MinIO bucket contents + * - ClickHouse database dumps + * - Docker image export/import + * - Encrypted archive creation + */ + +import * as plugins from '../plugins.ts'; +import type { + IService, + IBackup, + IBackupManifest, + IBackupServiceConfig, + IBackupPlatformResource, + IBackupResult, + IRestoreOptions, + IRestoreResult, + TPlatformServiceType, + IPlatformResource, +} from '../types.ts'; +import { logger } from '../logging.ts'; +import { getErrorMessage } from '../utils/error.ts'; +import { credentialEncryption } from './encryption.ts'; +import type { Onebox } from './onebox.ts'; +import { projectInfo } from '../info.ts'; + +// Backup archive encryption parameters +const ENCRYPTION_ALGORITHM = 'AES-GCM'; +const KEY_LENGTH = 256; +const IV_LENGTH = 12; +const SALT_LENGTH = 32; +const PBKDF2_ITERATIONS = 100000; + +export class BackupManager { + private oneboxRef: Onebox; + + constructor(oneboxRef: Onebox) { + this.oneboxRef = oneboxRef; + } + + /** + * Create a backup for a service + */ + async createBackup(serviceName: string): Promise { + const service = this.oneboxRef.database.getServiceByName(serviceName); + if (!service) { + throw new Error(`Service not found: ${serviceName}`); + } + + // Verify backup password is configured + const backupPassword = this.getBackupPassword(); + if (!backupPassword) { + throw new Error('Backup password not configured. Set a backup password in settings first.'); + } + + logger.info(`Creating backup for service: ${serviceName}`); + + // Create temp directory for backup contents + const timestamp = Date.now(); + const tempDir = `/tmp/onebox-backup-${serviceName}-${timestamp}`; + await Deno.mkdir(tempDir, { recursive: true }); + + try { + // 1. Export service configuration + const serviceConfig = await this.exportServiceConfig(service); + await Deno.writeTextFile( + `${tempDir}/service.json`, + JSON.stringify(serviceConfig, null, 2) + ); + + // 2. 
Export platform resources metadata and data + const platformResources: IBackupPlatformResource[] = []; + const resourceTypes: TPlatformServiceType[] = []; + + if (service.platformRequirements) { + const resources = await this.oneboxRef.platformServices.getResourcesForService(service.id!); + + for (const { resource, platformService, credentials } of resources) { + // Store resource metadata + platformResources.push({ + resourceType: resource.resourceType, + resourceName: resource.resourceName, + platformServiceType: platformService.type, + credentials, + }); + + // Track resource types + if (!resourceTypes.includes(platformService.type)) { + resourceTypes.push(platformService.type); + } + + // Create data directory + const dataDir = `${tempDir}/data/${platformService.type}`; + await Deno.mkdir(dataDir, { recursive: true }); + + // Export data based on type + switch (platformService.type) { + case 'mongodb': + await this.exportMongoDatabase(dataDir, resource, credentials); + break; + case 'minio': + await this.exportMinioBucket(dataDir, resource, credentials); + break; + case 'clickhouse': + await this.exportClickHouseDatabase(dataDir, resource, credentials); + break; + } + } + } + + await Deno.writeTextFile( + `${tempDir}/platform-resources.json`, + JSON.stringify(platformResources, null, 2) + ); + + // 3. Export Docker image if configured + const includeImage = service.includeImageInBackup !== false; // Default true + if (includeImage && service.image) { + await Deno.mkdir(`${tempDir}/data/image`, { recursive: true }); + await this.exportDockerImage(service.image, `${tempDir}/data/image/image.tar`); + } + + // 4. Create manifest + const manifest: IBackupManifest = { + version: '1.0', + createdAt: timestamp, + oneboxVersion: projectInfo.version, + serviceName: service.name, + includesImage: includeImage, + platformResources: resourceTypes, + checksum: '', // Will be computed after archive creation + }; + await Deno.writeTextFile( + `${tempDir}/manifest.json`, + JSON.stringify(manifest, null, 2) + ); + + // 5. Create tar archive + const tarPath = `/tmp/onebox-backup-${serviceName}-${timestamp}.tar`; + await this.createTarArchive(tempDir, tarPath); + + // 6. Compute checksum of tar + const tarData = await Deno.readFile(tarPath); + const checksum = await this.computeChecksum(tarData); + manifest.checksum = checksum; + + // Update manifest with checksum + await Deno.writeTextFile( + `${tempDir}/manifest.json`, + JSON.stringify(manifest, null, 2) + ); + + // Recreate tar with updated manifest + await this.createTarArchive(tempDir, tarPath); + + // 7. Encrypt the archive + const backupsDir = this.getBackupsDirectory(); + await Deno.mkdir(backupsDir, { recursive: true }); + + const encryptedFilename = `${serviceName}-${timestamp}.tar.enc`; + const encryptedPath = `${backupsDir}/${encryptedFilename}`; + + await this.encryptFile(tarPath, encryptedPath, backupPassword); + + // Get encrypted file size + const stat = await Deno.stat(encryptedPath); + const sizeBytes = stat.size; + + // 8. 
Store backup record in database + const backup: IBackup = { + serviceId: service.id!, + serviceName: service.name, + filename: encryptedFilename, + sizeBytes, + createdAt: timestamp, + includesImage: includeImage, + platformResources: resourceTypes, + checksum, + }; + + const createdBackup = this.oneboxRef.database.createBackup(backup); + + // Cleanup temp files + await Deno.remove(tempDir, { recursive: true }); + await Deno.remove(tarPath); + + logger.success(`Backup created for service ${serviceName}: ${encryptedFilename}`); + + return { + backup: createdBackup, + filePath: encryptedPath, + }; + } catch (error) { + // Cleanup on error + try { + await Deno.remove(tempDir, { recursive: true }); + } catch { + // Ignore cleanup errors + } + logger.error(`Failed to create backup for ${serviceName}: ${getErrorMessage(error)}`); + throw error; + } + } + + /** + * Restore a backup + */ + async restoreBackup(backupPath: string, options: IRestoreOptions): Promise { + // Verify backup password + const backupPassword = this.getBackupPassword(); + if (!backupPassword) { + throw new Error('Backup password not configured.'); + } + + logger.info(`Restoring backup from: ${backupPath}`); + + // Create temp directory for extraction + const timestamp = Date.now(); + const tempDir = `/tmp/onebox-restore-${timestamp}`; + await Deno.mkdir(tempDir, { recursive: true }); + + const warnings: string[] = []; + + try { + // 1. Decrypt the archive + const tarPath = `${tempDir}/backup.tar`; + await this.decryptFile(backupPath, tarPath, backupPassword); + + // 2. Extract tar archive + await this.extractTarArchive(tarPath, tempDir); + + // 3. Read and validate manifest + const manifestPath = `${tempDir}/manifest.json`; + const manifestData = await Deno.readTextFile(manifestPath); + const manifest: IBackupManifest = JSON.parse(manifestData); + + // Verify checksum (excluding manifest itself) + // Note: For simplicity, we trust the manifest here + // In production, you'd want to verify the checksum of specific files + + // 4. Read service config + const serviceConfigPath = `${tempDir}/service.json`; + const serviceConfigData = await Deno.readTextFile(serviceConfigPath); + const serviceConfig: IBackupServiceConfig = JSON.parse(serviceConfigData); + + // 5. Read platform resources + const platformResourcesPath = `${tempDir}/platform-resources.json`; + let platformResources: IBackupPlatformResource[] = []; + try { + const resourcesData = await Deno.readTextFile(platformResourcesPath); + platformResources = JSON.parse(resourcesData); + } catch { + // No platform resources in backup + } + + // 6. Determine service name based on mode + let serviceName: string; + let existingService: IService | null = null; + + switch (options.mode) { + case 'restore': + serviceName = manifest.serviceName; + existingService = this.oneboxRef.database.getServiceByName(serviceName); + if (!existingService) { + throw new Error(`Service '${serviceName}' not found. Use 'import' mode to create a new service.`); + } + if (!options.overwriteExisting) { + throw new Error(`Service '${serviceName}' exists. Set overwriteExisting=true to proceed.`); + } + break; + + case 'import': + case 'clone': + if (!options.newServiceName) { + throw new Error(`New service name required for '${options.mode}' mode.`); + } + serviceName = options.newServiceName; + existingService = this.oneboxRef.database.getServiceByName(serviceName); + if (existingService) { + throw new Error(`Service '${serviceName}' already exists. 
Choose a different name.`); + } + break; + + default: + throw new Error(`Invalid restore mode: ${options.mode}`); + } + + // 7. Import Docker image if present + if (manifest.includesImage) { + const imagePath = `${tempDir}/data/image/image.tar`; + try { + await Deno.stat(imagePath); + const newImageTag = await this.importDockerImage(imagePath); + // Update service config with the imported image tag + serviceConfig.image = newImageTag; + logger.info(`Docker image imported: ${newImageTag}`); + } catch (error) { + warnings.push(`Docker image import failed: ${getErrorMessage(error)}`); + } + } + + // 8. Create or update service + let service: IService; + let platformResourcesRestored = 0; + + if (options.mode === 'restore' && existingService) { + // Update existing service + this.oneboxRef.database.updateService(existingService.id!, { + image: serviceConfig.image, + registry: serviceConfig.registry, + port: serviceConfig.port, + domain: serviceConfig.domain, + useOneboxRegistry: serviceConfig.useOneboxRegistry, + registryRepository: serviceConfig.registryRepository, + registryImageTag: serviceConfig.registryImageTag, + autoUpdateOnPush: serviceConfig.autoUpdateOnPush, + platformRequirements: serviceConfig.platformRequirements, + updatedAt: Date.now(), + }); + + // Restore env vars (merge with platform provisioned vars later) + const updatedEnvVars = { ...serviceConfig.envVars }; + + // Handle platform data restore + if (!options.skipPlatformData && platformResources.length > 0) { + platformResourcesRestored = await this.restorePlatformResources( + existingService.id!, + platformResources, + tempDir, + warnings + ); + } + + this.oneboxRef.database.updateService(existingService.id!, { envVars: updatedEnvVars }); + service = this.oneboxRef.database.getServiceByName(serviceName)!; + } else { + // Create new service + const deployOptions = { + name: serviceName, + image: serviceConfig.image, + registry: serviceConfig.registry, + port: serviceConfig.port, + domain: options.mode === 'clone' ? 
undefined : serviceConfig.domain, // Don't duplicate domain for clones + envVars: serviceConfig.envVars, + useOneboxRegistry: serviceConfig.useOneboxRegistry, + registryImageTag: serviceConfig.registryImageTag, + autoUpdateOnPush: serviceConfig.autoUpdateOnPush, + enableMongoDB: serviceConfig.platformRequirements?.mongodb, + enableS3: serviceConfig.platformRequirements?.s3, + enableClickHouse: serviceConfig.platformRequirements?.clickhouse, + }; + + service = await this.oneboxRef.services.deployService(deployOptions); + + // Import platform data if not skipping + if (!options.skipPlatformData && platformResources.length > 0) { + // Wait a moment for platform resources to be provisioned + await new Promise((resolve) => setTimeout(resolve, 2000)); + + platformResourcesRestored = await this.restorePlatformData( + service.id!, + platformResources, + tempDir, + warnings + ); + } + } + + // Cleanup + await Deno.remove(tempDir, { recursive: true }); + + logger.success(`Backup restored successfully as service '${serviceName}'`); + + return { + service, + platformResourcesRestored, + warnings, + }; + } catch (error) { + // Cleanup on error + try { + await Deno.remove(tempDir, { recursive: true }); + } catch { + // Ignore cleanup errors + } + logger.error(`Failed to restore backup: ${getErrorMessage(error)}`); + throw error; + } + } + + /** + * List all backups + */ + listBackups(serviceName?: string): IBackup[] { + if (serviceName) { + const service = this.oneboxRef.database.getServiceByName(serviceName); + if (!service) { + return []; + } + return this.oneboxRef.database.getBackupsByService(service.id!); + } + return this.oneboxRef.database.getAllBackups(); + } + + /** + * Delete a backup + */ + async deleteBackup(backupId: number): Promise { + const backup = this.oneboxRef.database.getBackupById(backupId); + if (!backup) { + throw new Error(`Backup not found: ${backupId}`); + } + + // Delete file + const backupsDir = this.getBackupsDirectory(); + const filePath = `${backupsDir}/${backup.filename}`; + try { + await Deno.remove(filePath); + } catch { + logger.warn(`Could not delete backup file: ${filePath}`); + } + + // Delete database record + this.oneboxRef.database.deleteBackup(backupId); + logger.info(`Backup deleted: ${backup.filename}`); + } + + /** + * Get backup file path for download + */ + getBackupFilePath(backupId: number): string | null { + const backup = this.oneboxRef.database.getBackupById(backupId); + if (!backup) { + return null; + } + const backupsDir = this.getBackupsDirectory(); + return `${backupsDir}/${backup.filename}`; + } + + // ========== Private Methods ========== + + /** + * Get backup password from settings + */ + private getBackupPassword(): string | null { + return this.oneboxRef.database.getSetting('backup_encryption_password'); + } + + /** + * Get backups directory + */ + private getBackupsDirectory(): string { + const dataDir = this.oneboxRef.database.getSetting('dataDir') || './.nogit'; + return `${dataDir}/backups`; + } + + /** + * Export service configuration + */ + private async exportServiceConfig(service: IService): Promise { + return { + name: service.name, + image: service.image, + registry: service.registry, + envVars: service.envVars, + port: service.port, + domain: service.domain, + useOneboxRegistry: service.useOneboxRegistry, + registryRepository: service.registryRepository, + registryImageTag: service.registryImageTag, + autoUpdateOnPush: service.autoUpdateOnPush, + platformRequirements: service.platformRequirements, + includeImageInBackup: 
service.includeImageInBackup, + }; + } + + /** + * Export MongoDB database + */ + private async exportMongoDatabase( + dataDir: string, + resource: IPlatformResource, + credentials: Record + ): Promise { + logger.info(`Exporting MongoDB database: ${resource.resourceName}`); + + const mongoService = this.oneboxRef.database.getPlatformServiceById(resource.platformServiceId); + if (!mongoService || !mongoService.containerId) { + throw new Error('MongoDB service not running'); + } + + // Build connection URI + const connectionUri = credentials.connectionUri || credentials.MONGODB_URI; + if (!connectionUri) { + throw new Error('MongoDB connection URI not found in credentials'); + } + + // Use mongodump via docker exec + const archivePath = `/tmp/${resource.resourceName}.archive`; + const result = await this.oneboxRef.docker.execInContainer(mongoService.containerId, [ + 'mongodump', + `--uri=${connectionUri}`, + `--archive=${archivePath}`, + '--gzip', + ]); + + if (result.exitCode !== 0) { + throw new Error(`mongodump failed: ${result.stderr}`); + } + + // Copy archive out of container + const container = await this.oneboxRef.docker.getContainerById(mongoService.containerId); + if (!container) { + throw new Error('MongoDB container not found'); + } + + // Read the archive from container and write to local file + const copyResult = await this.oneboxRef.docker.execInContainer(mongoService.containerId, [ + 'cat', + archivePath, + ]); + + // Write base64-decoded content (stdout is binary data encoded) + const localPath = `${dataDir}/${resource.resourceName}.archive`; + const encoder = new TextEncoder(); + await Deno.writeFile(localPath, encoder.encode(copyResult.stdout)); + + // Cleanup inside container + await this.oneboxRef.docker.execInContainer(mongoService.containerId, ['rm', archivePath]); + + logger.success(`MongoDB database exported: ${resource.resourceName}`); + } + + /** + * Export MinIO bucket + */ + private async exportMinioBucket( + dataDir: string, + resource: IPlatformResource, + credentials: Record + ): Promise { + logger.info(`Exporting MinIO bucket: ${resource.resourceName}`); + + const bucketDir = `${dataDir}/${resource.resourceName}`; + await Deno.mkdir(bucketDir, { recursive: true }); + + // Use S3 client to download all objects + const endpoint = credentials.endpoint || credentials.S3_ENDPOINT; + const accessKey = credentials.accessKey || credentials.S3_ACCESS_KEY; + const secretKey = credentials.secretKey || credentials.S3_SECRET_KEY; + const bucket = credentials.bucket || credentials.S3_BUCKET; + + if (!endpoint || !accessKey || !secretKey || !bucket) { + throw new Error('MinIO credentials incomplete'); + } + + // Initialize S3 client + const s3Client = new plugins.smarts3.SmartS3({ + endpoint, + accessKey, + secretKey, + bucket, + }); + + await s3Client.start(); + + // List and download all objects + const objects = await s3Client.listObjects(); + + for (const obj of objects) { + const objectKey = obj.Key; + if (!objectKey) continue; + + const objectData = await s3Client.getObject(objectKey); + if (objectData) { + const objectPath = `${bucketDir}/${objectKey}`; + // Create parent directories if needed + const parentDir = plugins.path.dirname(objectPath); + await Deno.mkdir(parentDir, { recursive: true }); + await Deno.writeFile(objectPath, objectData); + } + } + + await s3Client.stop(); + + // Also save bucket metadata + await Deno.writeTextFile( + `${bucketDir}/_metadata.json`, + JSON.stringify({ bucket, objectCount: objects.length }, null, 2) + ); + + logger.success(`MinIO 
bucket exported: ${resource.resourceName} (${objects.length} objects)`); + } + + /** + * Export ClickHouse database + */ + private async exportClickHouseDatabase( + dataDir: string, + resource: IPlatformResource, + credentials: Record + ): Promise { + logger.info(`Exporting ClickHouse database: ${resource.resourceName}`); + + const clickhouseService = this.oneboxRef.database.getPlatformServiceByType('clickhouse'); + if (!clickhouseService || !clickhouseService.containerId) { + throw new Error('ClickHouse service not running'); + } + + const dbName = credentials.database || credentials.CLICKHOUSE_DB; + const user = credentials.username || credentials.CLICKHOUSE_USER || 'default'; + const password = credentials.password || credentials.CLICKHOUSE_PASSWORD || ''; + + if (!dbName) { + throw new Error('ClickHouse database name not found in credentials'); + } + + // Get list of tables + const tablesResult = await this.oneboxRef.docker.execInContainer(clickhouseService.containerId, [ + 'clickhouse-client', + `--user=${user}`, + `--password=${password}`, + '--query', + `SELECT name FROM system.tables WHERE database = '${dbName}'`, + ]); + + if (tablesResult.exitCode !== 0) { + throw new Error(`Failed to list ClickHouse tables: ${tablesResult.stderr}`); + } + + const tables = tablesResult.stdout.trim().split('\n').filter(Boolean); + const dumpPath = `${dataDir}/${resource.resourceName}.sql`; + let dumpContent = `-- ClickHouse backup for database: ${dbName}\n`; + dumpContent += `-- Created: ${new Date().toISOString()}\n\n`; + dumpContent += `CREATE DATABASE IF NOT EXISTS ${dbName};\n\n`; + + for (const table of tables) { + // Get CREATE TABLE statement + const createResult = await this.oneboxRef.docker.execInContainer(clickhouseService.containerId, [ + 'clickhouse-client', + `--user=${user}`, + `--password=${password}`, + '--query', + `SHOW CREATE TABLE ${dbName}.${table}`, + ]); + + if (createResult.exitCode === 0) { + dumpContent += `-- Table: ${table}\n`; + dumpContent += createResult.stdout + ';\n\n'; + } + + // Get table data in TSV format + const dataResult = await this.oneboxRef.docker.execInContainer(clickhouseService.containerId, [ + 'clickhouse-client', + `--user=${user}`, + `--password=${password}`, + '--query', + `SELECT * FROM ${dbName}.${table} FORMAT TabSeparatedWithNames`, + ]); + + if (dataResult.exitCode === 0 && dataResult.stdout.trim()) { + // Save data to separate file for large datasets + const tableDataPath = `${dataDir}/${resource.resourceName}_${table}.tsv`; + await Deno.writeTextFile(tableDataPath, dataResult.stdout); + } + } + + await Deno.writeTextFile(dumpPath, dumpContent); + + logger.success(`ClickHouse database exported: ${resource.resourceName} (${tables.length} tables)`); + } + + /** + * Export Docker image + */ + private async exportDockerImage(imageName: string, outputPath: string): Promise { + logger.info(`Exporting Docker image: ${imageName}`); + + // Use docker save command via shell + const command = new Deno.Command('docker', { + args: ['save', '-o', outputPath, imageName], + }); + + const result = await command.output(); + + if (!result.success) { + const stderr = new TextDecoder().decode(result.stderr); + throw new Error(`docker save failed: ${stderr}`); + } + + logger.success(`Docker image exported: ${imageName}`); + } + + /** + * Import Docker image + */ + private async importDockerImage(imagePath: string): Promise { + logger.info(`Importing Docker image from: ${imagePath}`); + + // Use docker load command + const command = new Deno.Command('docker', { + 
args: ['load', '-i', imagePath], + }); + + const result = await command.output(); + + if (!result.success) { + const stderr = new TextDecoder().decode(result.stderr); + throw new Error(`docker load failed: ${stderr}`); + } + + const stdout = new TextDecoder().decode(result.stdout); + // Parse image name from output like "Loaded image: nginx:latest" + const match = stdout.match(/Loaded image: (.+)/); + const imageName = match ? match[1].trim() : 'unknown'; + + logger.success(`Docker image imported: ${imageName}`); + return imageName; + } + + /** + * Restore platform resources for existing service (restore mode) + */ + private async restorePlatformResources( + serviceId: number, + backupResources: IBackupPlatformResource[], + tempDir: string, + warnings: string[] + ): Promise { + let restoredCount = 0; + + // Get existing resources for this service + const existingResources = await this.oneboxRef.platformServices.getResourcesForService(serviceId); + + for (const backupResource of backupResources) { + try { + // Find matching existing resource + const existing = existingResources.find( + (e) => + e.platformService.type === backupResource.platformServiceType && + e.resource.resourceType === backupResource.resourceType + ); + + if (!existing) { + warnings.push( + `Platform resource ${backupResource.platformServiceType}/${backupResource.resourceName} not provisioned. Skipping data import.` + ); + continue; + } + + // Import data based on type + const dataDir = `${tempDir}/data/${backupResource.platformServiceType}`; + + switch (backupResource.platformServiceType) { + case 'mongodb': + await this.importMongoDatabase( + dataDir, + existing.resource, + existing.credentials, + backupResource.resourceName + ); + restoredCount++; + break; + case 'minio': + await this.importMinioBucket( + dataDir, + existing.resource, + existing.credentials, + backupResource.resourceName + ); + restoredCount++; + break; + case 'clickhouse': + await this.importClickHouseDatabase( + dataDir, + existing.resource, + existing.credentials, + backupResource.resourceName + ); + restoredCount++; + break; + } + } catch (error) { + warnings.push( + `Failed to restore ${backupResource.platformServiceType} resource: ${getErrorMessage(error)}` + ); + } + } + + return restoredCount; + } + + /** + * Restore platform data for new service (import/clone mode) + */ + private async restorePlatformData( + serviceId: number, + backupResources: IBackupPlatformResource[], + tempDir: string, + warnings: string[] + ): Promise { + // For new services, platform resources should have been provisioned during deployment + return this.restorePlatformResources(serviceId, backupResources, tempDir, warnings); + } + + /** + * Import MongoDB database + */ + private async importMongoDatabase( + dataDir: string, + resource: IPlatformResource, + credentials: Record, + backupResourceName: string + ): Promise { + logger.info(`Importing MongoDB database: ${resource.resourceName}`); + + const mongoService = this.oneboxRef.database.getPlatformServiceById(resource.platformServiceId); + if (!mongoService || !mongoService.containerId) { + throw new Error('MongoDB service not running'); + } + + const archivePath = `${dataDir}/${backupResourceName}.archive`; + const connectionUri = credentials.connectionUri || credentials.MONGODB_URI; + + if (!connectionUri) { + throw new Error('MongoDB connection URI not found'); + } + + // Read local archive and copy to container + const archiveData = await Deno.readFile(archivePath); + const containerArchivePath = 
`/tmp/${resource.resourceName}.archive`; + + // Write archive to container via exec + stdin (simplified - use cat) + // For production, use Docker API copy endpoint + const base64Data = btoa(String.fromCharCode(...archiveData)); + + await this.oneboxRef.docker.execInContainer(mongoService.containerId, [ + 'bash', + '-c', + `echo '${base64Data}' | base64 -d > ${containerArchivePath}`, + ]); + + // Run mongorestore + const result = await this.oneboxRef.docker.execInContainer(mongoService.containerId, [ + 'mongorestore', + `--uri=${connectionUri}`, + `--archive=${containerArchivePath}`, + '--gzip', + '--drop', + ]); + + if (result.exitCode !== 0) { + throw new Error(`mongorestore failed: ${result.stderr}`); + } + + // Cleanup + await this.oneboxRef.docker.execInContainer(mongoService.containerId, ['rm', containerArchivePath]); + + logger.success(`MongoDB database imported: ${resource.resourceName}`); + } + + /** + * Import MinIO bucket + */ + private async importMinioBucket( + dataDir: string, + resource: IPlatformResource, + credentials: Record, + backupResourceName: string + ): Promise { + logger.info(`Importing MinIO bucket: ${resource.resourceName}`); + + const bucketDir = `${dataDir}/${backupResourceName}`; + + const endpoint = credentials.endpoint || credentials.S3_ENDPOINT; + const accessKey = credentials.accessKey || credentials.S3_ACCESS_KEY; + const secretKey = credentials.secretKey || credentials.S3_SECRET_KEY; + const bucket = credentials.bucket || credentials.S3_BUCKET; + + if (!endpoint || !accessKey || !secretKey || !bucket) { + throw new Error('MinIO credentials incomplete'); + } + + const s3Client = new plugins.smarts3.SmartS3({ + endpoint, + accessKey, + secretKey, + bucket, + }); + + await s3Client.start(); + + // Walk directory and upload all files + let uploadedCount = 0; + + for await (const entry of Deno.readDir(bucketDir)) { + if (entry.name === '_metadata.json') continue; + + const filePath = `${bucketDir}/${entry.name}`; + + if (entry.isFile) { + const fileData = await Deno.readFile(filePath); + await s3Client.putObject(entry.name, fileData); + uploadedCount++; + } + // Note: For nested directories, would need recursive handling + } + + await s3Client.stop(); + + logger.success(`MinIO bucket imported: ${resource.resourceName} (${uploadedCount} objects)`); + } + + /** + * Import ClickHouse database + */ + private async importClickHouseDatabase( + dataDir: string, + resource: IPlatformResource, + credentials: Record, + backupResourceName: string + ): Promise { + logger.info(`Importing ClickHouse database: ${resource.resourceName}`); + + const clickhouseService = this.oneboxRef.database.getPlatformServiceByType('clickhouse'); + if (!clickhouseService || !clickhouseService.containerId) { + throw new Error('ClickHouse service not running'); + } + + const dbName = credentials.database || credentials.CLICKHOUSE_DB; + const user = credentials.username || credentials.CLICKHOUSE_USER || 'default'; + const password = credentials.password || credentials.CLICKHOUSE_PASSWORD || ''; + + if (!dbName) { + throw new Error('ClickHouse database name not found'); + } + + // Read SQL dump + const sqlPath = `${dataDir}/${backupResourceName}.sql`; + const sqlContent = await Deno.readTextFile(sqlPath); + + // Execute SQL statements + const statements = sqlContent.split(';').filter((s) => s.trim()); + + for (const statement of statements) { + if (statement.trim().startsWith('--')) continue; + + const result = await this.oneboxRef.docker.execInContainer(clickhouseService.containerId, [ + 
'clickhouse-client', + `--user=${user}`, + `--password=${password}`, + '--query', + statement.trim(), + ]); + + if (result.exitCode !== 0) { + logger.warn(`ClickHouse statement failed: ${result.stderr}`); + } + } + + // Import TSV data files for each table + try { + for await (const entry of Deno.readDir(dataDir)) { + if (entry.name.endsWith('.tsv') && entry.name.startsWith(`${backupResourceName}_`)) { + const tableName = entry.name.replace(`${backupResourceName}_`, '').replace('.tsv', ''); + const tsvPath = `${dataDir}/${entry.name}`; + const tsvContent = await Deno.readTextFile(tsvPath); + + // Skip header line and insert data + const lines = tsvContent.split('\n'); + if (lines.length > 1) { + const dataLines = lines.slice(1).join('\n'); + + const result = await this.oneboxRef.docker.execInContainer(clickhouseService.containerId, [ + 'clickhouse-client', + `--user=${user}`, + `--password=${password}`, + '--query', + `INSERT INTO ${dbName}.${tableName} FORMAT TabSeparated`, + ]); + + // Note: Would need to pipe data via stdin for proper import + if (result.exitCode !== 0) { + logger.warn(`ClickHouse data import failed for ${tableName}: ${result.stderr}`); + } + } + } + } + } catch { + // No TSV files found + } + + logger.success(`ClickHouse database imported: ${resource.resourceName}`); + } + + /** + * Create tar archive from directory + */ + private async createTarArchive(sourceDir: string, outputPath: string): Promise { + const command = new Deno.Command('tar', { + args: ['-cf', outputPath, '-C', sourceDir, '.'], + }); + + const result = await command.output(); + + if (!result.success) { + const stderr = new TextDecoder().decode(result.stderr); + throw new Error(`tar create failed: ${stderr}`); + } + } + + /** + * Extract tar archive to directory + */ + private async extractTarArchive(archivePath: string, outputDir: string): Promise { + const command = new Deno.Command('tar', { + args: ['-xf', archivePath, '-C', outputDir], + }); + + const result = await command.output(); + + if (!result.success) { + const stderr = new TextDecoder().decode(result.stderr); + throw new Error(`tar extract failed: ${stderr}`); + } + } + + /** + * Encrypt a file using AES-256-GCM + */ + private async encryptFile(inputPath: string, outputPath: string, password: string): Promise { + const data = await Deno.readFile(inputPath); + + // Generate salt and derive key + const salt = crypto.getRandomValues(new Uint8Array(SALT_LENGTH)); + const key = await this.deriveKey(password, salt); + + // Generate IV + const iv = crypto.getRandomValues(new Uint8Array(IV_LENGTH)); + + // Encrypt + const ciphertext = await crypto.subtle.encrypt({ name: ENCRYPTION_ALGORITHM, iv }, key, data); + + // Combine: salt (32) + iv (12) + ciphertext + const combined = new Uint8Array(salt.length + iv.length + ciphertext.byteLength); + combined.set(salt, 0); + combined.set(iv, salt.length); + combined.set(new Uint8Array(ciphertext), salt.length + iv.length); + + await Deno.writeFile(outputPath, combined); + } + + /** + * Decrypt a file using AES-256-GCM + */ + private async decryptFile(inputPath: string, outputPath: string, password: string): Promise { + const combined = await Deno.readFile(inputPath); + + // Extract salt, iv, and ciphertext + const salt = combined.slice(0, SALT_LENGTH); + const iv = combined.slice(SALT_LENGTH, SALT_LENGTH + IV_LENGTH); + const ciphertext = combined.slice(SALT_LENGTH + IV_LENGTH); + + // Derive key + const key = await this.deriveKey(password, salt); + + // Decrypt + try { + const decrypted = await 
crypto.subtle.decrypt({ name: ENCRYPTION_ALGORITHM, iv }, key, ciphertext); + await Deno.writeFile(outputPath, new Uint8Array(decrypted)); + } catch { + throw new Error('Decryption failed. Invalid backup password or corrupted file.'); + } + } + + /** + * Derive encryption key from password using PBKDF2 + */ + private async deriveKey(password: string, salt: Uint8Array): Promise { + const encoder = new TextEncoder(); + const passwordBytes = encoder.encode(password); + + const baseKey = await crypto.subtle.importKey('raw', passwordBytes, 'PBKDF2', false, ['deriveKey']); + + return await crypto.subtle.deriveKey( + { + name: 'PBKDF2', + salt, + iterations: PBKDF2_ITERATIONS, + hash: 'SHA-256', + }, + baseKey, + { name: ENCRYPTION_ALGORITHM, length: KEY_LENGTH }, + false, + ['encrypt', 'decrypt'] + ); + } + + /** + * Compute SHA-256 checksum + */ + private async computeChecksum(data: Uint8Array): Promise { + const hashBuffer = await crypto.subtle.digest('SHA-256', data); + const hashArray = new Uint8Array(hashBuffer); + return 'sha256:' + Array.from(hashArray).map((b) => b.toString(16).padStart(2, '0')).join(''); + } +} diff --git a/ts/classes/httpserver.ts b/ts/classes/httpserver.ts index 96cd464..ff8d64a 100644 --- a/ts/classes/httpserver.ts +++ b/ts/classes/httpserver.ts @@ -319,6 +319,30 @@ export class OneboxHttpServer { return await this.handleGetNetworkStatsRequest(); } else if (path === '/api/network/traffic-stats' && method === 'GET') { return await this.handleGetTrafficStatsRequest(new URL(req.url)); + // Backup endpoints + } else if (path === '/api/backups' && method === 'GET') { + return await this.handleListBackupsRequest(); + } else if (path.match(/^\/api\/services\/[^/]+\/backups$/) && method === 'GET') { + const serviceName = path.split('/')[3]; + return await this.handleListServiceBackupsRequest(serviceName); + } else if (path.match(/^\/api\/services\/[^/]+\/backup$/) && method === 'POST') { + const serviceName = path.split('/')[3]; + return await this.handleCreateBackupRequest(serviceName); + } else if (path.match(/^\/api\/backups\/\d+$/) && method === 'GET') { + const backupId = Number(path.split('/').pop()); + return await this.handleGetBackupRequest(backupId); + } else if (path.match(/^\/api\/backups\/\d+\/download$/) && method === 'GET') { + const backupId = Number(path.split('/')[3]); + return await this.handleDownloadBackupRequest(backupId); + } else if (path.match(/^\/api\/backups\/\d+$/) && method === 'DELETE') { + const backupId = Number(path.split('/').pop()); + return await this.handleDeleteBackupRequest(backupId); + } else if (path === '/api/backups/restore' && method === 'POST') { + return await this.handleRestoreBackupRequest(req); + } else if (path === '/api/settings/backup-password' && method === 'POST') { + return await this.handleSetBackupPasswordRequest(req); + } else if (path === '/api/settings/backup-password' && method === 'GET') { + return await this.handleCheckBackupPasswordRequest(); } else { return this.jsonResponse({ success: false, error: 'Not found' }, 404); } @@ -2017,6 +2041,276 @@ export class OneboxHttpServer { } } + // ============ Backup Endpoints ============ + + /** + * List all backups + */ + private async handleListBackupsRequest(): Promise { + try { + const backups = this.oneboxRef.backupManager.listBackups(); + return this.jsonResponse({ success: true, data: backups }); + } catch (error) { + logger.error(`Failed to list backups: ${getErrorMessage(error)}`); + return this.jsonResponse({ + success: false, + error: getErrorMessage(error) || 
'Failed to list backups', + }, 500); + } + } + + /** + * List backups for a specific service + */ + private async handleListServiceBackupsRequest(serviceName: string): Promise { + try { + const service = this.oneboxRef.services.getService(serviceName); + if (!service) { + return this.jsonResponse({ success: false, error: 'Service not found' }, 404); + } + + const backups = this.oneboxRef.backupManager.listBackups(serviceName); + return this.jsonResponse({ success: true, data: backups }); + } catch (error) { + logger.error(`Failed to list backups for service ${serviceName}: ${getErrorMessage(error)}`); + return this.jsonResponse({ + success: false, + error: getErrorMessage(error) || 'Failed to list backups', + }, 500); + } + } + + /** + * Create a backup for a service + */ + private async handleCreateBackupRequest(serviceName: string): Promise { + try { + const service = this.oneboxRef.services.getService(serviceName); + if (!service) { + return this.jsonResponse({ success: false, error: 'Service not found' }, 404); + } + + const result = await this.oneboxRef.backupManager.createBackup(serviceName); + + return this.jsonResponse({ + success: true, + message: `Backup created for service ${serviceName}`, + data: result.backup, + }); + } catch (error) { + logger.error(`Failed to create backup for service ${serviceName}: ${getErrorMessage(error)}`); + return this.jsonResponse({ + success: false, + error: getErrorMessage(error) || 'Failed to create backup', + }, 500); + } + } + + /** + * Get a specific backup by ID + */ + private async handleGetBackupRequest(backupId: number): Promise { + try { + const backup = this.oneboxRef.database.getBackupById(backupId); + if (!backup) { + return this.jsonResponse({ success: false, error: 'Backup not found' }, 404); + } + + return this.jsonResponse({ success: true, data: backup }); + } catch (error) { + logger.error(`Failed to get backup ${backupId}: ${getErrorMessage(error)}`); + return this.jsonResponse({ + success: false, + error: getErrorMessage(error) || 'Failed to get backup', + }, 500); + } + } + + /** + * Download a backup file + */ + private async handleDownloadBackupRequest(backupId: number): Promise { + try { + const filePath = this.oneboxRef.backupManager.getBackupFilePath(backupId); + if (!filePath) { + return this.jsonResponse({ success: false, error: 'Backup not found' }, 404); + } + + // Check if file exists + try { + await Deno.stat(filePath); + } catch { + return this.jsonResponse({ success: false, error: 'Backup file not found on disk' }, 404); + } + + // Read file and return as download + const backup = this.oneboxRef.database.getBackupById(backupId); + const file = await Deno.readFile(filePath); + + return new Response(file, { + status: 200, + headers: { + 'Content-Type': 'application/octet-stream', + 'Content-Disposition': `attachment; filename="${backup?.filename || 'backup.tar.enc'}"`, + 'Content-Length': String(file.length), + }, + }); + } catch (error) { + logger.error(`Failed to download backup ${backupId}: ${getErrorMessage(error)}`); + return this.jsonResponse({ + success: false, + error: getErrorMessage(error) || 'Failed to download backup', + }, 500); + } + } + + /** + * Delete a backup + */ + private async handleDeleteBackupRequest(backupId: number): Promise { + try { + const backup = this.oneboxRef.database.getBackupById(backupId); + if (!backup) { + return this.jsonResponse({ success: false, error: 'Backup not found' }, 404); + } + + await this.oneboxRef.backupManager.deleteBackup(backupId); + + return this.jsonResponse({ + 
success: true, + message: 'Backup deleted successfully', + }); + } catch (error) { + logger.error(`Failed to delete backup ${backupId}: ${getErrorMessage(error)}`); + return this.jsonResponse({ + success: false, + error: getErrorMessage(error) || 'Failed to delete backup', + }, 500); + } + } + + /** + * Restore a backup + */ + private async handleRestoreBackupRequest(req: Request): Promise { + try { + const body = await req.json(); + const { backupId, mode, newServiceName, overwriteExisting, skipPlatformData } = body; + + if (!backupId) { + return this.jsonResponse({ + success: false, + error: 'Backup ID is required', + }, 400); + } + + if (!mode || !['restore', 'import', 'clone'].includes(mode)) { + return this.jsonResponse({ + success: false, + error: 'Valid mode required: restore, import, or clone', + }, 400); + } + + // Get backup file path + const filePath = this.oneboxRef.backupManager.getBackupFilePath(backupId); + if (!filePath) { + return this.jsonResponse({ success: false, error: 'Backup not found' }, 404); + } + + // Validate mode-specific requirements + if ((mode === 'import' || mode === 'clone') && !newServiceName) { + return this.jsonResponse({ + success: false, + error: `New service name required for '${mode}' mode`, + }, 400); + } + + const result = await this.oneboxRef.backupManager.restoreBackup(filePath, { + mode, + newServiceName, + overwriteExisting: overwriteExisting === true, + skipPlatformData: skipPlatformData === true, + }); + + return this.jsonResponse({ + success: true, + message: `Backup restored successfully as service '${result.service.name}'`, + data: { + service: result.service, + platformResourcesRestored: result.platformResourcesRestored, + warnings: result.warnings, + }, + }); + } catch (error) { + logger.error(`Failed to restore backup: ${getErrorMessage(error)}`); + return this.jsonResponse({ + success: false, + error: getErrorMessage(error) || 'Failed to restore backup', + }, 500); + } + } + + /** + * Set backup encryption password + */ + private async handleSetBackupPasswordRequest(req: Request): Promise { + try { + const body = await req.json(); + const { password } = body; + + if (!password || typeof password !== 'string') { + return this.jsonResponse({ + success: false, + error: 'Password is required', + }, 400); + } + + if (password.length < 8) { + return this.jsonResponse({ + success: false, + error: 'Password must be at least 8 characters', + }, 400); + } + + // Store password in settings + this.oneboxRef.database.setSetting('backup_encryption_password', password); + + return this.jsonResponse({ + success: true, + message: 'Backup password set successfully', + }); + } catch (error) { + logger.error(`Failed to set backup password: ${getErrorMessage(error)}`); + return this.jsonResponse({ + success: false, + error: getErrorMessage(error) || 'Failed to set backup password', + }, 500); + } + } + + /** + * Check if backup password is configured + */ + private async handleCheckBackupPasswordRequest(): Promise { + try { + const password = this.oneboxRef.database.getSetting('backup_encryption_password'); + const isConfigured = password !== null && password.length > 0; + + return this.jsonResponse({ + success: true, + data: { + isConfigured, + }, + }); + } catch (error) { + logger.error(`Failed to check backup password: ${getErrorMessage(error)}`); + return this.jsonResponse({ + success: false, + error: getErrorMessage(error) || 'Failed to check backup password', + }, 500); + } + } + /** * Helper to create JSON response */ diff --git 
a/ts/classes/onebox.ts b/ts/classes/onebox.ts index 834d55a..250e360 100644 --- a/ts/classes/onebox.ts +++ b/ts/classes/onebox.ts @@ -20,6 +20,7 @@ import { CertRequirementManager } from './cert-requirement-manager.ts'; import { RegistryManager } from './registry.ts'; import { PlatformServicesManager } from './platform-services/index.ts'; import { CaddyLogReceiver } from './caddy-log-receiver.ts'; +import { BackupManager } from './backup-manager.ts'; export class Onebox { public database: OneboxDatabase; @@ -36,6 +37,7 @@ export class Onebox { public registry: RegistryManager; public platformServices: PlatformServicesManager; public caddyLogReceiver: CaddyLogReceiver; + public backupManager: BackupManager; private initialized = false; @@ -67,6 +69,9 @@ export class Onebox { // Initialize Caddy log receiver this.caddyLogReceiver = new CaddyLogReceiver(9999); + + // Initialize Backup manager + this.backupManager = new BackupManager(this); } /** diff --git a/ts/database/index.ts b/ts/database/index.ts index d458fa7..2faf135 100644 --- a/ts/database/index.ts +++ b/ts/database/index.ts @@ -18,6 +18,7 @@ import type { IDomain, ICertificate, ICertRequirement, + IBackup, } from '../types.ts'; import type { TBindValue } from './types.ts'; import { logger } from '../logging.ts'; @@ -31,6 +32,7 @@ import { AuthRepository, MetricsRepository, PlatformRepository, + BackupRepository, } from './repositories/index.ts'; export class OneboxDatabase { @@ -44,6 +46,7 @@ export class OneboxDatabase { private authRepo!: AuthRepository; private metricsRepo!: MetricsRepository; private platformRepo!: PlatformRepository; + private backupRepo!: BackupRepository; constructor(dbPath = './.nogit/onebox.db') { this.dbPath = dbPath; @@ -76,6 +79,7 @@ export class OneboxDatabase { this.authRepo = new AuthRepository(queryFn); this.metricsRepo = new MetricsRepository(queryFn); this.platformRepo = new PlatformRepository(queryFn); + this.backupRepo = new BackupRepository(queryFn); } catch (error) { logger.error(`Failed to initialize database: ${getErrorMessage(error)}`); throw error; @@ -705,6 +709,37 @@ export class OneboxDatabase { this.setMigrationVersion(8); logger.success('Migration 8 completed: Certificates table now stores PEM content'); } + + // Migration 9: Backup system tables + const version9 = this.getMigrationVersion(); + if (version9 < 9) { + logger.info('Running migration 9: Creating backup system tables...'); + + // Add include_image_in_backup column to services table + this.query(`ALTER TABLE services ADD COLUMN include_image_in_backup INTEGER DEFAULT 1`); + + // Create backups table + this.query(` + CREATE TABLE backups ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + service_id INTEGER NOT NULL, + service_name TEXT NOT NULL, + filename TEXT NOT NULL, + size_bytes INTEGER NOT NULL, + created_at REAL NOT NULL, + includes_image INTEGER NOT NULL, + platform_resources TEXT NOT NULL DEFAULT '[]', + checksum TEXT NOT NULL, + FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE + ) + `); + + this.query('CREATE INDEX IF NOT EXISTS idx_backups_service ON backups(service_id)'); + this.query('CREATE INDEX IF NOT EXISTS idx_backups_created ON backups(created_at DESC)'); + + this.setMigrationVersion(9); + logger.success('Migration 9 completed: Backup system tables created'); + } } catch (error) { logger.error(`Migration failed: ${getErrorMessage(error)}`); if (error instanceof Error && error.stack) { @@ -1078,4 +1113,30 @@ export class OneboxDatabase { deletePlatformResourcesByService(serviceId: number): void { 
this.platformRepo.deletePlatformResourcesByService(serviceId); } + + // ============ Backups (delegated to repository) ============ + + createBackup(backup: Omit): IBackup { + return this.backupRepo.create(backup); + } + + getBackupById(id: number): IBackup | null { + return this.backupRepo.getById(id); + } + + getBackupsByService(serviceId: number): IBackup[] { + return this.backupRepo.getByService(serviceId); + } + + getAllBackups(): IBackup[] { + return this.backupRepo.getAll(); + } + + deleteBackup(id: number): void { + this.backupRepo.delete(id); + } + + deleteBackupsByService(serviceId: number): void { + this.backupRepo.deleteByService(serviceId); + } } diff --git a/ts/database/repositories/backup.repository.ts b/ts/database/repositories/backup.repository.ts new file mode 100644 index 0000000..f20e998 --- /dev/null +++ b/ts/database/repositories/backup.repository.ts @@ -0,0 +1,86 @@ +/** + * Backup Repository + * Handles CRUD operations for backups table + */ + +import { BaseRepository } from '../base.repository.ts'; +import type { IBackup, TPlatformServiceType } from '../../types.ts'; + +export class BackupRepository extends BaseRepository { + create(backup: Omit): IBackup { + this.query( + `INSERT INTO backups ( + service_id, service_name, filename, size_bytes, created_at, + includes_image, platform_resources, checksum + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?)`, + [ + backup.serviceId, + backup.serviceName, + backup.filename, + backup.sizeBytes, + backup.createdAt, + backup.includesImage ? 1 : 0, + JSON.stringify(backup.platformResources), + backup.checksum, + ] + ); + + // Get the created backup by looking for the most recent one with matching filename + const rows = this.query( + 'SELECT * FROM backups WHERE filename = ? ORDER BY id DESC LIMIT 1', + [backup.filename] + ); + + return this.rowToBackup(rows[0]); + } + + getById(id: number): IBackup | null { + const rows = this.query('SELECT * FROM backups WHERE id = ?', [id]); + return rows.length > 0 ? this.rowToBackup(rows[0]) : null; + } + + getByService(serviceId: number): IBackup[] { + const rows = this.query( + 'SELECT * FROM backups WHERE service_id = ? 
ORDER BY created_at DESC', + [serviceId] + ); + return rows.map((row) => this.rowToBackup(row)); + } + + getAll(): IBackup[] { + const rows = this.query('SELECT * FROM backups ORDER BY created_at DESC'); + return rows.map((row) => this.rowToBackup(row)); + } + + delete(id: number): void { + this.query('DELETE FROM backups WHERE id = ?', [id]); + } + + deleteByService(serviceId: number): void { + this.query('DELETE FROM backups WHERE service_id = ?', [serviceId]); + } + + private rowToBackup(row: any): IBackup { + let platformResources: TPlatformServiceType[] = []; + const platformResourcesRaw = row.platform_resources; + if (platformResourcesRaw) { + try { + platformResources = JSON.parse(String(platformResourcesRaw)); + } catch { + platformResources = []; + } + } + + return { + id: Number(row.id), + serviceId: Number(row.service_id), + serviceName: String(row.service_name), + filename: String(row.filename), + sizeBytes: Number(row.size_bytes), + createdAt: Number(row.created_at), + includesImage: Boolean(row.includes_image), + platformResources, + checksum: String(row.checksum), + }; + } +} diff --git a/ts/database/repositories/index.ts b/ts/database/repositories/index.ts index 8974827..8c034bf 100644 --- a/ts/database/repositories/index.ts +++ b/ts/database/repositories/index.ts @@ -8,3 +8,4 @@ export { CertificateRepository } from './certificate.repository.ts'; export { AuthRepository } from './auth.repository.ts'; export { MetricsRepository } from './metrics.repository.ts'; export { PlatformRepository } from './platform.repository.ts'; +export { BackupRepository } from './backup.repository.ts'; diff --git a/ts/database/repositories/service.repository.ts b/ts/database/repositories/service.repository.ts index 700a22b..5e3d6aa 100644 --- a/ts/database/repositories/service.repository.ts +++ b/ts/database/repositories/service.repository.ts @@ -119,6 +119,10 @@ export class ServiceRepository extends BaseRepository { fields.push('platform_requirements = ?'); values.push(JSON.stringify(updates.platformRequirements)); } + if (updates.includeImageInBackup !== undefined) { + fields.push('include_image_in_backup = ?'); + values.push(updates.includeImageInBackup ? 1 : 0); + } fields.push('updated_at = ?'); values.push(Date.now()); @@ -172,6 +176,9 @@ export class ServiceRepository extends BaseRepository { autoUpdateOnPush: row.auto_update_on_push ? Boolean(row.auto_update_on_push) : undefined, imageDigest: row.image_digest ? String(row.image_digest) : undefined, platformRequirements, + includeImageInBackup: row.include_image_in_backup !== undefined + ? 
Boolean(row.include_image_in_backup) + : true, // Default to true }; } } diff --git a/ts/types.ts b/ts/types.ts index 412793b..154b640 100644 --- a/ts/types.ts +++ b/ts/types.ts @@ -23,6 +23,8 @@ export interface IService { imageDigest?: string; // Platform service requirements platformRequirements?: IPlatformRequirements; + // Backup settings + includeImageInBackup?: boolean; } // Registry types @@ -317,3 +319,68 @@ export interface ICliArgs { _: string[]; [key: string]: unknown; } + +// Backup types +export type TBackupRestoreMode = 'restore' | 'import' | 'clone'; + +export interface IBackup { + id?: number; + serviceId: number; + serviceName: string; // Denormalized for display + filename: string; + sizeBytes: number; + createdAt: number; + includesImage: boolean; + platformResources: TPlatformServiceType[]; // Which platform types were backed up + checksum: string; +} + +export interface IBackupManifest { + version: string; + createdAt: number; + oneboxVersion: string; + serviceName: string; + includesImage: boolean; + platformResources: TPlatformServiceType[]; + checksum: string; +} + +export interface IBackupServiceConfig { + name: string; + image: string; + registry?: string; + envVars: Record; + port: number; + domain?: string; + useOneboxRegistry?: boolean; + registryRepository?: string; + registryImageTag?: string; + autoUpdateOnPush?: boolean; + platformRequirements?: IPlatformRequirements; + includeImageInBackup?: boolean; +} + +export interface IBackupPlatformResource { + resourceType: TPlatformResourceType; + resourceName: string; + platformServiceType: TPlatformServiceType; + credentials: Record; // Decrypted for backup, re-encrypted on restore +} + +export interface IBackupResult { + backup: IBackup; + filePath: string; +} + +export interface IRestoreOptions { + mode: TBackupRestoreMode; + newServiceName?: string; // Required for 'import' and 'clone' modes + skipPlatformData?: boolean; // Restore config only, skip DB/bucket data + overwriteExisting?: boolean; // For 'restore' mode +} + +export interface IRestoreResult { + service: IService; + platformResourcesRestored: number; + warnings: string[]; +} diff --git a/ui/src/app/core/services/api.service.ts b/ui/src/app/core/services/api.service.ts index 8438327..1587b06 100644 --- a/ui/src/app/core/services/api.service.ts +++ b/ui/src/app/core/services/api.service.ts @@ -25,6 +25,10 @@ import { IContainerStats, IMetric, ITrafficStats, + IBackup, + IRestoreOptions, + IRestoreResult, + IBackupPasswordStatus, } from '../types/api.types'; @Injectable({ providedIn: 'root' }) @@ -210,4 +214,50 @@ export class ApiService { const params = minutes ? 
diff --git a/ui/src/app/core/services/api.service.ts b/ui/src/app/core/services/api.service.ts
index 8438327..1587b06 100644
--- a/ui/src/app/core/services/api.service.ts
+++ b/ui/src/app/core/services/api.service.ts
@@ -25,6 +25,10 @@ import {
   IContainerStats,
   IMetric,
   ITrafficStats,
+  IBackup,
+  IRestoreOptions,
+  IRestoreResult,
+  IBackupPasswordStatus,
 } from '../types/api.types';
 
 @Injectable({ providedIn: 'root' })
@@ -210,4 +214,50 @@ export class ApiService {
     const params = minutes ? `?minutes=${minutes}` : '';
     return firstValueFrom(this.http.get>(`/api/network/traffic-stats${params}`));
   }
+
+  // Backups
+  async getBackups(): Promise> {
+    return firstValueFrom(this.http.get>('/api/backups'));
+  }
+
+  async getServiceBackups(serviceName: string): Promise> {
+    return firstValueFrom(this.http.get>(`/api/services/${serviceName}/backups`));
+  }
+
+  async createBackup(serviceName: string): Promise> {
+    return firstValueFrom(this.http.post>(`/api/services/${serviceName}/backup`, {}));
+  }
+
+  async getBackup(backupId: number): Promise> {
+    return firstValueFrom(this.http.get>(`/api/backups/${backupId}`));
+  }
+
+  async deleteBackup(backupId: number): Promise> {
+    return firstValueFrom(this.http.delete>(`/api/backups/${backupId}`));
+  }
+
+  getBackupDownloadUrl(backupId: number): string {
+    return `/api/backups/${backupId}/download`;
+  }
+
+  async restoreBackup(backupId: number, options: IRestoreOptions): Promise> {
+    return firstValueFrom(
+      this.http.post>('/api/backups/restore', {
+        backupId,
+        ...options,
+      })
+    );
+  }
+
+  async setBackupPassword(password: string): Promise> {
+    return firstValueFrom(
+      this.http.post>('/api/settings/backup-password', { password })
+    );
+  }
+
+  async checkBackupPassword(): Promise> {
+    return firstValueFrom(
+      this.http.get>('/api/settings/backup-password')
+    );
+  }
 }
diff --git a/ui/src/app/core/types/api.types.ts b/ui/src/app/core/types/api.types.ts
index 64992d0..e743d26 100644
--- a/ui/src/app/core/types/api.types.ts
+++ b/ui/src/app/core/types/api.types.ts
@@ -344,3 +344,35 @@ export interface ITrafficStats {
   requestsPerMinute: number;
   errorRate: number; // percentage
 }
+
+// Backup Types
+export interface IBackup {
+  id?: number;
+  serviceId: number;
+  serviceName: string;
+  filename: string;
+  sizeBytes: number;
+  createdAt: number;
+  includesImage: boolean;
+  platformResources: TPlatformServiceType[];
+  checksum: string;
+}
+
+export type TRestoreMode = 'restore' | 'import' | 'clone';
+
+export interface IRestoreOptions {
+  mode: TRestoreMode;
+  newServiceName?: string;
+  overwriteExisting?: boolean;
+  skipPlatformData?: boolean;
+}
+
+export interface IRestoreResult {
+  service: IService;
+  platformResourcesRestored: number;
+  warnings: string[];
+}
+
+export interface IBackupPasswordStatus {
+  isConfigured: boolean;
+}
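A hedged sketch of how a UI caller can chain these ApiService methods (this mirrors what the component below does). The stripped response generics are the project's usual { success, data, error } envelope; its exact type name is not visible in this hunk, so the checks below rely only on those three fields, and the local names are illustrative:

    // `api` is an injected ApiService instance.
    async function backupThenClone(api: ApiService, serviceName: string): Promise<void> {
      const created = await api.createBackup(serviceName);
      if (!created.success || !created.data?.id) {
        throw new Error(created.error || 'Backup failed');
      }

      const restored = await api.restoreBackup(created.data.id, {
        mode: 'clone',
        newServiceName: `${serviceName}-copy`, // required for 'clone'
      });
      if (restored.success && restored.data) {
        console.log('Cloned as', restored.data.service.name, restored.data.warnings);
      }
    }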
diff --git a/ui/src/app/features/services/service-detail.component.ts b/ui/src/app/features/services/service-detail.component.ts
index 661c77d..12a68dd 100644
--- a/ui/src/app/features/services/service-detail.component.ts
+++ b/ui/src/app/features/services/service-detail.component.ts
@@ -6,7 +6,7 @@ import { ApiService } from '../../core/services/api.service';
 import { ToastService } from '../../core/services/toast.service';
 import { LogStreamService } from '../../core/services/log-stream.service';
 import { WebSocketService } from '../../core/services/websocket.service';
-import { IService, IServiceUpdate, IPlatformResource, IContainerStats, IMetric } from '../../core/types/api.types';
+import { IService, IServiceUpdate, IPlatformResource, IContainerStats, IMetric, IBackup, TRestoreMode } from '../../core/types/api.types';
 import { ContainerStatsComponent } from '../../shared/components/container-stats/container-stats.component';
 import {
   CardComponent,
@@ -333,6 +333,79 @@ import {
         }
+
+          Backups
+          Create and manage service backups
+
+        @if (backups().length > 0) {
+          @for (backup of backups(); track backup.id) {
+            {{ formatDate(backup.createdAt) }}
+            {{ formatBytes(backup.sizeBytes) }}
+            @if (backup.includesImage) {
+              Docker Image
+            }
+            @for (res of backup.platformResources; track res) {
+              {{ res }}
+            }
+          }
+        } @else {
+          No backups yet
+          Create a backup to protect your service data
+        }
+
@@ -420,6 +493,85 @@ import {
+
+      Delete Backup
+      Are you sure you want to delete this backup from {{ formatDate(selectedBackup()?.createdAt || 0) }}? This action cannot be undone.
+
+      Restore Backup
+      Choose how to restore this backup from {{ formatDate(selectedBackup()?.createdAt || 0) }}.
+
+        @if (restoreMode() === 'clone') {
+        }
+        @if (restoreMode() === 'restore') {
+          Warning: This will overwrite the current service configuration and data.
+        }
+
   `,
 })
 export class ServiceDetailComponent implements OnInit, OnDestroy {
@@ -437,11 +589,18 @@ export class ServiceDetailComponent implements OnInit, OnDestroy {
   platformResources = signal<IPlatformResource[]>([]);
   stats = signal<IContainerStats | null>(null);
   metrics = signal<IMetric[]>([]);
+  backups = signal<IBackup[]>([]);
   loading = signal(false);
   actionLoading = signal(false);
+  backupLoading = signal(false);
   editMode = signal(false);
   deleteDialogOpen = signal(false);
+  deleteBackupDialogOpen = signal(false);
+  restoreDialogOpen = signal(false);
+  selectedBackup = signal<IBackup | null>(null);
+  restoreMode = signal<TRestoreMode>('restore');
   autoScroll = true;
+  restoreNewServiceName = '';
 
   editForm: IServiceUpdate = {};
@@ -506,6 +665,9 @@ export class ServiceDetailComponent implements OnInit, OnDestroy {
       this.loadPlatformResources(name);
     }
 
+    // Load backups for this service
+    this.loadBackups(name);
+
     // Load initial stats and metrics if service is running
     // (WebSocket will keep stats updated in real-time)
     if (response.data.status === 'running') {
@@ -737,4 +899,126 @@
       this.deleteDialogOpen.set(false);
     }
   }
+
+  // Backup methods
+  async loadBackups(name: string): Promise<void> {
+    try {
+      const response = await this.api.getServiceBackups(name);
+      if (response.success && response.data) {
+        this.backups.set(response.data);
+      }
+    } catch {
+      // Silent fail - backups are optional
+    }
+  }
+
+  async createBackup(): Promise<void> {
+    const name = this.service()?.name;
+    if (!name) return;
+
+    this.backupLoading.set(true);
+    try {
+      const response = await this.api.createBackup(name);
+      if (response.success) {
+        this.toast.success('Backup created successfully');
+        this.loadBackups(name);
+      } else {
+        this.toast.error(response.error || 'Failed to create backup');
+      }
+    } catch {
+      this.toast.error('Failed to create backup');
+    } finally {
+      this.backupLoading.set(false);
+    }
+  }
+
+  openDeleteBackupDialog(backup: IBackup): void {
+    this.selectedBackup.set(backup);
+    this.deleteBackupDialogOpen.set(true);
+  }
+
+  async deleteBackup(): Promise<void> {
+    const backup = this.selectedBackup();
+    const serviceName = this.service()?.name;
+    if (!backup?.id || !serviceName) return;
+
+    this.backupLoading.set(true);
+    try {
+      const response = await this.api.deleteBackup(backup.id);
+      if (response.success) {
+        this.toast.success('Backup deleted');
+        this.loadBackups(serviceName);
+      } else {
+        this.toast.error(response.error || 'Failed to delete backup');
+      }
+    } catch {
+      this.toast.error('Failed to delete backup');
+    } finally {
+      this.backupLoading.set(false);
+      this.deleteBackupDialogOpen.set(false);
+      this.selectedBackup.set(null);
+    }
+  }
+
+  openRestoreDialog(backup: IBackup): void {
+    this.selectedBackup.set(backup);
+    this.restoreMode.set('restore');
+    this.restoreNewServiceName = '';
+    this.restoreDialogOpen.set(true);
+  }
+
+  async restoreBackup(): Promise<void> {
+    const backup = this.selectedBackup();
+    const serviceName = this.service()?.name;
+    if (!backup?.id || !serviceName) return;
+
+    const mode = this.restoreMode();
+    if (mode === 'clone' && !this.restoreNewServiceName.trim()) {
+      this.toast.error('Please enter a new service name');
+      return;
+    }
+
+    this.backupLoading.set(true);
+    try {
+      const response = await this.api.restoreBackup(backup.id, {
+        mode,
+        newServiceName: mode === 'clone' ? this.restoreNewServiceName.trim() : undefined,
+        overwriteExisting: mode === 'restore',
+      });
+
+      if (response.success && response.data) {
+        if (response.data.warnings && response.data.warnings.length > 0) {
+          this.toast.warning(`Restored with warnings: ${response.data.warnings.join(', ')}`);
+        } else {
+          this.toast.success('Backup restored successfully');
+        }
+
+        if (mode === 'clone') {
+          this.router.navigate(['/services', response.data.service.name]);
+        } else {
+          this.loadService(serviceName);
+        }
+      } else {
+        this.toast.error(response.error || 'Failed to restore backup');
+      }
+    } catch {
+      this.toast.error('Failed to restore backup');
+    } finally {
+      this.backupLoading.set(false);
+      this.restoreDialogOpen.set(false);
+      this.selectedBackup.set(null);
+    }
+  }
+
+  getBackupDownloadUrl(backupId: number): string {
+    return this.api.getBackupDownloadUrl(backupId);
+  }
+
+  formatBytes(bytes: number): string {
+    if (bytes === 0) return '0 B';
+    const k = 1024;
+    const sizes = ['B', 'KB', 'MB', 'GB', 'TB'];
+    const i = Math.floor(Math.log(bytes) / Math.log(k));
+    return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
+  }
 }
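For reference, formatBytes() above renders 1536 as '1.5 KB' and 10485760 as '10 MB'. As a final sketch, the same restore flow at the REST level, matching the payload that ApiService.restoreBackup() builds (backupId plus the IRestoreOptions fields); the endpoint path is the one wired up in this patch, while the fetch wrapper, base URL and bearer-token auth are illustrative assumptions:

    // Restore backup #7 in place, overwriting the existing service (sketch).
    async function restoreViaHttp(baseUrl: string, token: string): Promise<void> {
      const res = await fetch(`${baseUrl}/api/backups/restore`, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          Authorization: `Bearer ${token}`, // auth scheme assumed; adapt to the deployment
        },
        body: JSON.stringify({ backupId: 7, mode: 'restore', overwriteExisting: true }),
      });
      const payload = await res.json();
      if (!res.ok || !payload.success) {
        throw new Error(payload.error || `Restore failed (HTTP ${res.status})`);
      }
      console.log('Restored service:', payload.data.service.name, 'warnings:', payload.data.warnings);
    }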