diff --git a/changelog.md b/changelog.md
index fe0082c..1d47af9 100644
--- a/changelog.md
+++ b/changelog.md
@@ -1,5 +1,12 @@
 # Changelog
 
+## 2024-12-20 - 1.2.2 - fix(core)
+Refactored configuration management classes and improved service update handling
+
+- Replaced SparkLocalConfig with SparkConfig for configuration management.
+- Improved service handling and update check logic.
+- Consolidated service definition and update logic for better maintainability.
+
 ## 2024-12-19 - 1.2.1 - fix(taskmanager)
 Remove checkinSlackTask from SparkTaskManager for streamlined task management
 
diff --git a/ts/00_commitinfo_data.ts b/ts/00_commitinfo_data.ts
index f350e84..7ed0928 100644
--- a/ts/00_commitinfo_data.ts
+++ b/ts/00_commitinfo_data.ts
@@ -3,6 +3,6 @@
  */
 export const commitinfo = {
   name: '@serve.zone/spark',
-  version: '1.2.1',
+  version: '1.2.2',
   description: 'A comprehensive tool for maintaining and configuring servers, integrating with Docker and supporting advanced task scheduling, targeted at the Servezone infrastructure and used by @serve.zone/cloudly as a cluster node server system manager.'
 }
diff --git a/ts/spark.classes.config.ts b/ts/spark.classes.config.ts
index 5193bf4..4f90248 100644
--- a/ts/spark.classes.config.ts
+++ b/ts/spark.classes.config.ts
@@ -3,7 +3,12 @@ import { Spark } from './index.js';
 
 export class SparkConfig {
   public sparkRef: Spark;
+  public kvStore: plugins.npmextra.KeyValueStore;
 
   constructor(sparkRefArg: Spark) {
     this.sparkRef = sparkRefArg;
+    this.kvStore = new plugins.npmextra.KeyValueStore({
+      typeArg: 'userHomeDir',
+      identityArg: 'servezone_spark',
+    });
   }
 }
diff --git a/ts/spark.classes.localconfig.ts b/ts/spark.classes.localconfig.ts
deleted file mode 100644
index 02bce91..0000000
--- a/ts/spark.classes.localconfig.ts
+++ /dev/null
@@ -1,15 +0,0 @@
-import * as plugins from './spark.plugins.js';
-import { Spark } from './index.js';
-
-export class SparkLocalConfig {
-  public sparkRef: Spark;
-  private kvStore: plugins.npmextra.KeyValueStore;
-
-  constructor(sparkRefArg: Spark) {
-    this.sparkRef = sparkRefArg;
-    this.kvStore = new plugins.npmextra.KeyValueStore({
-      typeArg: 'userHomeDir',
-      identityArg: 'servezone_spark',
-    });
-  }
-}
diff --git a/ts/spark.classes.spark.ts b/ts/spark.classes.spark.ts
index b383d65..e6688a2 100644
--- a/ts/spark.classes.spark.ts
+++ b/ts/spark.classes.spark.ts
@@ -1,23 +1,23 @@
 import * as plugins from './spark.plugins.js';
 import { SparkTaskManager } from './spark.classes.taskmanager.js';
 import { SparkInfo } from './spark.classes.info.js';
-import { SparkUpdateManager } from './spark.classes.updatemanager.js';
+import { SparkServicesManager } from './spark.classes.updatemanager.js';
 import { logger } from './spark.logging.js';
-import { SparkLocalConfig } from './spark.classes.localconfig.js';
+import { SparkConfig } from './spark.classes.config.js';
 
 export class Spark {
   public smartdaemon: plugins.smartdaemon.SmartDaemon;
-  public sparkLocalConfig: SparkLocalConfig;
+  public sparkConfig: SparkConfig;
   public sparkTaskManager: SparkTaskManager;
   public sparkInfo: SparkInfo;
-  public sparkUpdateManager: SparkUpdateManager;
+  public sparkUpdateManager: SparkServicesManager;
 
   constructor() {
     this.smartdaemon = new plugins.smartdaemon.SmartDaemon();
-    this.sparkLocalConfig = new SparkLocalConfig(this);
+    this.sparkConfig = new SparkConfig(this);
     this.sparkInfo = new SparkInfo(this);
     this.sparkTaskManager = new SparkTaskManager(this);
-    this.sparkUpdateManager = new SparkUpdateManager(this);
+    this.sparkUpdateManager = new SparkServicesManager(this);
   }
 
   public async daemonStart() {
diff --git a/ts/spark.classes.taskmanager.ts b/ts/spark.classes.taskmanager.ts
index 7e0a7a8..1d6d34f 100644
--- a/ts/spark.classes.taskmanager.ts
+++ b/ts/spark.classes.taskmanager.ts
@@ -10,7 +10,7 @@ export class SparkTaskManager {
   // tasks
   public updateSpark: plugins.taskbuffer.Task;
   public updateHost: plugins.taskbuffer.Task;
-  public updateCloudly: plugins.taskbuffer.Task;
+  public updateServices: plugins.taskbuffer.Task;
 
   constructor(sparkRefArg: Spark) {
     this.sparkRef = sparkRefArg;
@@ -56,7 +56,10 @@
       },
     });
 
-    this.updateCloudly = new plugins.taskbuffer.Task({
+    /**
+     * only being run when mode is cloudly
+     */
+    this.updateServices = new plugins.taskbuffer.Task({
       name: 'updateCloudly',
       taskFunction: async () => {
         logger.log('info', 'now running updateCloudly task');
@@ -69,9 +72,9 @@
    * start the taskmanager
    */
   public async start() {
+    this.taskmanager.addAndScheduleTask(this.updateServices, '30 */2 * * * *');
     this.taskmanager.addAndScheduleTask(this.updateSpark, '0 * * * * *');
     this.taskmanager.addAndScheduleTask(this.updateHost, '0 0 0 * * *');
-    this.taskmanager.addAndScheduleTask(this.updateCloudly, '30 */2 * * * *');
     this.taskmanager.start();
   }
 
@@ -81,7 +84,7 @@
   public async stop() {
     this.taskmanager.descheduleTask(this.updateSpark);
     this.taskmanager.descheduleTask(this.updateHost);
-    this.taskmanager.descheduleTask(this.updateCloudly);
+    this.taskmanager.descheduleTask(this.updateServices);
     this.taskmanager.stop();
   }
 }
diff --git a/ts/spark.classes.updatemanager.ts b/ts/spark.classes.updatemanager.ts
index 735fd14..7af3431 100644
--- a/ts/spark.classes.updatemanager.ts
+++ b/ts/spark.classes.updatemanager.ts
@@ -3,10 +3,26 @@ import * as paths from './spark.paths.js';
 import { Spark } from './spark.classes.spark.js';
 import { logger } from './spark.logging.js';
 
-export class SparkUpdateManager {
+/**
+ * this class takes care of updating the services that are managed by spark
+ */
+export class SparkServicesManager {
   public sparkRef: Spark;
   public dockerHost: plugins.docker.DockerHost;
   public smartupdate: plugins.smartupdate.SmartUpdate;
+
+  /**
+   * the services that are managed by spark
+   */
+  services: Array<{
+    name: string;
+    image: string;
+    url: string;
+    port: string;
+    environment: string;
+    secretJson: any;
+  }> = [];
+
   constructor(sparkrefArg: Spark) {
     this.sparkRef = sparkrefArg;
     this.dockerHost = new plugins.docker.DockerHost({});
@@ -21,109 +37,58 @@
   }
 
   public async updateServices() {
-    if (
-      plugins.smartfile.fs.isDirectory(plugins.path.join(paths.homeDir, 'serve.zone/spark')) &&
-      (await plugins.smartfile.fs.fileExists(
-        plugins.path.join(paths.homeDir, 'serve.zone/spark/spark.json')
-      ))
-    ) {
-      const services: Array<{
-        name: string;
-        image: string;
-        url: string;
-        port: string;
-        environment: string;
-        secretJson: any;
-      }> = [];
-      // lets add coreflow
-      services.push({
-        name: `coreflow`,
-        image: `code.foss.global/serve.zone/coreflow`,
-        url: `coreflow`,
-        environment: `production`,
-        port: `3000`,
-        secretJson: {
-          SERVEZONE_PORT: `3000`,
-          SERVEZONE_ENVIRONMENT: `production`,
-        },
-      });
-
-      services.push({
-        name: `coretraffic`,
-        image: `code.foss.global/serve.zone/coretraffic`,
-        url: `coreflow`,
-        environment: `production`,
-        port: `3000`,
-        secretJson: {
-          SERVEZONE_PORT: `3000`,
-          SERVEZONE_ENVIRONMENT: `production`,
-        },
-      });
-
-      services.push({
-        name: `corelog`,
-        image: `code.foss.global/serve.zone/corelog`,
-        url: `coreflow`,
-        environment: `production`,
-        port: `3000`,
-        secretJson: {
-          SERVEZONE_PORT: `3000`,
-          SERVEZONE_ENVIRONMENT: `production`,
-        },
-      });
-
-      // lets add coretraffic
-
-      for (const service of services) {
-        const existingService = await plugins.docker.DockerService.getServiceByName(
-          this.dockerHost,
-          service.name
-        );
-        const existingServiceSecret = await plugins.docker.DockerSecret.getSecretByName(
-          this.dockerHost,
-          `${service.name}Secret`
-        );
-        if (existingService) {
-          const needsUpdate: boolean = await existingService.needsUpdate();
-          if (!needsUpdate) {
-            logger.log('info', `not needing update.`);
-            // we simply return here to end the functions
-            return;
-          }
-          logger.log('ok', `${service.name} needs to be updated!`);
-          await existingService.remove();
-          await existingServiceSecret.remove();
-        }
-        if (!existingService && existingServiceSecret) {
-          await existingServiceSecret.remove();
-        }
-
-        const newServiceImage = await plugins.docker.DockerImage.createFromRegistry(
-          this.dockerHost,
-          {
-            creationObject: {
-              imageUrl: service.image,
-            },
-          }
-        );
-        const newServiceSecret = await plugins.docker.DockerSecret.createSecret(this.dockerHost, {
-          name: `${service.name}Secret`,
-          contentArg: plugins.smartjson.stringify(service.secretJson),
-          version: await newServiceImage.getVersion(),
-          labels: {},
-        });
-        const newService = await plugins.docker.DockerService.createService(this.dockerHost, {
-          image: newServiceImage,
-          labels: {},
-          name: service.name,
-          networkAlias: service.name,
-          networks: [],
-          secrets: [newServiceSecret],
-          ports: [`${service.port}:${service.secretJson.SERVEZONE_PORT}`],
-        });
-        logger.log('ok', `updated service >>${newService.Spec.Name}<<`);
-      }
-    }
-  }
+    for (const service of this.services) {
+      const existingService = await plugins.docker.DockerService.getServiceByName(
+        this.dockerHost,
+        service.name
+      );
+      const existingServiceSecret = await plugins.docker.DockerSecret.getSecretByName(
+        this.dockerHost,
+        `${service.name}Secret`
+      );
+      if (existingService) {
+        const needsUpdate: boolean = await existingService.needsUpdate();
+        if (!needsUpdate) {
+          logger.log('info', `service >>${service.name}<< not needing update.`);
+          // we simply return here to end the functions
+          return;
+        }
+        logger.log('ok', `${service.name} needs to be updated!`);
+        await existingService.remove();
+        await existingServiceSecret.remove();
+      }
+      if (!existingService && existingServiceSecret) {
+        await existingServiceSecret.remove();
+      }
+
+      const newServiceImage = await plugins.docker.DockerImage.createFromRegistry(
+        this.dockerHost,
+        {
+          creationObject: {
+            imageUrl: service.image,
+          },
+        }
+      );
+      const newServiceSecret = await plugins.docker.DockerSecret.createSecret(this.dockerHost, {
+        name: `${service.name}Secret`,
+        contentArg: plugins.smartjson.stringify(service.secretJson),
+        version: await newServiceImage.getVersion(),
+        labels: {},
+      });
+      const newService = await plugins.docker.DockerService.createService(this.dockerHost, {
+        image: newServiceImage,
+        labels: {},
+        name: service.name,
+        networkAlias: service.name,
+        networks: [],
+        secrets: [newServiceSecret],
+        ports: [`${service.port}:${service.secretJson.SERVEZONE_PORT}`],
+      });
+      logger.log('ok', `updated service >>${newService.Spec.Name}<<`);
+    }
+  }
 }
diff --git a/ts/spark.cli.ts b/ts/spark.cli.ts
--- a/ts/spark.cli.ts
+++ b/ts/spark.cli.ts
@@ ... @@
   smartcliInstance.addCommand('asdaemon').subscribe(async (argvArg) => {
     logger.log('success', 'looks like we are running as daemon now');
     logger.log('info', 'starting spark in daemon mode');
+
+    // lets determine the mode if specified
+    let mode = argvArg.mode;
+    if (mode === 'cloudly') {
+      await sparkInstance.sparkConfig.kvStore.writeKey('mode', 'cloudly');
+    } else if (mode === 'coreflow-node') {
+      await sparkInstance.sparkConfig.kvStore.writeKey('mode', 'coreflow-node');
+    } else if (mode) {
+      logger.log('error', 'unknown mode specified');
+      process.exit(1);
+    } else {
+      // mode is not specified by cli, lets get it from the config
+      mode = await sparkInstance.sparkConfig.kvStore.readKey('mode');
+    }
+
+    if (!mode) {
+      logger.log('error', 'no mode specified by either cli or config');
+      process.exit(1);
+    } else if (mode === 'cloudly') {
+      sparkInstance.sparkUpdateManager.services.push({
+        name: `coreflow`,
+        image: `code.foss.global/serve.zone/cloudly`,
+        url: `cloudly`,
+        environment: `production`,
+        port: `3000`,
+        secretJson: {
+          SERVEZONE_PORT: `3000`,
+          SERVEZONE_ENVIRONMENT: `production`,
+        },
+      });
+    } else if (mode === 'coreflow-node') {
+      sparkInstance.sparkUpdateManager.services.push({
+        name: `coreflow`,
+        image: `code.foss.global/serve.zone/coreflow`,
+        url: `coreflow`,
+        environment: `production`,
+        port: `3000`,
+        secretJson: {
+          SERVEZONE_PORT: `3000`,
+          SERVEZONE_ENVIRONMENT: `production`,
+        },
+      });
+    }
+
     await sparkInstance.daemonStart();
   });
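
In practice, the new mode handling means the daemon only needs its mode supplied once. Assuming smartcli exposes a --mode flag as argvArg.mode (the flag name itself is not shown in this diff), a first start would look like `spark asdaemon --mode cloudly` or `spark asdaemon --mode coreflow-node`; later `spark asdaemon` invocations read the persisted mode back from the kvStore, and a start with neither a cli-supplied mode nor a stored one logs an error and exits.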
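
For readers unfamiliar with the persistence behind that: the kvStore created in SparkConfig is an npmextra KeyValueStore, which persists keys under the user's home directory. A minimal standalone sketch of the write/read cycle, assuming the package name is @push.rocks/npmextra (the repo imports it through its plugins module); the typeArg/identityArg values mirror the ones in SparkConfig:

```typescript
import * as npmextra from '@push.rocks/npmextra';

const demo = async () => {
  // same store identity as SparkConfig, so a value written here is
  // what the daemon reads back on its next start
  const kvStore = new npmextra.KeyValueStore({
    typeArg: 'userHomeDir',
    identityArg: 'servezone_spark',
  });

  // a first daemon start with an explicit mode persists it ...
  await kvStore.writeKey('mode', 'cloudly');

  // ... and later starts without a cli-supplied mode read it back
  const mode = await kvStore.readKey('mode');
  console.log(mode); // 'cloudly'
};

demo();
```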
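
The schedules passed to addAndScheduleTask are six-field cron patterns with a leading seconds field: '0 * * * * *' fires at the top of every minute, '30 */2 * * * *' at second 30 of every second minute, and '0 0 0 * * *' once a day at midnight. A hedged sketch of the taskbuffer pattern used above, assuming the package name is @push.rocks/taskbuffer and that TaskManager takes no constructor arguments (neither detail is shown in this diff):

```typescript
import * as taskbuffer from '@push.rocks/taskbuffer';

// illustrative stand-in for the real updateServices task
const updateServices = new taskbuffer.Task({
  name: 'updateServices',
  taskFunction: async () => {
    console.log('checking managed services against the registry...');
  },
});

const taskManager = new taskbuffer.TaskManager();

// second 30 of every second minute, mirroring the schedule in the diff
taskManager.addAndScheduleTask(updateServices, '30 */2 * * * *');
taskManager.start();
```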
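
Finally, for orientation on the consolidated updateServices() loop: for each registered service it asks the deployed swarm service whether it needsUpdate(); if so, it removes the service and its secret, pulls the image fresh from the registry, recreates the secret versioned to the new image, and recreates the service with the port mapping `${service.port}:${service.secretJson.SERVEZONE_PORT}`. Note that the early return (carried over from the old code) ends the whole run at the first service that needs no update, rather than skipping just that service.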