fix(core): Refactored configuration management classes and improved service update handling

parent a72227ce0d
commit 5badfc72f4
@@ -1,5 +1,12 @@
 # Changelog

+## 2024-12-20 - 1.2.2 - fix(core)
+Refactored configuration management classes and improved service update handling
+
+- Replaced SparkLocalConfig with SparkConfig for configuration management.
+- Improved service handling and update check logic.
+- Consolidated service definition and update logic for better maintainability.
+
 ## 2024-12-19 - 1.2.1 - fix(taskmanager)
 Remove checkinSlackTask from SparkTaskManager for streamlined task management

@@ -3,6 +3,6 @@
  */
 export const commitinfo = {
   name: '@serve.zone/spark',
-  version: '1.2.1',
+  version: '1.2.2',
   description: 'A comprehensive tool for maintaining and configuring servers, integrating with Docker and supporting advanced task scheduling, targeted at the Servezone infrastructure and used by @serve.zone/cloudly as a cluster node server system manager.'
 }
@@ -3,7 +3,12 @@ import { Spark } from './index.js';

 export class SparkConfig {
   public sparkRef: Spark;
+  public kvStore: plugins.npmextra.KeyValueStore;
   constructor(sparkRefArg: Spark) {
     this.sparkRef = sparkRefArg;
+    this.kvStore = new plugins.npmextra.KeyValueStore({
+      typeArg: 'userHomeDir',
+      identityArg: 'servezone_spark',
+    });
   }
 }
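Since the kvStore on SparkConfig is public (on the removed SparkLocalConfig below it was private), other modules can now persist and read settings directly. A minimal usage sketch, assuming top-level await and that the package's public entry point exports Spark:

```typescript
import { Spark } from '@serve.zone/spark';

const spark = new Spark();
// persist a value in the user-home-scoped store identified by 'servezone_spark';
// the CLI change further down uses exactly this mechanism for its 'mode' key
await spark.sparkConfig.kvStore.writeKey('mode', 'cloudly');
// read it back on a later start
const mode = await spark.sparkConfig.kvStore.readKey('mode');
```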
@@ -1,15 +0,0 @@
-import * as plugins from './spark.plugins.js';
-import { Spark } from './index.js';
-
-export class SparkLocalConfig {
-  public sparkRef: Spark;
-  private kvStore: plugins.npmextra.KeyValueStore;
-
-  constructor(sparkRefArg: Spark) {
-    this.sparkRef = sparkRefArg;
-    this.kvStore = new plugins.npmextra.KeyValueStore({
-      typeArg: 'userHomeDir',
-      identityArg: 'servezone_spark',
-    });
-  }
-}
@@ -1,23 +1,23 @@
 import * as plugins from './spark.plugins.js';
 import { SparkTaskManager } from './spark.classes.taskmanager.js';
 import { SparkInfo } from './spark.classes.info.js';
-import { SparkUpdateManager } from './spark.classes.updatemanager.js';
+import { SparkServicesManager } from './spark.classes.updatemanager.js';
 import { logger } from './spark.logging.js';
-import { SparkLocalConfig } from './spark.classes.localconfig.js';
+import { SparkConfig } from './spark.classes.config.js';

 export class Spark {
   public smartdaemon: plugins.smartdaemon.SmartDaemon;
-  public sparkLocalConfig: SparkLocalConfig;
+  public sparkConfig: SparkConfig;
   public sparkTaskManager: SparkTaskManager;
   public sparkInfo: SparkInfo;
-  public sparkUpdateManager: SparkUpdateManager;
+  public sparkUpdateManager: SparkServicesManager;

   constructor() {
     this.smartdaemon = new plugins.smartdaemon.SmartDaemon();
-    this.sparkLocalConfig = new SparkLocalConfig(this);
+    this.sparkConfig = new SparkConfig(this);
     this.sparkInfo = new SparkInfo(this);
     this.sparkTaskManager = new SparkTaskManager(this);
-    this.sparkUpdateManager = new SparkUpdateManager(this);
+    this.sparkUpdateManager = new SparkServicesManager(this);
   }

   public async daemonStart() {
@@ -10,7 +10,7 @@ export class SparkTaskManager {
   // tasks
   public updateSpark: plugins.taskbuffer.Task;
   public updateHost: plugins.taskbuffer.Task;
-  public updateCloudly: plugins.taskbuffer.Task;
+  public updateServices: plugins.taskbuffer.Task;

   constructor(sparkRefArg: Spark) {
     this.sparkRef = sparkRefArg;
@@ -56,7 +56,10 @@ export class SparkTaskManager {
       },
     });

-    this.updateCloudly = new plugins.taskbuffer.Task({
+    /**
+     * only being run when mode is cloudly
+     */
+    this.updateServices = new plugins.taskbuffer.Task({
       name: 'updateCloudly',
       taskFunction: async () => {
         logger.log('info', 'now running updateCloudly task');
@@ -69,9 +72,9 @@ export class SparkTaskManager {
    * start the taskmanager
    */
   public async start() {
+    this.taskmanager.addAndScheduleTask(this.updateServices, '30 */2 * * * *');
     this.taskmanager.addAndScheduleTask(this.updateSpark, '0 * * * * *');
     this.taskmanager.addAndScheduleTask(this.updateHost, '0 0 0 * * *');
-    this.taskmanager.addAndScheduleTask(this.updateCloudly, '30 */2 * * * *');
     this.taskmanager.start();
   }
@@ -81,7 +84,7 @@ export class SparkTaskManager {
   public async stop() {
     this.taskmanager.descheduleTask(this.updateSpark);
     this.taskmanager.descheduleTask(this.updateHost);
-    this.taskmanager.descheduleTask(this.updateCloudly);
+    this.taskmanager.descheduleTask(this.updateServices);
     this.taskmanager.stop();
   }
 }
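The schedules above use taskbuffer's six-field cron strings (second, minute, hour, day of month, month, weekday). A standalone sketch of the same pattern; the import path and the TaskManager setup are assumptions, only the Task and addAndScheduleTask usage mirrors the diff:

```typescript
import * as taskbuffer from '@push.rocks/taskbuffer';

const taskmanager = new taskbuffer.TaskManager();

const updateServices = new taskbuffer.Task({
  name: 'updateServices',
  taskFunction: async () => {
    console.log('checking managed services for updates');
  },
});

// '30 */2 * * * *' fires at second 30 of every second minute,
// i.e. roughly every two minutes, matching the schedule above
taskmanager.addAndScheduleTask(updateServices, '30 */2 * * * *');
taskmanager.start();
```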
@@ -3,10 +3,26 @@ import * as paths from './spark.paths.js';
 import { Spark } from './spark.classes.spark.js';
 import { logger } from './spark.logging.js';

-export class SparkUpdateManager {
+/**
+ * this class takes care of updating the services that are managed by spark
+ */
+export class SparkServicesManager {
   public sparkRef: Spark;
   public dockerHost: plugins.docker.DockerHost;
   public smartupdate: plugins.smartupdate.SmartUpdate;
+
+  /**
+   * the services that are managed by spark
+   */
+  services: Array<{
+    name: string;
+    image: string;
+    url: string;
+    port: string;
+    environment: string;
+    secretJson: any;
+  }> = [];
+
   constructor(sparkrefArg: Spark) {
     this.sparkRef = sparkrefArg;
     this.dockerHost = new plugins.docker.DockerHost({});
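The element type of the new services array is inlined above; spelled out as a named interface it would look like the sketch below. The interface name is hypothetical; the example values are the ones this commit moves out of updateServices() and into the CLI:

```typescript
// hypothetical named form of the inline array element type
interface ISparkManagedService {
  name: string;
  image: string;
  url: string;
  port: string;
  environment: string;
  secretJson: any;
}

const coreflowService: ISparkManagedService = {
  name: 'coreflow',
  image: 'code.foss.global/serve.zone/coreflow',
  url: 'coreflow',
  environment: 'production',
  port: '3000',
  secretJson: {
    SERVEZONE_PORT: '3000',
    SERVEZONE_ENVIRONMENT: 'production',
  },
};
```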
@@ -21,60 +37,7 @@ export class SparkUpdateManager {
   }

   public async updateServices() {
-    if (
-      plugins.smartfile.fs.isDirectory(plugins.path.join(paths.homeDir, 'serve.zone/spark')) &&
-      (await plugins.smartfile.fs.fileExists(
-        plugins.path.join(paths.homeDir, 'serve.zone/spark/spark.json')
-      ))
-    ) {
-      const services: Array<{
-        name: string;
-        image: string;
-        url: string;
-        port: string;
-        environment: string;
-        secretJson: any;
-      }> = [];
-      // lets add coreflow
-      services.push({
-        name: `coreflow`,
-        image: `code.foss.global/serve.zone/coreflow`,
-        url: `coreflow`,
-        environment: `production`,
-        port: `3000`,
-        secretJson: {
-          SERVEZONE_PORT: `3000`,
-          SERVEZONE_ENVIRONMENT: `production`,
-        },
-      });
-
-      services.push({
-        name: `coretraffic`,
-        image: `code.foss.global/serve.zone/coretraffic`,
-        url: `coreflow`,
-        environment: `production`,
-        port: `3000`,
-        secretJson: {
-          SERVEZONE_PORT: `3000`,
-          SERVEZONE_ENVIRONMENT: `production`,
-        },
-      });
-
-      services.push({
-        name: `corelog`,
-        image: `code.foss.global/serve.zone/corelog`,
-        url: `coreflow`,
-        environment: `production`,
-        port: `3000`,
-        secretJson: {
-          SERVEZONE_PORT: `3000`,
-          SERVEZONE_ENVIRONMENT: `production`,
-        },
-      });
-
-      // lets add coretraffic
-
-      for (const service of services) {
+    for (const service of this.services) {
       const existingService = await plugins.docker.DockerService.getServiceByName(
         this.dockerHost,
         service.name
@@ -86,10 +49,11 @@ export class SparkUpdateManager {
       if (existingService) {
         const needsUpdate: boolean = await existingService.needsUpdate();
         if (!needsUpdate) {
-          logger.log('info', `not needing update.`);
+          logger.log('info', `service >>${service.name}<< not needing update.`);
+          // we simply return here to end the functions
           return;
         }
         // continuing here means we need to update the service
         logger.log('ok', `${service.name} needs to be updated!`);
         await existingService.remove();
         await existingServiceSecret.remove();
@@ -112,6 +76,7 @@ export class SparkUpdateManager {
         version: await newServiceImage.getVersion(),
         labels: {},
       });
+
       const newService = await plugins.docker.DockerService.createService(this.dockerHost, {
         image: newServiceImage,
         labels: {},
@@ -123,7 +88,7 @@ export class SparkUpdateManager {
       });
       logger.log('ok', `updated service >>${newService.Spec.Name}<<!`);
     }
-      logger.log('success', `updated ${services.length} services!`);
-    }
+
+    logger.log('success', `updated ${this.services.length} services!`);
   }
 }
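Condensed, the per-service flow in updateServices() is now: look up the existing Docker service, skip the run when nothing changed, otherwise remove and recreate it. A sketch reusing only the calls visible in the diff; the plugins import is the repo's own barrel, and error handling plus the image/secret setup are elided:

```typescript
import * as plugins from './spark.plugins.js';

async function updateOneService(
  dockerHost: plugins.docker.DockerHost,
  service: { name: string; image: string }
) {
  const existingService = await plugins.docker.DockerService.getServiceByName(
    dockerHost,
    service.name
  );
  if (existingService) {
    if (!(await existingService.needsUpdate())) {
      // note: in the diff this `return` exits updateServices() entirely,
      // so services later in the array are not checked on that run
      return;
    }
    await existingService.remove(); // tear down the outdated service
  }
  // ...pull the new image, recreate the secret, then call
  // plugins.docker.DockerService.createService(dockerHost, { ... }) as above
}
```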
@@ -45,6 +45,50 @@ export const runCli = async () => {
   smartcliInstance.addCommand('asdaemon').subscribe(async (argvArg) => {
-    logger.log('success', 'looks like we are running as daemon now');
+    logger.log('info', 'starting spark in daemon mode');
+
+    // lets determine the mode if specified
+    let mode = argvArg.mode;
+    if (mode === 'cloudly') {
+      await sparkInstance.sparkConfig.kvStore.writeKey('mode', 'cloudly');
+    } else if (mode === 'coreflow-node') {
+      await sparkInstance.sparkConfig.kvStore.writeKey('mode', 'coreflow-node');
+    } else if (mode) {
+      logger.log('error', 'unknown mode specified');
+      process.exit(1);
+    } else {
+      // mode is not specified by cli, lets get it from the config
+      mode = await sparkInstance.sparkConfig.kvStore.readKey('mode');
+    }
+
+    if (!mode) {
+      logger.log('error', 'no mode specified by either cli or config');
+      process.exit(1);
+    } else if (mode === 'cloudly') {
+      sparkInstance.sparkUpdateManager.services.push({
+        name: `coreflow`,
+        image: `code.foss.global/serve.zone/cloudly`,
+        url: `cloudly`,
+        environment: `production`,
+        port: `3000`,
+        secretJson: {
+          SERVEZONE_PORT: `3000`,
+          SERVEZONE_ENVIRONMENT: `production`,
+        },
+      });
+    } else if (mode === 'coreflow-node') {
+      sparkInstance.sparkUpdateManager.services.push({
+        name: `coreflow`,
+        image: `code.foss.global/serve.zone/coreflow`,
+        url: `coreflow`,
+        environment: `production`,
+        port: `3000`,
+        secretJson: {
+          SERVEZONE_PORT: `3000`,
+          SERVEZONE_ENVIRONMENT: `production`,
+        },
+      });
+    }
+
     await sparkInstance.daemonStart();
   });

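Registering a service for another mode would follow the same pattern. The corelog entry below is purely illustrative; its image appears only in the hardcoded service definitions this commit removes:

```typescript
// hypothetical: a further mode branch pushing another managed service
sparkInstance.sparkUpdateManager.services.push({
  name: `corelog`,
  image: `code.foss.global/serve.zone/corelog`,
  url: `corelog`,
  environment: `production`,
  port: `3000`,
  secretJson: {
    SERVEZONE_PORT: `3000`,
    SERVEZONE_ENVIRONMENT: `production`,
  },
});
```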