Compare commits

...

4 Commits

10 changed files with 1125 additions and 1810 deletions

View File

@@ -1,5 +1,18 @@
 # Changelog
 
+## 2024-12-20 - 1.2.2 - fix(core)
+Refactored configuration management classes and improved service update handling
+
+- Replaced SparkLocalConfig with SparkConfig for configuration management.
+- Improved service handling and update check logic.
+- Consolidated service definition and update logic for better maintainability.
+
+## 2024-12-19 - 1.2.1 - fix(taskmanager)
+Remove checkinSlackTask from SparkTaskManager for streamlined task management
+
+- checkinSlackTask has been removed from the task manager class.
+- Removal of the slack check-in task allows the system to focus on essential update tasks.
+
 ## 2024-12-18 - 1.2.0 - feat(core)
 Initial commit of the Spark project with core functionalities for server management and integration with Docker.

View File

@@ -1,6 +1,6 @@
 {
   "name": "@serve.zone/spark",
-  "version": "1.2.0",
+  "version": "1.2.2",
   "private": false,
   "description": "A comprehensive tool for maintaining and configuring servers, integrating with Docker and supporting advanced task scheduling, targeted at the Servezone infrastructure and used by @serve.zone/cloudly as a cluster node server system manager.",
   "main": "dist_ts/index.js",

pnpm-lock.yaml (generated): 2649 lines changed.

File diff suppressed because it is too large.

View File

@@ -3,6 +3,6 @@
  */
 export const commitinfo = {
   name: '@serve.zone/spark',
-  version: '1.2.0',
+  version: '1.2.2',
   description: 'A comprehensive tool for maintaining and configuring servers, integrating with Docker and supporting advanced task scheduling, targeted at the Servezone infrastructure and used by @serve.zone/cloudly as a cluster node server system manager.'
 }

View File

@@ -3,7 +3,12 @@ import { Spark } from './index.js';
 export class SparkConfig {
   public sparkRef: Spark;
+  public kvStore: plugins.npmextra.KeyValueStore;
 
   constructor(sparkRefArg: Spark) {
     this.sparkRef = sparkRefArg;
+    this.kvStore = new plugins.npmextra.KeyValueStore({
+      typeArg: 'userHomeDir',
+      identityArg: 'servezone_spark',
+    });
   }
 }
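The new kvStore property gives SparkConfig a persistent key-value store rooted in the user's home directory. A minimal usage sketch, based only on the readKey/writeKey calls that appear in the CLI changes further down this comparison:

import { Spark } from './index.js';

const sparkInstance = new Spark();
// persist the daemon mode so it survives restarts
await sparkInstance.sparkConfig.kvStore.writeKey('mode', 'cloudly');
// later invocations read it back
const storedMode = await sparkInstance.sparkConfig.kvStore.readKey('mode'); // 'cloudly'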

View File

@@ -1,15 +0,0 @@
-import * as plugins from './spark.plugins.js';
-import { Spark } from './index.js';
-
-export class SparkLocalConfig {
-  public sparkRef: Spark;
-  private kvStore: plugins.npmextra.KeyValueStore;
-
-  constructor(sparkRefArg: Spark) {
-    this.sparkRef = sparkRefArg;
-    this.kvStore = new plugins.npmextra.KeyValueStore({
-      typeArg: 'userHomeDir',
-      identityArg: 'servezone_spark',
-    });
-  }
-}

View File

@ -1,23 +1,23 @@
import * as plugins from './spark.plugins.js'; import * as plugins from './spark.plugins.js';
import { SparkTaskManager } from './spark.classes.taskmanager.js'; import { SparkTaskManager } from './spark.classes.taskmanager.js';
import { SparkInfo } from './spark.classes.info.js'; import { SparkInfo } from './spark.classes.info.js';
import { SparkUpdateManager } from './spark.classes.updatemanager.js'; import { SparkServicesManager } from './spark.classes.updatemanager.js';
import { logger } from './spark.logging.js'; import { logger } from './spark.logging.js';
import { SparkLocalConfig } from './spark.classes.localconfig.js'; import { SparkConfig } from './spark.classes.config.js';
export class Spark { export class Spark {
public smartdaemon: plugins.smartdaemon.SmartDaemon; public smartdaemon: plugins.smartdaemon.SmartDaemon;
public sparkLocalConfig: SparkLocalConfig; public sparkConfig: SparkConfig;
public sparkTaskManager: SparkTaskManager; public sparkTaskManager: SparkTaskManager;
public sparkInfo: SparkInfo; public sparkInfo: SparkInfo;
public sparkUpdateManager: SparkUpdateManager; public sparkUpdateManager: SparkServicesManager;
constructor() { constructor() {
this.smartdaemon = new plugins.smartdaemon.SmartDaemon(); this.smartdaemon = new plugins.smartdaemon.SmartDaemon();
this.sparkLocalConfig = new SparkLocalConfig(this); this.sparkConfig = new SparkConfig(this);
this.sparkInfo = new SparkInfo(this); this.sparkInfo = new SparkInfo(this);
this.sparkTaskManager = new SparkTaskManager(this); this.sparkTaskManager = new SparkTaskManager(this);
this.sparkUpdateManager = new SparkUpdateManager(this); this.sparkUpdateManager = new SparkServicesManager(this);
} }
public async daemonStart() { public async daemonStart() {
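The rename is mechanical: SparkLocalConfig becomes SparkConfig and the SparkUpdateManager class becomes SparkServicesManager, while the sparkUpdateManager property name is kept. A hedged usage sketch against the new wiring (assuming Spark is re-exported from the package's index, as the './index.js' imports elsewhere in this diff suggest):

import { Spark } from '@serve.zone/spark';

const sparkInstance = new Spark(); // wires up config, info, task manager, and services manager
await sparkInstance.daemonStart(); // daemon setup; body not shown in this diff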

View File

@@ -8,25 +8,14 @@ export class SparkTaskManager {
   public taskmanager: plugins.taskbuffer.TaskManager;
 
   // tasks
-  public checkinSlackTask: plugins.taskbuffer.Task;
   public updateSpark: plugins.taskbuffer.Task;
   public updateHost: plugins.taskbuffer.Task;
-  public updateCloudly: plugins.taskbuffer.Task;
+  public updateServices: plugins.taskbuffer.Task;
 
   constructor(sparkRefArg: Spark) {
     this.sparkRef = sparkRefArg;
     this.taskmanager = new plugins.taskbuffer.TaskManager();
 
-    // checkinOnSlack
-    this.checkinSlackTask = new plugins.taskbuffer.Task({
-      name: 'checkinSlack',
-      taskFunction: async () => {
-        logger.log('ok', 'running hourly checkin now');
-        logger.log('info', 'completed hourly checkin');
-      },
-    });
-
     // updateSpark
     this.updateSpark = new plugins.taskbuffer.Task({
       name: 'updateSpark',
@@ -67,7 +56,10 @@ export class SparkTaskManager {
       },
     });
 
-    this.updateCloudly = new plugins.taskbuffer.Task({
+    /**
+     * only being run when mode is cloudly
+     */
+    this.updateServices = new plugins.taskbuffer.Task({
       name: 'updateCloudly',
       taskFunction: async () => {
         logger.log('info', 'now running updateCloudly task');
@@ -80,10 +72,9 @@ export class SparkTaskManager {
    * start the taskmanager
    */
   public async start() {
-    this.taskmanager.addAndScheduleTask(this.checkinSlackTask, '0 0 * * * *');
+    this.taskmanager.addAndScheduleTask(this.updateServices, '30 */2 * * * *');
     this.taskmanager.addAndScheduleTask(this.updateSpark, '0 * * * * *');
     this.taskmanager.addAndScheduleTask(this.updateHost, '0 0 0 * * *');
-    this.taskmanager.addAndScheduleTask(this.updateCloudly, '30 */2 * * * *');
     this.taskmanager.start();
   }
@@ -91,10 +82,9 @@ export class SparkTaskManager {
    * stops the taskmanager
    */
   public async stop() {
-    this.taskmanager.descheduleTask(this.checkinSlackTask);
     this.taskmanager.descheduleTask(this.updateSpark);
     this.taskmanager.descheduleTask(this.updateHost);
-    this.taskmanager.descheduleTask(this.updateCloudly);
+    this.taskmanager.descheduleTask(this.updateServices);
     this.taskmanager.stop();
   }
 }
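For orientation, the cron strings here have six fields, with a leading seconds field. A hedged annotation of the resulting schedules, assuming standard seconds-first cron semantics:

this.taskmanager.addAndScheduleTask(this.updateServices, '30 */2 * * * *'); // second 30 of every 2nd minute
this.taskmanager.addAndScheduleTask(this.updateSpark, '0 * * * * *'); // at the top of every minute
this.taskmanager.addAndScheduleTask(this.updateHost, '0 0 0 * * *'); // daily at midnight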

View File

@@ -3,10 +3,26 @@ import * as paths from './spark.paths.js';
 import { Spark } from './spark.classes.spark.js';
 import { logger } from './spark.logging.js';
 
-export class SparkUpdateManager {
+/**
+ * this class takes care of updating the services that are managed by spark
+ */
+export class SparkServicesManager {
   public sparkRef: Spark;
   public dockerHost: plugins.docker.DockerHost;
   public smartupdate: plugins.smartupdate.SmartUpdate;
+
+  /**
+   * the services that are managed by spark
+   */
+  services: Array<{
+    name: string;
+    image: string;
+    url: string;
+    port: string;
+    environment: string;
+    secretJson: any;
+  }> = [];
+
   constructor(sparkrefArg: Spark) {
     this.sparkRef = sparkrefArg;
     this.dockerHost = new plugins.docker.DockerHost({});
@@ -21,109 +37,58 @@ export class SparkUpdateManager {
   }
 
   public async updateServices() {
-    if (
-      plugins.smartfile.fs.isDirectory(plugins.path.join(paths.homeDir, 'serve.zone/spark')) &&
-      (await plugins.smartfile.fs.fileExists(
-        plugins.path.join(paths.homeDir, 'serve.zone/spark/spark.json')
-      ))
-    ) {
-      const services: Array<{
-        name: string;
-        image: string;
-        url: string;
-        port: string;
-        environment: string;
-        secretJson: any;
-      }> = [];
-      // lets add coreflow
-      services.push({
-        name: `coreflow`,
-        image: `code.foss.global/serve.zone/coreflow`,
-        url: `coreflow`,
-        environment: `production`,
-        port: `3000`,
-        secretJson: {
-          SERVEZONE_PORT: `3000`,
-          SERVEZONE_ENVIRONMENT: `production`,
-        },
-      });
-      services.push({
-        name: `coretraffic`,
-        image: `code.foss.global/serve.zone/coretraffic`,
-        url: `coreflow`,
-        environment: `production`,
-        port: `3000`,
-        secretJson: {
-          SERVEZONE_PORT: `3000`,
-          SERVEZONE_ENVIRONMENT: `production`,
-        },
-      });
-      services.push({
-        name: `corelog`,
-        image: `code.foss.global/serve.zone/corelog`,
-        url: `coreflow`,
-        environment: `production`,
-        port: `3000`,
-        secretJson: {
-          SERVEZONE_PORT: `3000`,
-          SERVEZONE_ENVIRONMENT: `production`,
-        },
-      });
-      // lets add coretraffic
-      for (const service of services) {
-        const existingService = await plugins.docker.DockerService.getServiceByName(
-          this.dockerHost,
-          service.name
-        );
-        const existingServiceSecret = await plugins.docker.DockerSecret.getSecretByName(
-          this.dockerHost,
-          `${service.name}Secret`
-        );
-        if (existingService) {
-          const needsUpdate: boolean = await existingService.needsUpdate();
-          if (!needsUpdate) {
-            logger.log('info', `not needing update.`);
-            // we simply return here to end the functions
-            return;
-          }
-          logger.log('ok', `${service.name} needs to be updated!`);
-          await existingService.remove();
-          await existingServiceSecret.remove();
-        }
-        if (!existingService && existingServiceSecret) {
-          await existingServiceSecret.remove();
-        }
-        const newServiceImage = await plugins.docker.DockerImage.createFromRegistry(
-          this.dockerHost,
-          {
-            creationObject: {
-              imageUrl: service.image,
-            },
-          }
-        );
-        const newServiceSecret = await plugins.docker.DockerSecret.createSecret(this.dockerHost, {
-          name: `${service.name}Secret`,
-          contentArg: plugins.smartjson.stringify(service.secretJson),
-          version: await newServiceImage.getVersion(),
-          labels: {},
-        });
-        const newService = await plugins.docker.DockerService.createService(this.dockerHost, {
-          image: newServiceImage,
-          labels: {},
-          name: service.name,
-          networkAlias: service.name,
-          networks: [],
-          secrets: [newServiceSecret],
-          ports: [`${service.port}:${service.secretJson.SERVEZONE_PORT}`],
-        });
-        logger.log('ok', `updated service >>${newService.Spec.Name}<<!`);
-      }
-      logger.log('success', `updated ${services.length} services!`);
-    }
+    for (const service of this.services) {
+      const existingService = await plugins.docker.DockerService.getServiceByName(
+        this.dockerHost,
+        service.name
+      );
+      const existingServiceSecret = await plugins.docker.DockerSecret.getSecretByName(
+        this.dockerHost,
+        `${service.name}Secret`
+      );
+      if (existingService) {
+        const needsUpdate: boolean = await existingService.needsUpdate();
+        if (!needsUpdate) {
+          logger.log('info', `service >>${service.name}<< not needing update.`);
+          // we simply return here to end the functions
+          return;
+        }
+        // continuing here means we need to update the service
+        logger.log('ok', `${service.name} needs to be updated!`);
+        await existingService.remove();
+        await existingServiceSecret.remove();
+      }
+      if (!existingService && existingServiceSecret) {
+        await existingServiceSecret.remove();
+      }
+      const newServiceImage = await plugins.docker.DockerImage.createFromRegistry(
+        this.dockerHost,
+        {
+          creationObject: {
+            imageUrl: service.image,
+          },
+        }
+      );
+      const newServiceSecret = await plugins.docker.DockerSecret.createSecret(this.dockerHost, {
+        name: `${service.name}Secret`,
+        contentArg: plugins.smartjson.stringify(service.secretJson),
+        version: await newServiceImage.getVersion(),
+        labels: {},
+      });
+      const newService = await plugins.docker.DockerService.createService(this.dockerHost, {
+        image: newServiceImage,
+        labels: {},
+        name: service.name,
+        networkAlias: service.name,
+        networks: [],
+        secrets: [newServiceSecret],
+        ports: [`${service.port}:${service.secretJson.SERVEZONE_PORT}`],
+      });
+      logger.log('ok', `updated service >>${newService.Spec.Name}<<!`);
+    }
+    logger.log('success', `updated ${this.services.length} services!`);
   }
 }
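After the refactor, the service catalogue is no longer hard-coded behind a spark.json existence check; callers populate the public services array before updateServices() runs. A sketch reusing the entry shape from this diff (the "myservice" values are placeholders, not real serve.zone images):

sparkInstance.sparkUpdateManager.services.push({
  name: `myservice`,
  image: `code.foss.global/serve.zone/myservice`,
  url: `myservice`,
  environment: `production`,
  port: `3000`,
  secretJson: {
    SERVEZONE_PORT: `3000`,
    SERVEZONE_ENVIRONMENT: `production`,
  },
});
await sparkInstance.sparkUpdateManager.updateServices();

Note that the early return when a service is already up to date exits updateServices() entirely (behavior carried over from the old code), so services later in the array are not checked on that run.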

View File

@@ -45,6 +45,50 @@ export const runCli = async () => {
   smartcliInstance.addCommand('asdaemon').subscribe(async (argvArg) => {
     logger.log('success', 'looks like we are running as daemon now');
     logger.log('info', 'starting spark in daemon mode');
+    // lets determine the mode if specified
+    let mode = argvArg.mode;
+    if (mode === 'cloudly') {
+      await sparkInstance.sparkConfig.kvStore.writeKey('mode', 'cloudly');
+    } else if (mode === 'coreflow-node') {
+      await sparkInstance.sparkConfig.kvStore.writeKey('mode', 'coreflow-node');
+    } else if (mode) {
+      logger.log('error', 'unknown mode specified');
+      process.exit(1);
+    } else {
+      // mode is not specified by cli, lets get it from the config
+      mode = await sparkInstance.sparkConfig.kvStore.readKey('mode');
+    }
+    if (!mode) {
+      logger.log('error', 'no mode specified by either cli or config');
+      process.exit(1);
+    } else if (mode === 'cloudly') {
+      sparkInstance.sparkUpdateManager.services.push({
+        name: `coreflow`,
+        image: `code.foss.global/serve.zone/cloudly`,
+        url: `cloudly`,
+        environment: `production`,
+        port: `3000`,
+        secretJson: {
+          SERVEZONE_PORT: `3000`,
+          SERVEZONE_ENVIRONMENT: `production`,
+        },
+      });
+    } else if (mode === 'coreflow-node') {
+      sparkInstance.sparkUpdateManager.services.push({
+        name: `coreflow`,
+        image: `code.foss.global/serve.zone/coreflow`,
+        url: `coreflow`,
+        environment: `production`,
+        port: `3000`,
+        secretJson: {
+          SERVEZONE_PORT: `3000`,
+          SERVEZONE_ENVIRONMENT: `production`,
+        },
+      });
+    }
     await sparkInstance.daemonStart();
   });
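With this change the daemon's mode comes either from the CLI flag, which is then persisted through the kvStore, or, when the flag is omitted, from the value stored on a previous run; for example, assuming the package's CLI binary is called spark (an assumption based on the package name), a first run would be "spark asdaemon --mode cloudly" and later runs simply "spark asdaemon". Worth noting: in cloudly mode the pushed service is named coreflow while pointing at the cloudly image; that naming is taken verbatim from this commit.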