fix(core): update
ts/00_commitinfo_data.ts (new file, 8 lines)
@@ -0,0 +1,8 @@
/**
 * autocreated commitinfo by @pushrocks/commitinfo
 */
export const commitinfo = {
  name: '@serve.zone/spark',
  version: '1.0.83',
  description: 'sparks the servezone services'
}
ts/index.ts (new file, 7 lines)
@@ -0,0 +1,7 @@
export * from './spark.classes.spark.js';

import * as cli from './spark.cli.js';

export const runCli = async () => {
  await cli.runCli();
};
ts/spark.classes.config.ts (new file, 9 lines)
@@ -0,0 +1,9 @@
import * as plugins from './spark.plugins.js';
import { Spark } from './index.js';

export class SparkConfig {
  public sparkRef: Spark;
  constructor(sparkRefArg: Spark) {
    this.sparkRef = sparkRefArg;
  }
}
ts/spark.classes.info.ts (new file, 12 lines)
@@ -0,0 +1,12 @@
import * as plugins from './spark.plugins.js';
import * as paths from './spark.paths.js';
import { Spark } from './spark.classes.spark.js';

export class SparkInfo {
  public sparkRef: Spark;
  public projectInfo = new plugins.projectinfo.ProjectinfoNpm(paths.packageDir);

  constructor(sparkRefArg: Spark) {
    this.sparkRef = sparkRefArg;
  }
}
ts/spark.classes.localconfig.ts (new file, 12 lines)
@@ -0,0 +1,12 @@
import * as plugins from './spark.plugins.js';
import { Spark } from './index.js';

export class SparkLocalConfig {
  public sparkRef: Spark;
  private kvStore: plugins.npmextra.KeyValueStore;

  constructor(sparkRefArg: Spark) {
    this.sparkRef = sparkRefArg;
    this.kvStore = new plugins.npmextra.KeyValueStore('custom', 'spark');
  }
}
ts/spark.classes.spark.ts (new file, 27 lines)
@@ -0,0 +1,27 @@
import * as plugins from './spark.plugins.js';
import { SparkTaskManager } from './spark.classes.taskmanager.js';
import { SparkInfo } from './spark.classes.info.js';
import { SparkUpdateManager } from './spark.classes.updatemanager.js';
import { logger } from './spark.logging.js';
import { SparkLocalConfig } from './spark.classes.localconfig.js';

export class Spark {
  public smartdaemon: plugins.smartdaemon.SmartDaemon;
  public sparkLocalConfig: SparkLocalConfig;
  public sparkTaskManager: SparkTaskManager;
  public sparkInfo: SparkInfo;
  public sparkUpdateManager: SparkUpdateManager;

  constructor() {
    this.smartdaemon = new plugins.smartdaemon.SmartDaemon();
    this.sparkLocalConfig = new SparkLocalConfig(this);
    this.sparkInfo = new SparkInfo(this);
    this.sparkTaskManager = new SparkTaskManager(this);
    this.sparkUpdateManager = new SparkUpdateManager(this);
  }

  public async daemonStart() {
    await this.sparkUpdateManager.start();
    await this.sparkTaskManager.start();
  }
}
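For orientation, a minimal sketch of how this class is driven (all names as defined in this commit; the top-level await assumes an ESM entry point):

import { Spark } from './spark.classes.spark.js';

const spark = new Spark();
// activates the docker swarm, then schedules the recurring tasks
await spark.daemonStart();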
ts/spark.classes.taskmanager.ts (new file, 100 lines)
@@ -0,0 +1,100 @@
import * as plugins from './spark.plugins.js';
import { Spark } from './index.js';
import * as paths from './spark.paths.js';
import { logger } from './spark.logging.js';

export class SparkTaskManager {
  public sparkRef: Spark;
  public taskmanager: plugins.taskbuffer.TaskManager;

  // tasks
  public checkinSlackTask: plugins.taskbuffer.Task;
  public updateSpark: plugins.taskbuffer.Task;
  public updateHost: plugins.taskbuffer.Task;
  public updateCloudly: plugins.taskbuffer.Task;

  constructor(sparkRefArg: Spark) {
    this.sparkRef = sparkRefArg;
    this.taskmanager = new plugins.taskbuffer.TaskManager();

    // checkinSlack
    this.checkinSlackTask = new plugins.taskbuffer.Task({
      name: 'checkinSlack',
      taskFunction: async () => {
        logger.log('ok', 'running hourly checkin now');

        logger.log('info', 'completed hourly checkin');
      },
    });

    // updateSpark
    this.updateSpark = new plugins.taskbuffer.Task({
      name: 'updateSpark',
      taskFunction: async () => {
        const smartupdateInstance = new plugins.smartupdate.SmartUpdate({
          npmRegistryUrl: 'https://registry.npmjs.org',
        });
        const shouldUpdate = await smartupdateInstance.check(
          this.sparkRef.sparkInfo.projectInfo.name,
          this.sparkRef.sparkInfo.projectInfo.version
        );
        if (shouldUpdate) {
          // deschedule all tasks before replacing the running binary
          await this.stop();
          const smartshellInstance = new plugins.smartshell.Smartshell({
            executor: 'bash',
          });

          await smartshellInstance.exec(`cd / && npm upgrade -g && spark updatedaemon`);
          logger.log('info', 'Cooling off before restart...');
          await plugins.smartdelay.delayFor(5000);
          logger.log('ok', '######## Trying to exit / Restart expected... ########');
          process.exit(0);
        }
      },
    });

    // updateHost
    this.updateHost = new plugins.taskbuffer.Task({
      name: 'updateHost',
      taskFunction: async () => {
        await this.stop();
        const smartshellInstance = new plugins.smartshell.Smartshell({
          executor: 'bash',
        });
        await smartshellInstance.exec(
          `apt-get update && apt-get upgrade -y --force-yes && apt-get autoremove -y --force-yes && apt-get autoclean -y --force-yes`
        );
        await this.start();
      },
    });

    // updateCloudly
    this.updateCloudly = new plugins.taskbuffer.Task({
      name: 'updateCloudly',
      taskFunction: async () => {
        logger.log('info', 'now running updateCloudly task');
        await this.sparkRef.sparkUpdateManager.updateServices();
      },
    });
  }

  /**
   * starts the taskmanager
   */
  public async start() {
    this.taskmanager.addAndScheduleTask(this.checkinSlackTask, '0 0 * * * *'); // hourly
    this.taskmanager.addAndScheduleTask(this.updateSpark, '0 * * * * *'); // every minute
    this.taskmanager.addAndScheduleTask(this.updateHost, '0 0 0 * * *'); // daily at midnight
    this.taskmanager.addAndScheduleTask(this.updateCloudly, '30 */2 * * * *'); // every 2nd minute, at second 30
    this.taskmanager.start();
  }

  /**
   * stops the taskmanager
   */
  public async stop() {
    this.taskmanager.descheduleTask(this.checkinSlackTask);
    this.taskmanager.descheduleTask(this.updateSpark);
    this.taskmanager.descheduleTask(this.updateHost);
    this.taskmanager.descheduleTask(this.updateCloudly);
    this.taskmanager.stop();
  }
}
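The schedule strings above use six cron fields with a leading seconds field. A minimal standalone sketch of the same taskbuffer pattern, using only the calls that appear in this file (the task name and body are hypothetical, for illustration):

import * as taskbuffer from '@push.rocks/taskbuffer';

const taskManager = new taskbuffer.TaskManager();
const heartbeatTask = new taskbuffer.Task({
  name: 'heartbeat', // hypothetical task
  taskFunction: async () => {
    console.log('still alive');
  },
});
taskManager.addAndScheduleTask(heartbeatTask, '0 * * * * *'); // at second 0 of every minute
taskManager.start();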
ts/spark.classes.updatemanager.ts (new file, 99 lines)
@@ -0,0 +1,99 @@
import * as plugins from './spark.plugins.js';
import * as paths from './spark.paths.js';
import { Spark } from './spark.classes.spark.js';
import { logger } from './spark.logging.js';

export class SparkUpdateManager {
  public sparkRef: Spark;
  public dockerHost: plugins.docker.DockerHost;
  public smartupdate: plugins.smartupdate.SmartUpdate;
  constructor(sparkRefArg: Spark) {
    this.sparkRef = sparkRefArg;
    this.dockerHost = new plugins.docker.DockerHost();
    this.smartupdate = new plugins.smartupdate.SmartUpdate();
  }

  /**
   * starts the instance
   */
  public async start() {
    await this.dockerHost.activateSwarm();
  }

  public async updateServices() {
    if (
      plugins.smartfile.fs.isDirectory(plugins.path.join(paths.homeDir, 'serve.zone/spark')) &&
      (await plugins.smartfile.fs.fileExists(
        plugins.path.join(paths.homeDir, 'serve.zone/spark/spark.json')
      ))
    ) {
      const sparkJson = plugins.smartfile.fs.toObjectSync(
        plugins.path.join(paths.homeDir, 'serve.zone/spark/spark.json')
      );
      const services: Array<{
        name: string;
        image: string;
        url: string;
        port: string;
        environment: string;
        secretJson: any;
      }> = [];
      for (const serviceKey of Object.keys(sparkJson.services)) {
        services.push({
          name: serviceKey,
          image: sparkJson.services[serviceKey].image,
          url: sparkJson.services[serviceKey].url,
          environment: sparkJson.services[serviceKey].environment,
          port: sparkJson.services[serviceKey].port,
          secretJson: sparkJson.services[serviceKey].secretJson,
        });
      }
      for (const service of services) {
        const existingService = await plugins.docker.DockerService.getServiceByName(
          this.dockerHost,
          service.name
        );
        const existingServiceSecret = await plugins.docker.DockerSecret.getSecretByName(
          this.dockerHost,
          `${service.name}Secret`
        );
        if (existingService) {
          const needsUpdate: boolean = await existingService.needsUpdate();
          if (!needsUpdate) {
            logger.log('info', `${service.name} does not need an update.`);
            // move on to the next service instead of aborting the whole run
            continue;
          }
          logger.log('ok', `${service.name} needs to be updated!`);
          await existingService.remove();
          if (existingServiceSecret) {
            await existingServiceSecret.remove();
          }
        }
        if (!existingService && existingServiceSecret) {
          await existingServiceSecret.remove();
        }

        const newServiceImage = await plugins.docker.DockerImage.createFromRegistry(
          this.dockerHost,
          {
            imageUrl: service.image,
          }
        );
        const newServiceSecret = await plugins.docker.DockerSecret.createSecret(this.dockerHost, {
          name: `${service.name}Secret`,
          contentArg: plugins.smartjson.stringify(service.secretJson),
          version: await newServiceImage.getVersion(),
          labels: {},
        });
        await plugins.docker.DockerService.createService(this.dockerHost, {
          image: newServiceImage,
          labels: {},
          name: service.name,
          networkAlias: service.name,
          networks: [],
          secrets: [newServiceSecret],
          ports: [`${service.port}:${service.secretJson.SERVEZONE_PORT}`],
        });
      }
      logger.log('success', 'updated all services!');
    }
  }
}
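updateServices() only acts when ~/serve.zone/spark/spark.json exists. The shape it expects can be read off the loop above; a sketch of that schema in TypeScript, with purely hypothetical sample values:

interface SparkServiceDefinition {
  image: string; // docker image to pull from the registry
  url: string;
  port: string; // host port, mapped to secretJson.SERVEZONE_PORT inside the container
  environment: string;
  secretJson: { SERVEZONE_PORT: string; [key: string]: any };
}

// hypothetical example of ~/serve.zone/spark/spark.json
const exampleSparkJson: { services: Record<string, SparkServiceDefinition> } = {
  services: {
    myservice: {
      image: 'registry.example.com/myservice:stable',
      url: 'myservice.example.com',
      port: '3000',
      environment: 'production',
      secretJson: { SERVEZONE_PORT: '3000' },
    },
  },
};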
ts/spark.cli.ts (new file, 95 lines)
@@ -0,0 +1,95 @@
import * as plugins from './spark.plugins.js';
import * as paths from './spark.paths.js';
import { Spark } from './spark.classes.spark.js';
import { logger } from './spark.logging.js';

export const runCli = async () => {
  const smartshellInstance = new plugins.smartshell.Smartshell({
    executor: 'bash',
  });
  const sparkInstance = new Spark();
  const smartcliInstance = new plugins.smartcli.Smartcli();
  smartcliInstance.standardCommand().subscribe(async () => {
    logger.log('info', 'no action specified! you can type:');
    logger.log('info', '* installdaemon');
    logger.log('info', '* updatedaemon');
    logger.log('info', '* asdaemon');
    logger.log('info', '* logs');
    logger.log('info', '* prune');
  });

  smartcliInstance.addCommand('installdaemon').subscribe(async (argvArg) => {
    logger.log('ok', 'we are apparently not running as daemon yet');
    logger.log('info', 'trying to set this up now');
    const sparkService = await sparkInstance.smartdaemon.addService({
      name: 'spark',
      version: sparkInstance.sparkInfo.projectInfo.version,
      command: 'spark asdaemon',
      description: 'spark daemon service',
      workingDir: paths.packageDir,
    });
    await sparkService.save();
    await sparkService.enable();
    await sparkService.start();
  });

  smartcliInstance.addCommand('updatedaemon').subscribe(async (argvArg) => {
    logger.log('ok', 'we are apparently trying to update the daemon for spark');
    logger.log('info', 'trying to set this up now');
    const sparkService = await sparkInstance.smartdaemon.addService({
      name: 'spark',
      version: sparkInstance.sparkInfo.projectInfo.version,
      command: 'spark asdaemon',
      description: 'spark daemon service',
      workingDir: paths.packageDir,
    });
    await sparkService.reload();
  });

  smartcliInstance.addCommand('asdaemon').subscribe(async (argvArg) => {
    logger.log('success', 'looks like we are running as daemon now');
    logger.log('info', 'starting spark in daemon mode');
    await sparkInstance.daemonStart();
  });

  smartcliInstance.addCommand('logs').subscribe(async (argvArg) => {
    await smartshellInstance.exec(`journalctl -u smartdaemon_spark -f`);
  });

  smartcliInstance.addCommand('prune').subscribe(async (argvArg) => {
    // daemon
    await smartshellInstance.exec(`systemctl stop smartdaemon_spark`);
    logger.log('ok', 'stopped the spark daemon');
    await plugins.smartdelay.delayFor(5000);

    // stacks
    await smartshellInstance.exec(`docker stack rm $(docker stack ls -q)`);
    logger.log('ok', 'removed docker stacks');
    await plugins.smartdelay.delayFor(5000);

    // services
    await smartshellInstance.exec(`docker service rm $(docker service ls -q)`);
    logger.log('ok', 'removed docker services');
    await plugins.smartdelay.delayFor(5000);

    // secrets
    await smartshellInstance.exec(`docker secret rm $(docker secret ls -q)`);
    logger.log('ok', 'removed docker secrets');
    await plugins.smartdelay.delayFor(5000);

    // networks
    await smartshellInstance.exec(`docker network rm szncorechat sznwebgateway`);
    logger.log('ok', 'removed docker networks');
    await plugins.smartdelay.delayFor(5000);

    await smartshellInstance.exec(`docker system prune -af`);
    logger.log('ok', 'pruned docker system');
    await plugins.smartdelay.delayFor(5000);

    // restart docker
    await smartshellInstance.exec(`systemctl restart docker`);
    logger.log('ok', 'restarted the docker service');
    await plugins.smartdelay.delayFor(5000);

    // hand control back to the daemon
    await smartshellInstance.exec(`systemctl start smartdaemon_spark`);
    logger.log('ok', 'handed over control back to the spark daemon');
  });
  smartcliInstance.startParse();
};
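The commands above are reached through the `spark` binary (assumption: the package's bin entry points at a script that calls the exported runCli). A minimal sketch of such a hypothetical entry point, assuming an ESM runtime with top-level await:

// hypothetical bin script, e.g. cli.js
import { runCli } from './index.js';

await runCli(); // parses argv and dispatches to installdaemon / updatedaemon / asdaemon / logs / prune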
ts/spark.logging.ts (new file, 15 lines)
@@ -0,0 +1,15 @@
import * as plugins from './spark.plugins.js';
import * as paths from './spark.paths.js';

const projectInfoNpm = new plugins.projectinfo.ProjectinfoNpm(paths.packageDir);

export const logger = new plugins.smartlog.Smartlog({
  logContext: {
    environment: 'production',
    runtime: 'node',
    zone: 'baremetal',
    company: null,
    companyunit: null,
    containerName: 'spark',
  },
});
ts/spark.paths.ts (new file, 4 lines)
@@ -0,0 +1,4 @@
import * as plugins from './spark.plugins.js';

export const packageDir = plugins.path.join(plugins.smartpath.get.dirnameFromImportMetaUrl(import.meta.url), '../');
export const homeDir = plugins.smartpath.get.home();
ts/spark.plugins.ts (new file, 40 lines)
@@ -0,0 +1,40 @@
// node native scope
import * as path from 'path';

export { path };

// @apiclient.xyz scope
import * as docker from '@apiclient.xyz/docker';

export { docker };

// @push.rocks scope
import * as npmextra from '@push.rocks/npmextra';
import * as projectinfo from '@push.rocks/projectinfo';
import * as smartcli from '@push.rocks/smartcli';
import * as smartdaemon from '@push.rocks/smartdaemon';
import * as smartdelay from '@push.rocks/smartdelay';
import * as smartfile from '@push.rocks/smartfile';
import * as smartjson from '@push.rocks/smartjson';
import * as smartlog from '@push.rocks/smartlog';
import * as smartlogDestinationLocal from '@push.rocks/smartlog-destination-local';
import * as smartpath from '@push.rocks/smartpath';
import * as smartshell from '@push.rocks/smartshell';
import * as smartupdate from '@push.rocks/smartupdate';
import * as taskbuffer from '@push.rocks/taskbuffer';

export {
  npmextra,
  projectinfo,
  smartcli,
  smartdaemon,
  smartdelay,
  smartfile,
  smartjson,
  smartlog,
  smartlogDestinationLocal,
  smartpath,
  smartshell,
  smartupdate,
  taskbuffer,
};