import * as plugins from './spark.plugins.js';
import * as paths from './spark.paths.js';
import { Spark } from './spark.classes.spark.js';
import { logger } from './spark.logging.js';

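/**
 * Entry point for the spark CLI.
 * Registers the available commands (installdaemon, updatedaemon, asdaemon,
 * logs, prune) with smartcli and then starts parsing process arguments.
 */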
export const runCli = async () => {
  const smartshellInstance = new plugins.smartshell.Smartshell({
    executor: 'bash',
  });
  const sparkInstance = new Spark();
  const smartcliInstance = new plugins.smartcli.Smartcli();

  smartcliInstance.standardCommand().subscribe(async () => {
    logger.log('info', 'no action specified! you can type:');
    logger.log('info', '* installdaemon');
    logger.log('info', '* updatedaemon');
    logger.log('info', '* asdaemon');
    logger.log('info', '* logs');
    logger.log('info', '* prune');
  });

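  // installdaemon: register spark as a smartdaemon (systemd) service,
  // then save, enable and start the freshly created service.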
  smartcliInstance.addCommand('installdaemon').subscribe(async (argvArg) => {
    logger.log('ok', 'we are apparently not running as daemon yet');
    logger.log('info', 'trying to set this up now');
    const sparkService = await sparkInstance.smartdaemon.addService({
      name: 'spark',
      version: sparkInstance.sparkInfo.projectInfo.version,
      command: 'spark asdaemon',
      description: 'spark daemon service',
      workingDir: paths.packageDir,
    });
    await sparkService.save();
    await sparkService.enable();
    await sparkService.start();
  });

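  // updatedaemon: re-register the service definition and reload the daemon
  // so an updated spark installation takes effect.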
  smartcliInstance.addCommand('updatedaemon').subscribe(async (argvArg) => {
    logger.log('ok', 'we are apparently trying to update the daemon for spark');
    logger.log('info', 'trying to set this up now');
    const sparkService = await sparkInstance.smartdaemon.addService({
      name: 'spark',
      version: sparkInstance.sparkInfo.projectInfo.version,
      command: 'spark asdaemon',
      description: 'spark daemon service',
      workingDir: paths.packageDir,
    });
    await sparkService.reload();
  });

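  // asdaemon: executed by the systemd unit itself (the service command is
  // 'spark asdaemon'); resolves the mode from the cli flag or the stored
  // config and then starts spark's daemon loop.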
  smartcliInstance.addCommand('asdaemon').subscribe(async (argvArg) => {
    logger.log('success', 'looks like we are running as daemon now');
    logger.log('info', 'starting spark in daemon mode');

    // let's determine the mode if specified
    let mode = argvArg.mode;
    if (mode === 'cloudly') {
      await sparkInstance.sparkConfig.kvStore.writeKey('mode', 'cloudly');
    } else if (mode === 'coreflow-node') {
      await sparkInstance.sparkConfig.kvStore.writeKey('mode', 'coreflow-node');
    } else if (mode) {
      logger.log('error', 'unknown mode specified');
      process.exit(1);
    } else {
      // mode is not specified via the cli, so let's get it from the config
      mode = await sparkInstance.sparkConfig.kvStore.readKey('mode');
    }

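    // register the service definition that matches the selected mode with the update manager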
    if (!mode) {
      logger.log('error', 'no mode specified by either cli or config');
      process.exit(1);
    } else if (mode === 'cloudly') {
      sparkInstance.sparkUpdateManager.services.push({
        name: `coreflow`,
        image: `code.foss.global/serve.zone/cloudly`,
        url: `cloudly`,
        environment: `production`,
        port: `3000`,
        secretJson: {
          SERVEZONE_PORT: `3000`,
          SERVEZONE_ENVIRONMENT: `production`,
        },
      });
    } else if (mode === 'coreflow-node') {
      sparkInstance.sparkUpdateManager.services.push({
        name: `coreflow`,
        image: `code.foss.global/serve.zone/coreflow`,
        url: `coreflow`,
        environment: `production`,
        port: `3000`,
        secretJson: {
          SERVEZONE_PORT: `3000`,
          SERVEZONE_ENVIRONMENT: `production`,
        },
      });
    }

    await sparkInstance.daemonStart();
  });

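  // logs: follow the journald output of the smartdaemon_spark unit.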
  smartcliInstance.addCommand('logs').subscribe(async (argvArg) => {
    await smartshellInstance.exec(`journalctl -u smartdaemon_spark -f`);
  });

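  // prune: stop the daemon, remove all docker stacks, services, secrets and
  // the szncorechat/sznwebgateway networks, prune the docker system, restart
  // docker and finally hand control back to the daemon.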
  smartcliInstance.addCommand('prune').subscribe(async (argvArg) => {
    // daemon
    await smartshellInstance.exec(`systemctl stop smartdaemon_spark`);
    logger.log('ok', 'stopped serverconfig daemon');
    await plugins.smartdelay.delayFor(5000);

    // stacks
    await smartshellInstance.exec(`docker stack rm $(docker stack ls -q)`);
    logger.log('ok', 'removed docker stacks');
    await plugins.smartdelay.delayFor(5000);

    // services
    await smartshellInstance.exec(`docker service rm $(docker service ls -q)`);
    logger.log('ok', 'removed docker services');
    await plugins.smartdelay.delayFor(5000);

    // secrets
    await smartshellInstance.exec(`docker secret rm $(docker secret ls -q)`);
    logger.log('ok', 'removed docker secrets');
    await plugins.smartdelay.delayFor(5000);

    // networks
    await smartshellInstance.exec(`docker network rm szncorechat sznwebgateway`);
    logger.log('ok', 'removed docker networks');
    await plugins.smartdelay.delayFor(5000);

    // system
    await smartshellInstance.exec(`docker system prune -af`);
    logger.log('ok', 'pruned docker system');
    await plugins.smartdelay.delayFor(5000);

    // restart docker
    await smartshellInstance.exec(`systemctl restart docker`);
    logger.log('ok', 'restarted the docker service');
    await plugins.smartdelay.delayFor(5000);

    // serverconfig daemon
    await smartshellInstance.exec(`systemctl start smartdaemon_spark`);
    logger.log('ok', 'handed over control back to serverconfig daemon');
  });

  smartcliInstance.startParse();
};
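
// Hypothetical usage from the package entry point (assumption; the actual
// bin/index wiring lives outside this file):
//   import { runCli } from './spark.cli.js';
//   runCli();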