fix(core): update

Philipp Kunz 2024-06-13 15:12:07 +02:00
parent 9e523de620
commit 8913faebde
7 changed files with 5265 additions and 2806 deletions


@@ -1,4 +1,6 @@
-Copyright (c) 2019 Lossless GmbH (hello@lossless.com)
+The MIT License (MIT)
+
+Copyright (c) 2019 Task Venture Capital GmbH (hello@task.vc)
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal


@@ -16,28 +16,29 @@
     "spark": "./cli.js"
   },
   "devDependencies": {
-    "@git.zone/tsbuild": "^2.1.29",
+    "@git.zone/tsbuild": "^2.1.80",
     "@git.zone/tsrun": "^1.2.39",
     "@git.zone/tstest": "^1.0.60",
     "@push.rocks/tapbundle": "^5.0.4",
-    "@types/node": "20.10.0"
+    "@types/node": "20.14.2"
   },
   "dependencies": {
-    "@apiclient.xyz/docker": "^1.0.103",
-    "@push.rocks/npmextra": "^5.0.13",
+    "@apiclient.xyz/docker": "^1.2.2",
+    "@push.rocks/npmextra": "^5.0.17",
     "@push.rocks/projectinfo": "^5.0.1",
     "@push.rocks/qenv": "^6.0.5",
-    "@push.rocks/smartcli": "^4.0.6",
+    "@push.rocks/smartcli": "^4.0.11",
     "@push.rocks/smartdaemon": "^2.0.3",
     "@push.rocks/smartdelay": "^3.0.5",
-    "@push.rocks/smartfile": "^11.0.14",
-    "@push.rocks/smartjson": "^5.0.5",
-    "@push.rocks/smartlog": "^3.0.3",
+    "@push.rocks/smartfile": "^11.0.20",
+    "@push.rocks/smartjson": "^5.0.20",
+    "@push.rocks/smartlog": "^3.0.7",
     "@push.rocks/smartlog-destination-local": "^9.0.0",
     "@push.rocks/smartpath": "^5.0.5",
     "@push.rocks/smartshell": "^3.0.5",
     "@push.rocks/smartupdate": "^2.0.4",
-    "@push.rocks/taskbuffer": "^3.0.10"
+    "@push.rocks/taskbuffer": "^3.0.10",
+    "@serve.zone/interfaces": "^1.0.74"
   },
   "files": [
     "ts/**/*",

pnpm-lock.yaml (generated, 7881 changed lines): file diff suppressed because it is too large.


@@ -3,6 +3,6 @@
  */
 export const commitinfo = {
   name: '@serve.zone/spark',
-  version: '1.0.85',
+  version: '1.0.86',
   description: 'A tool to maintain and configure servers on the base OS level for the Servezone infrastructure.'
 }


@@ -9,7 +9,7 @@ export class SparkUpdateManager {
   public smartupdate: plugins.smartupdate.SmartUpdate;
   constructor(sparkrefArg: Spark) {
     this.sparkRef = sparkrefArg;
-    this.dockerHost = new plugins.docker.DockerHost();
+    this.dockerHost = new plugins.docker.DockerHost({});
     this.smartupdate = new plugins.smartupdate.SmartUpdate();
   }
@@ -27,9 +27,6 @@ export class SparkUpdateManager {
         plugins.path.join(paths.homeDir, 'serve.zone/spark/spark.json')
       ))
     ) {
-      const sparkJson = plugins.smartfile.fs.toObjectSync(
-        plugins.path.join(paths.homeDir, 'serve.zone/spark/spark.json')
-      );
       const services: Array<{
         name: string;
         image: string;
@@ -38,16 +35,45 @@
         environment: string;
         secretJson: any;
       }> = [];
-      for (const serviceKey of Object.keys(sparkJson.services)) {
+      // lets add coreflow
       services.push({
-        name: serviceKey,
-        image: sparkJson.services[serviceKey].image,
-        url: sparkJson.services[serviceKey].url,
-        environment: sparkJson.services[serviceKey].environment,
-        port: sparkJson.services[serviceKey].port,
-        secretJson: sparkJson.services[serviceKey].secretJson,
+        name: `coreflow`,
+        image: `code.foss.global/serve.zone/coreflow`,
+        url: `coreflow`,
+        environment: `production`,
+        port: `3000`,
+        secretJson: {
+          SERVEZONE_PORT: `3000`,
+          SERVEZONE_ENVIRONMENT: `production`,
+        },
       });
-      }
+      // lets add coretraffic
+      services.push({
+        name: `coretraffic`,
+        image: `code.foss.global/serve.zone/coretraffic`,
+        url: `coreflow`,
+        environment: `production`,
+        port: `3000`,
+        secretJson: {
+          SERVEZONE_PORT: `3000`,
+          SERVEZONE_ENVIRONMENT: `production`,
+        },
+      });
+      services.push({
+        name: `corelog`,
+        image: `code.foss.global/serve.zone/corelog`,
+        url: `coreflow`,
+        environment: `production`,
+        port: `3000`,
+        secretJson: {
+          SERVEZONE_PORT: `3000`,
+          SERVEZONE_ENVIRONMENT: `production`,
+        },
+      });
       for (const service of services) {
         const existingService = await plugins.docker.DockerService.getServiceByName(
           this.dockerHost,
@@ -61,6 +87,7 @@ export class SparkUpdateManager {
         const needsUpdate: boolean = await existingService.needsUpdate();
         if (!needsUpdate) {
           logger.log('info', `no update needed.`);
+          // we simply return here to end the function
           return;
         }
         logger.log('ok', `${service.name} needs to be updated!`);
@@ -74,7 +101,9 @@
         const newServiceImage = await plugins.docker.DockerImage.createFromRegistry(
           this.dockerHost,
           {
-            imageUrl: service.image,
+            creationObject: {
+              imageUrl: service.image,
+            },
           }
         );
         const newServiceSecret = await plugins.docker.DockerSecret.createSecret(this.dockerHost, {
@@ -92,8 +121,9 @@
           secrets: [newServiceSecret],
           ports: [`${service.port}:${service.secretJson.SERVEZONE_PORT}`],
         });
-        logger.log('success', 'updated all services!');
-      }
+        logger.log('ok', `updated service >>${newService.Spec.Name}<<!`);
+      }
+      logger.log('success', `updated ${services.length} services!`);
     }
   }
 }
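For context on the change above: SparkUpdateManager no longer reads its service list from spark.json; the coreflow, coretraffic, and corelog descriptors are now hardcoded, and all three share url `coreflow`, environment `production`, and port `3000`. A condensed sketch of the resulting shape, using only field names and values visible in this diff (the buildServiceList helper is hypothetical and not part of the commit):

interface SparkServiceDescriptor {
  name: string;
  image: string;
  url: string;
  port: string;
  environment: string;
  secretJson: any;
}

// Hypothetical helper reproducing the three hardcoded services pushed above.
// Note: url is 'coreflow' for all three entries, exactly as in the diff.
function buildServiceList(): SparkServiceDescriptor[] {
  const shared = {
    url: 'coreflow',
    environment: 'production',
    port: '3000',
    secretJson: {
      SERVEZONE_PORT: '3000',
      SERVEZONE_ENVIRONMENT: 'production',
    },
  };
  return ['coreflow', 'coretraffic', 'corelog'].map((name) => ({
    name,
    image: `code.foss.global/serve.zone/${name}`,
    ...shared,
  }));
}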


@@ -1,15 +1,7 @@
 import * as plugins from './spark.plugins.js';
 import * as paths from './spark.paths.js';
+import { commitinfo } from './00_commitinfo_data.js';
-const projectInfoNpm = new plugins.projectinfo.ProjectinfoNpm(paths.packageDir);
-export const logger = new plugins.smartlog.Smartlog({
-  logContext: {
-    environment: 'production',
-    runtime: 'node',
-    zone: 'baremetal',
-    company: null,
-    companyunit: null,
-    containerName: 'spark',
-  }
-});
+export const logger = plugins.smartlog.Smartlog.createForCommitinfo(commitinfo);
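The logger is now derived from the package's commit metadata rather than a hand-maintained logContext object. A minimal usage sketch, assuming createForCommitinfo accepts the commitinfo object exported by the commitinfo file above (an assumption based on this diff, not on documented smartlog API):

// Sketch only: Smartlog.createForCommitinfo as used in this commit.
import * as smartlog from '@push.rocks/smartlog';
import { commitinfo } from './00_commitinfo_data.js';

const logger = smartlog.Smartlog.createForCommitinfo(commitinfo);
logger.log('info', `${commitinfo.name}@${commitinfo.version} started`);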


@@ -3,6 +3,11 @@ import * as path from 'path';
 export { path };
+// @serve.zone scope
+import * as servezoneInterfaces from '@serve.zone/interfaces';
+export { servezoneInterfaces };
 // @apiclient.xyz scope
 import * as docker from '@apiclient.xyz/docker';
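Elsewhere in the codebase this barrel module is consumed as a single namespace (the logging file above imports it as plugins). A small sketch of that pattern, using only identifiers that appear in this commit:

// Consumers import the barrel once and reach each dependency through it.
import * as plugins from './spark.plugins.js';

const dockerHost = new plugins.docker.DockerHost({});
// The newly exported interfaces are reachable the same way, e.g. plugins.servezoneInterfaces.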