Compare commits

8 Commits

| Author | SHA1 | Date |
| --- | --- | --- |
| | 8296e79d7e | |
| | 23661f60e5 | |
| | 8a8e901205 | |
| | 180f44345c | |
| | 4cf570afbd | |
| | 8913faebde | |
| | 9e523de620 | |
| | 435b6e24a1 | |
.gitlab-ci.yml (deleted, 132 lines)

@@ -1,132 +0,0 @@
-# gitzone ci_default_private
-image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
-
-cache:
-  paths:
-    - .npmci_cache/
-  key: '$CI_BUILD_STAGE'
-
-stages:
-  - security
-  - test
-  - release
-  - metadata
-
-before_script:
-  - pnpm install -g pnpm
-  - pnpm install -g @shipzone/npmci
-  - npmci npm prepare
-
-# ====================
-# security stage
-# ====================
-# ====================
-# security stage
-# ====================
-auditProductionDependencies:
-  image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
-  stage: security
-  script:
-    - npmci command npm config set registry https://registry.npmjs.org
-    - npmci command pnpm audit --audit-level=high --prod
-  tags:
-    - lossless
-    - docker
-
-auditDevDependencies:
-  image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
-  stage: security
-  script:
-    - npmci command npm config set registry https://registry.npmjs.org
-    - npmci command pnpm audit --audit-level=high --dev
-  tags:
-    - lossless
-    - docker
-  allow_failure: true
-
-# ====================
-# test stage
-# ====================
-
-testStable:
-  stage: test
-  script:
-    - npmci node install stable
-    - npmci npm install
-    - npmci npm test
-  coverage: /\d+.?\d+?\%\s*coverage/
-  tags:
-    - lossless
-    - docker
-    - notpriv
-
-testBuild:
-  stage: test
-  script:
-    - npmci node install stable
-    - npmci npm install
-    - npmci command npm run build
-  coverage: /\d+.?\d+?\%\s*coverage/
-  tags:
-    - lossless
-    - docker
-    - notpriv
-
-release:
-  stage: release
-  script:
-    - npmci node install stable
-    - npmci npm publish
-  only:
-    - tags
-  tags:
-    - lossless
-    - docker
-    - notpriv
-
-# ====================
-# metadata stage
-# ====================
-codequality:
-  stage: metadata
-  allow_failure: true
-  only:
-    - tags
-  script:
-    - npmci command npm install -g typescript
-    - npmci npm prepare
-    - npmci npm install
-  tags:
-    - lossless
-    - docker
-    - priv
-
-trigger:
-  stage: metadata
-  script:
-    - npmci trigger
-  only:
-    - tags
-  tags:
-    - lossless
-    - docker
-    - notpriv
-
-pages:
-  stage: metadata
-  script:
-    - npmci node install lts
-    - npmci command npm install -g @git.zone/tsdoc
-    - npmci npm install
-    - npmci command tsdoc
-  tags:
-    - lossless
-    - docker
-    - notpriv
-  only:
-    - tags
-  artifacts:
-    expire_in: 1 week
-    paths:
-      - public
-  allow_failure: true
license (4 lines changed)

@@ -1,4 +1,6 @@
-Copyright (c) 2019 Lossless GmbH (hello@lossless.com)
+The MIT License (MIT)
+
+Copyright (c) 2019 Task Venture Capital GmbH (hello@task.vc)
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
npmextra.json

@@ -5,7 +5,7 @@
       "githost": "gitlab.com",
       "gitscope": "losslessone/services/initzone",
       "gitrepo": "spark",
-      "description": "A tool to maintain and configure servers on the base OS level for the Servezone infrastructure.",
+      "description": "A comprehensive tool for maintaining and configuring servers, integrating with Docker and supporting advanced task scheduling, targeted at the Servezone infrastructure.",
       "npmPackagename": "@losslessone_private/spark",
       "license": "MIT",
       "projectDomain": "https://lossless.one",
@@ -20,7 +20,11 @@
         "continuous deployment",
         "deployment automation",
         "service orchestration",
-        "node.js"
+        "node.js",
+        "task scheduling",
+        "CLI",
+        "logging",
+        "server maintenance"
       ]
     }
  },
package.json (29 lines changed)

@@ -1,8 +1,8 @@
 {
   "name": "@serve.zone/spark",
-  "version": "1.0.84",
+  "version": "1.0.88",
   "private": false,
-  "description": "A tool to maintain and configure servers on the base OS level for the Servezone infrastructure.",
+  "description": "A comprehensive tool for maintaining and configuring servers, integrating with Docker and supporting advanced task scheduling, targeted at the Servezone infrastructure.",
   "main": "dist_ts/index.js",
   "typings": "dist_ts/index.d.ts",
   "author": "Task Venture Capital GmbH",
@@ -16,28 +16,29 @@
     "spark": "./cli.js"
   },
   "devDependencies": {
-    "@git.zone/tsbuild": "^2.1.29",
+    "@git.zone/tsbuild": "^2.1.80",
     "@git.zone/tsrun": "^1.2.39",
     "@git.zone/tstest": "^1.0.60",
     "@push.rocks/tapbundle": "^5.0.4",
-    "@types/node": "20.10.0"
+    "@types/node": "20.14.2"
   },
   "dependencies": {
-    "@apiclient.xyz/docker": "^1.0.103",
-    "@push.rocks/npmextra": "^5.0.13",
+    "@apiclient.xyz/docker": "^1.2.2",
+    "@push.rocks/npmextra": "^5.0.17",
     "@push.rocks/projectinfo": "^5.0.1",
     "@push.rocks/qenv": "^6.0.5",
-    "@push.rocks/smartcli": "^4.0.6",
+    "@push.rocks/smartcli": "^4.0.11",
     "@push.rocks/smartdaemon": "^2.0.3",
     "@push.rocks/smartdelay": "^3.0.5",
-    "@push.rocks/smartfile": "^11.0.14",
-    "@push.rocks/smartjson": "^5.0.5",
-    "@push.rocks/smartlog": "^3.0.3",
+    "@push.rocks/smartfile": "^11.0.20",
+    "@push.rocks/smartjson": "^5.0.20",
+    "@push.rocks/smartlog": "^3.0.7",
     "@push.rocks/smartlog-destination-local": "^9.0.0",
     "@push.rocks/smartpath": "^5.0.5",
     "@push.rocks/smartshell": "^3.0.5",
     "@push.rocks/smartupdate": "^2.0.4",
-    "@push.rocks/taskbuffer": "^3.0.10"
+    "@push.rocks/taskbuffer": "^3.0.10",
+    "@serve.zone/interfaces": "^1.0.74"
   },
   "files": [
     "ts/**/*",
@@ -66,6 +67,10 @@
     "continuous deployment",
     "deployment automation",
     "service orchestration",
-    "node.js"
+    "node.js",
+    "task scheduling",
+    "CLI",
+    "logging",
+    "server maintenance"
   ]
 }
pnpm-lock.yaml (generated, 8815 lines)

File diff suppressed because it is too large.
readme.md (196 lines changed)

@@ -1,8 +1,9 @@
 # @serve.zone/spark
-sparks the servezone services
+A tool to maintain and configure servers on the base OS level for the Servezone infrastructure.
 
 ## Install
 To install `@serve.zone/spark`, run the following command in your terminal:
 
 ```sh
 npm install @serve.zone/spark --save
 ```
@@ -13,6 +14,7 @@ npm install @serve.zone/spark --save
 To use `@serve.zone/spark` in your project, you need to include and initiate it in your TypeScript project. Ensure you have TypeScript and the necessary build tools set up in your project.
 
 First, import `@serve.zone/spark`:
 
 ```typescript
 import { Spark } from '@serve.zone/spark';
 ```
@@ -73,7 +75,7 @@ sparkInstance.sparkTaskManager.taskmanager.addAndScheduleTask(myTask, '* * * * *
 
 The example above creates a simple task that logs a message every second, demonstrating how to use Spark's task manager for custom scheduled tasks.
 
-### Advanced Configuration
+### Detailed Service Management
 For advanced configurations, including Docker and service management:
 
 - Use `SparkUpdateManager` to handle Docker image updates, service creation, and management.
@@ -88,6 +90,194 @@ const newServiceDefinition = {...};
 await sparkInstance.sparkUpdateManager.createService(newServiceDefinition);
 ```
 
+### CLI Commands
+Spark provides several CLI commands to interact with and manage the system services:
+
+#### Installing Spark as a Daemon
+```shell
+spark installdaemon
+```
+
+Sets up Spark as a system service to maintain server configurations automatically.
+
+#### Updating the Daemon
+```shell
+spark updatedaemon
+```
+
+Updates the daemon service if a new version is available.
+
+#### Running Spark as Daemon
+```shell
+spark asdaemon
+```
+
+Runs Spark in daemon mode, which is suitable for executing automated tasks.
+
+#### Viewing Logs
+```shell
+spark logs
+```
+
+Views the logs of the Spark daemon service.
+
+#### Cleaning Up Services
+```shell
+spark prune
+```
+
+Stops and cleans up all Docker services (stacks, networks, secrets, etc.) and prunes the Docker system.
+
+### Programmatic Daemon Management
+You can also manage the daemon programmatically as shown in the following examples:
+
+```typescript
+import { SmartDaemon } from '@push.rocks/smartdaemon';
+import { Spark } from '@serve.zone/spark';
+
+const sparkInstance = new Spark();
+const smartDaemon = new SmartDaemon();
+
+const startDaemon = async () => {
+  const sparkService = await smartDaemon.addService({
+    name: 'spark',
+    version: sparkInstance.sparkInfo.projectInfo.version,
+    command: 'spark asdaemon',
+    description: 'Spark daemon service',
+    workingDir: '/path/to/project',
+  });
+  await sparkService.save();
+  await sparkService.enable();
+  await sparkService.start();
+};
+
+const updateDaemon = async () => {
+  const sparkService = await smartDaemon.addService({
+    name: 'spark',
+    version: sparkInstance.sparkInfo.projectInfo.version,
+    command: 'spark asdaemon',
+    description: 'Spark daemon service',
+    workingDir: '/path/to/project',
+  });
+  await sparkService.reload();
+};
+
+startDaemon();
+updateDaemon();
+```
+
+This illustrates how to initiate and update the Spark daemon using the `SmartDaemon` class from `@push.rocks/smartdaemon`.
+
+### Configuration Management
+Extensive configuration management is possible through the `SparkLocalConfig` and other configuration classes. This feature allows you to make your application's behavior adaptable based on different environments and requirements.
+
+```typescript
+// Example on setting local config
+import { SparkLocalConfig } from '@serve.zone/spark';
+
+const localConfig = new SparkLocalConfig(sparkInstance);
+await localConfig.kvStore.set('someKey', 'someValue');
+
+// Retrieving a value from local config
+const someConfigValue = await localConfig.kvStore.get('someKey');
+
+console.log(someConfigValue); // Outputs: someValue
+```
+
+### Detailed Log Management
+Logging is a crucial aspect of any automation tool, and `@serve.zone/spark` offers rich logging functionality through its built-in logging library.
+
+```typescript
+import { logger, Spark } from '@serve.zone/spark';
+
+const sparkInstance = new Spark();
+
+logger.log('info', 'Spark instance created.');
+
+// Using logger in various levels of severity
+logger.log('debug', 'This is a debug message');
+logger.log('warn', 'This is a warning message');
+logger.log('error', 'This is an error message');
+logger.log('ok', 'This is a success message');
+```
+
+### Real-World Scenarios
+
+#### Automated System Update and Restart
+In real-world scenarios, you might want to automate system updates and reboots to ensure your services are running the latest security patches and features.
+
+```typescript
+import { Spark } from '@serve.zone/spark';
+import { SmartShell } from '@push.rocks/smartshell';
+
+const sparkInstance = new Spark();
+const shell = new SmartShell({ executor: 'bash' });
+
+const updateAndRestart = async () => {
+  await shell.exec('apt-get update && apt-get upgrade -y');
+  console.log('System updated.');
+  await shell.exec('reboot');
+};
+
+sparkInstance.sparkTaskManager.taskmanager.addAndScheduleTask(
+  { name: 'updateAndRestart', taskFunction: updateAndRestart },
+  '0 3 * * 7' // Every Sunday at 3 AM
+);
+```
+
+This example demonstrates creating and scheduling a task to update and restart the server every Sunday at 3 AM using Spark's task management capabilities.
+
+#### Integrating with Docker for Service Deployment
+Spark's tight integration with Docker makes it an excellent tool for deploying containerized applications across your infrastructure.
+
+```typescript
+import { Spark } from '@serve.zone/spark';
+import { DockerHost } from '@apiclient.xyz/docker';
+
+const sparkInstance = new Spark();
+const dockerHost = new DockerHost({});
+
+const deployService = async () => {
+  const image = await dockerHost.pullImage('my-docker-repo/my-service:latest');
+  const newService = await dockerHost.createService({
+    name: 'my-service',
+    image,
+    ports: ['80:8080'],
+    environmentVariables: {
+      NODE_ENV: 'production',
+    },
+  });
+  console.log(`Service ${newService.name} deployed.`);
+};
+
+deployService();
+```
+
+This example demonstrates how to pull a Docker image and deploy it as a new service in your infrastructure using Spark's Docker integration.
+
+### Managing Secrets
+Managing secrets and sensitive data is crucial in any configuration and automation tool. Spark's integration with Docker allows you to handle secrets securely.
+
+```typescript
+import { Spark, SparkUpdateManager } from '@serve.zone/spark';
+import { DockerSecret } from '@apiclient.xyz/docker';
+
+const sparkInstance = new Spark();
+const updateManager = new SparkUpdateManager(sparkInstance);
+
+const createDockerSecret = async () => {
+  const secret = await DockerSecret.createSecret(updateManager.dockerHost, {
+    name: 'dbPassword',
+    contentArg: 'superSecretPassword',
+  });
+  console.log(`Secret ${secret.Spec.Name} created.`);
+};
+
+createDockerSecret();
+```
+
+This example shows how to create a Docker secret using Spark's `SparkUpdateManager` class, ensuring that sensitive information is securely stored and managed.
+
 ### Conclusion
-`@serve.zone/spark` provides a comprehensive toolkit for orchestrating and managing server environments and Docker-based services. By leveraging its CLI and programmatic interfaces, you can automate and streamline server operations, configurations, updates, and task scheduling, ensuring your infrastructure is responsive, updated, and maintained efficiently.
+`@serve.zone/spark` is a comprehensive toolkit for orchestrating and managing server environments and Docker-based services. By leveraging its CLI and programmatic interfaces, you can automate and streamline server operations, configurations, updates, and task scheduling, ensuring your infrastructure is responsive, updated, and maintained efficiently.
ts/00_commitinfo_data.ts

@@ -3,6 +3,6 @@
  */
 export const commitinfo = {
   name: '@serve.zone/spark',
-  version: '1.0.84',
-  description: 'A tool to maintain and configure servers on the base OS level for the Servezone infrastructure.'
+  version: '1.0.88',
+  description: 'A comprehensive tool for maintaining and configuring servers, integrating with Docker and supporting advanced task scheduling, targeted at the Servezone infrastructure.'
 }
@@ -7,6 +7,9 @@ export class SparkLocalConfig {
 
   constructor(sparkRefArg: Spark) {
     this.sparkRef = sparkRefArg;
-    this.kvStore = new plugins.npmextra.KeyValueStore('custom', 'spark');
+    this.kvStore = new plugins.npmextra.KeyValueStore({
+      typeArg: 'userHomeDir',
+      identityArg: 'spark',
+    });
   }
 }
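The `KeyValueStore` change above tracks the newer `@push.rocks/npmextra` API, which takes a single options object instead of positional arguments. A minimal standalone sketch of the new shape, using only the option values shown in the diff; the `set`/`get` calls mirror the `kvStore` usage in the readme above:

```typescript
import { KeyValueStore } from '@push.rocks/npmextra';

const demo = async () => {
  // The options object replaces the old positional ('custom', 'spark') signature;
  // 'userHomeDir' persists the store under the invoking user's home directory,
  // while identityArg namespaces the data for spark.
  const kvStore = new KeyValueStore({
    typeArg: 'userHomeDir',
    identityArg: 'spark',
  });

  await kvStore.set('someKey', 'someValue');
  const value = await kvStore.get('someKey');
  console.log(value); // someValue
};

demo();
```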
@@ -9,7 +9,7 @@ export class SparkUpdateManager {
   public smartupdate: plugins.smartupdate.SmartUpdate;
   constructor(sparkrefArg: Spark) {
     this.sparkRef = sparkrefArg;
-    this.dockerHost = new plugins.docker.DockerHost();
+    this.dockerHost = new plugins.docker.DockerHost({});
     this.smartupdate = new plugins.smartupdate.SmartUpdate();
   }
 
@@ -27,9 +27,6 @@ export class SparkUpdateManager {
         plugins.path.join(paths.homeDir, 'serve.zone/spark/spark.json')
       ))
     ) {
-      const sparkJson = plugins.smartfile.fs.toObjectSync(
-        plugins.path.join(paths.homeDir, 'serve.zone/spark/spark.json')
-      );
       const services: Array<{
         name: string;
         image: string;
@@ -38,16 +35,45 @@ export class SparkUpdateManager {
         environment: string;
         secretJson: any;
       }> = [];
-      for (const serviceKey of Object.keys(sparkJson.services)) {
-        services.push({
-          name: serviceKey,
-          image: sparkJson.services[serviceKey].image,
-          url: sparkJson.services[serviceKey].url,
-          environment: sparkJson.services[serviceKey].environment,
-          port: sparkJson.services[serviceKey].port,
-          secretJson: sparkJson.services[serviceKey].secretJson,
-        });
-      }
+      // lets add coreflow
+      services.push({
+        name: `coreflow`,
+        image: `code.foss.global/serve.zone/coreflow`,
+        url: `coreflow`,
+        environment: `production`,
+        port: `3000`,
+        secretJson: {
+          SERVEZONE_PORT: `3000`,
+          SERVEZONE_ENVIRONMENT: `production`,
+        },
+      });
+
+      services.push({
+        name: `coretraffic`,
+        image: `code.foss.global/serve.zone/coretraffic`,
+        url: `coreflow`,
+        environment: `production`,
+        port: `3000`,
+        secretJson: {
+          SERVEZONE_PORT: `3000`,
+          SERVEZONE_ENVIRONMENT: `production`,
+        },
+      });
+
+      services.push({
+        name: `corelog`,
+        image: `code.foss.global/serve.zone/corelog`,
+        url: `coreflow`,
+        environment: `production`,
+        port: `3000`,
+        secretJson: {
+          SERVEZONE_PORT: `3000`,
+          SERVEZONE_ENVIRONMENT: `production`,
+        },
+      });
+
+      // lets add coretraffic
+
       for (const service of services) {
         const existingService = await plugins.docker.DockerService.getServiceByName(
           this.dockerHost,
@@ -61,6 +87,7 @@ export class SparkUpdateManager {
         const needsUpdate: boolean = await existingService.needsUpdate();
         if (!needsUpdate) {
           logger.log('info', `not needing update.`);
+          // we simply return here to end the functions
           return;
         }
         logger.log('ok', `${service.name} needs to be updated!`);
@@ -74,7 +101,9 @@ export class SparkUpdateManager {
         const newServiceImage = await plugins.docker.DockerImage.createFromRegistry(
           this.dockerHost,
           {
-            imageUrl: service.image,
+            creationObject: {
+              imageUrl: service.image,
+            },
           }
         );
         const newServiceSecret = await plugins.docker.DockerSecret.createSecret(this.dockerHost, {
@@ -92,8 +121,9 @@ export class SparkUpdateManager {
           secrets: [newServiceSecret],
           ports: [`${service.port}:${service.secretJson.SERVEZONE_PORT}`],
         });
-        logger.log('success', 'updated all services!');
+        logger.log('ok', `updated service >>${newService.Spec.Name}<<!`);
       }
+      logger.log('success', `updated ${services.length} services!`);
     }
   }
 }
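The rewrite above drops the `spark.json`-driven service list and pins three core services (coreflow, coretraffic, corelog) that differ only by name and image. For reference, a sketch of how the repeated descriptor literals line up, using only values that appear in the diff; the `coreService` factory is illustrative, not part of the commit:

```typescript
// Descriptor shape, copied from the Array<{...}> annotation in the diff above.
interface IServiceDescriptor {
  name: string;
  image: string;
  url: string;
  port: string;
  environment: string;
  secretJson: any;
}

// Hypothetical factory: the three hardcoded services share url, port,
// environment, and secretJson; url stays `coreflow` for all of them,
// exactly as in the diff.
const coreService = (name: string): IServiceDescriptor => ({
  name,
  image: `code.foss.global/serve.zone/${name}`,
  url: `coreflow`,
  environment: `production`,
  port: `3000`,
  secretJson: {
    SERVEZONE_PORT: `3000`,
    SERVEZONE_ENVIRONMENT: `production`,
  },
});

const services: IServiceDescriptor[] =
  ['coreflow', 'coretraffic', 'corelog'].map(coreService);
console.log(services.map((s) => s.image));
```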
@@ -1,15 +1,7 @@
 import * as plugins from './spark.plugins.js';
 import * as paths from './spark.paths.js';
+import { commitinfo } from './00_commitinfo_data.js';
 
 const projectInfoNpm = new plugins.projectinfo.ProjectinfoNpm(paths.packageDir);
 
-export const logger = new plugins.smartlog.Smartlog({
-  logContext: {
-    environment: 'production',
-    runtime: 'node',
-    zone: 'baremetal',
-    company: null,
-    companyunit: null,
-    containerName: 'spark',
-  }
-});
+export const logger = plugins.smartlog.Smartlog.createForCommitinfo(commitinfo);
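For consumers nothing changes: `logger` is still exported from the same module. A sketch of the effect, assuming `createForCommitinfo` derives the log context from the generated commit info (as the diff suggests); the `logger.log` levels mirror the readme examples:

```typescript
import * as plugins from './spark.plugins.js';
import { commitinfo } from './00_commitinfo_data.js';

// One factory call replaces the hand-maintained logContext block; the logger
// identity now tracks 00_commitinfo_data.ts, which is regenerated per release.
export const logger = plugins.smartlog.Smartlog.createForCommitinfo(commitinfo);

logger.log('info', `${commitinfo.name}@${commitinfo.version} logger ready`);
```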
@@ -3,6 +3,11 @@ import * as path from 'path';
 
 export { path };
 
+// @serve.zone scope
+import * as servezoneInterfaces from '@serve.zone/interfaces';
+
+export { servezoneInterfaces };
+
 // @apiclient.xyz scope
 import * as docker from '@apiclient.xyz/docker';
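The new scope is consumed through the same `plugins` namespace pattern the other imports use (compare `plugins.smartlog` and `plugins.docker` in the diffs above). A hypothetical consumer sketch; the concrete member names of `@serve.zone/interfaces` are not shown in this diff:

```typescript
import * as plugins from './spark.plugins.js';

// Shared types and constants from @serve.zone/interfaces are now reachable
// alongside the existing scopes (plugins.docker, plugins.smartlog, ...).
console.log(Object.keys(plugins.servezoneInterfaces));
```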