10 Commits

| SHA1 | Message | Date |
| --- | --- | --- |
| 8296e79d7e | 1.0.88 | 2024-06-13 15:20:00 +02:00 |
| 23661f60e5 | fix(core): update | 2024-06-13 15:19:59 +02:00 |
| 8a8e901205 | 1.0.87 | 2024-06-13 15:15:42 +02:00 |
| 180f44345c | fix(core): update | 2024-06-13 15:15:41 +02:00 |
| 4cf570afbd | 1.0.86 | 2024-06-13 15:12:08 +02:00 |
| 8913faebde | fix(core): update | 2024-06-13 15:12:07 +02:00 |
| 9e523de620 | 1.0.85 | 2024-05-08 21:31:32 +02:00 |
| 435b6e24a1 | fix(core): update | 2024-05-08 21:31:31 +02:00 |
| 02e68ebb68 | 1.0.84 | 2024-05-08 20:53:35 +02:00 |
| 4e73bf7a0e | fix(core): update | 2024-05-08 20:53:35 +02:00 |
12 changed files with 5793 additions and 3612 deletions

View File

@@ -1,132 +0,0 @@
# gitzone ci_default_private
image: registry.gitlab.com/hosttoday/ht-docker-node:npmci

cache:
  paths:
    - .npmci_cache/
  key: '$CI_BUILD_STAGE'

stages:
  - security
  - test
  - release
  - metadata

before_script:
  - pnpm install -g pnpm
  - pnpm install -g @shipzone/npmci
  - npmci npm prepare

# ====================
# security stage
# ====================
auditProductionDependencies:
  image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
  stage: security
  script:
    - npmci command npm config set registry https://registry.npmjs.org
    - npmci command pnpm audit --audit-level=high --prod
  tags:
    - lossless
    - docker

auditDevDependencies:
  image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
  stage: security
  script:
    - npmci command npm config set registry https://registry.npmjs.org
    - npmci command pnpm audit --audit-level=high --dev
  tags:
    - lossless
    - docker
  allow_failure: true

# ====================
# test stage
# ====================
testStable:
  stage: test
  script:
    - npmci node install stable
    - npmci npm install
    - npmci npm test
  coverage: /\d+.?\d+?\%\s*coverage/
  tags:
    - lossless
    - docker
    - notpriv

testBuild:
  stage: test
  script:
    - npmci node install stable
    - npmci npm install
    - npmci command npm run build
  coverage: /\d+.?\d+?\%\s*coverage/
  tags:
    - lossless
    - docker
    - notpriv

release:
  stage: release
  script:
    - npmci node install stable
    - npmci npm publish
  only:
    - tags
  tags:
    - lossless
    - docker
    - notpriv

# ====================
# metadata stage
# ====================
codequality:
  stage: metadata
  allow_failure: true
  only:
    - tags
  script:
    - npmci command npm install -g typescript
    - npmci npm prepare
    - npmci npm install
  tags:
    - lossless
    - docker
    - priv

trigger:
  stage: metadata
  script:
    - npmci trigger
  only:
    - tags
  tags:
    - lossless
    - docker
    - notpriv

pages:
  stage: metadata
  script:
    - npmci node install lts
    - npmci command npm install -g @git.zone/tsdoc
    - npmci npm install
    - npmci command tsdoc
  tags:
    - lossless
    - docker
    - notpriv
  only:
    - tags
  artifacts:
    expire_in: 1 week
    paths:
      - public
  allow_failure: true

View File

@@ -1,4 +1,6 @@
Copyright (c) 2019 Lossless GmbH (hello@lossless.com)
The MIT License (MIT)
Copyright (c) 2019 Task Venture Capital GmbH (hello@task.vc)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

View File

@@ -5,10 +5,27 @@
      "githost": "gitlab.com",
      "gitscope": "losslessone/services/initzone",
      "gitrepo": "spark",
      "description": "sparks the servezone services",
      "description": "A comprehensive tool for maintaining and configuring servers, integrating with Docker and supporting advanced task scheduling, targeted at the Servezone infrastructure.",
      "npmPackagename": "@losslessone_private/spark",
      "license": "MIT",
      "projectDomain": "https://lossless.one"
      "projectDomain": "https://lossless.one",
      "keywords": [
        "server management",
        "devops",
        "automation",
        "docker",
        "configuration management",
        "daemon service",
        "continuous integration",
        "continuous deployment",
        "deployment automation",
        "service orchestration",
        "node.js",
        "task scheduling",
        "CLI",
        "logging",
        "server maintenance"
      ]
    }
  },
  "npmci": {

View File

@@ -1,8 +1,8 @@
{
  "name": "@serve.zone/spark",
  "version": "1.0.83",
  "version": "1.0.88",
  "private": false,
  "description": "sparks the servezone services",
  "description": "A comprehensive tool for maintaining and configuring servers, integrating with Docker and supporting advanced task scheduling, targeted at the Servezone infrastructure.",
  "main": "dist_ts/index.js",
  "typings": "dist_ts/index.d.ts",
  "author": "Task Venture Capital GmbH",
@@ -16,28 +16,29 @@
    "spark": "./cli.js"
  },
  "devDependencies": {
    "@git.zone/tsbuild": "^2.1.29",
    "@git.zone/tsbuild": "^2.1.80",
    "@git.zone/tsrun": "^1.2.39",
    "@git.zone/tstest": "^1.0.60",
    "@push.rocks/tapbundle": "^5.0.4",
    "@types/node": "20.10.0"
    "@types/node": "20.14.2"
  },
  "dependencies": {
    "@apiclient.xyz/docker": "^1.0.103",
    "@push.rocks/npmextra": "^5.0.13",
    "@apiclient.xyz/docker": "^1.2.2",
    "@push.rocks/npmextra": "^5.0.17",
    "@push.rocks/projectinfo": "^5.0.1",
    "@push.rocks/qenv": "^6.0.5",
    "@push.rocks/smartcli": "^4.0.6",
    "@push.rocks/smartcli": "^4.0.11",
    "@push.rocks/smartdaemon": "^2.0.3",
    "@push.rocks/smartdelay": "^3.0.5",
    "@push.rocks/smartfile": "^11.0.14",
    "@push.rocks/smartjson": "^5.0.5",
    "@push.rocks/smartlog": "^3.0.3",
    "@push.rocks/smartfile": "^11.0.20",
    "@push.rocks/smartjson": "^5.0.20",
    "@push.rocks/smartlog": "^3.0.7",
    "@push.rocks/smartlog-destination-local": "^9.0.0",
    "@push.rocks/smartpath": "^5.0.5",
    "@push.rocks/smartshell": "^3.0.5",
    "@push.rocks/smartupdate": "^2.0.4",
    "@push.rocks/taskbuffer": "^3.0.10"
    "@push.rocks/taskbuffer": "^3.0.10",
    "@serve.zone/interfaces": "^1.0.74"
  },
  "files": [
    "ts/**/*",
@@ -54,5 +55,22 @@
  "browserslist": [
    "last 1 chrome versions"
  ],
  "type": "module"
  "type": "module",
  "keywords": [
    "server management",
    "devops",
    "automation",
    "docker",
    "configuration management",
    "daemon service",
    "continuous integration",
    "continuous deployment",
    "deployment automation",
    "service orchestration",
    "node.js",
    "task scheduling",
    "CLI",
    "logging",
    "server maintenance"
  ]
}

8815 pnpm-lock.yaml generated

File diff suppressed because it is too large

2 readme.hints.md Normal file
View File

@@ -0,0 +1,2 @@
- this package is part of serve.zone
- it is used to maintain and configure servers on the base OS level

301 readme.md
View File

@@ -1,40 +1,283 @@
# @losslessone/services/initzone/spark
sparks the servezone services
# @serve.zone/spark
A tool to maintain and configure servers on the base OS level for the Servezone infrastructure.
## Availability and Links
* [npmjs.org (npm package)](https://www.npmjs.com/package/@losslessone_private/spark)
* [gitlab.com (source)](https://gitlab.com/losslessone/services/initzone/spark)
* [github.com (source mirror)](https://github.com/losslessone/services/initzone/spark)
* [docs (typedoc)](https://losslessone/services/initzone.gitlab.io/spark/)
## Install
To install `@serve.zone/spark`, run the following command in your terminal:
## Status for master
Status Category | Status Badge
-- | --
GitLab Pipelines | [![pipeline status](https://gitlab.com/losslessone/services/initzone/spark/badges/master/pipeline.svg)](https://lossless.cloud)
GitLab Pipeline Test Coverage | [![coverage report](https://gitlab.com/losslessone/services/initzone/spark/badges/master/coverage.svg)](https://lossless.cloud)
npm | [![npm downloads per month](https://badgen.net/npm/dy/@losslessone_private/spark)](https://lossless.cloud)
Snyk | [![Known Vulnerabilities](https://badgen.net/snyk/losslessone/services/initzone/spark)](https://lossless.cloud)
TypeScript Support | [![TypeScript](https://badgen.net/badge/TypeScript/>=%203.x/blue?icon=typescript)](https://lossless.cloud)
node Support | [![node](https://img.shields.io/badge/node->=%2010.x.x-blue.svg)](https://nodejs.org/dist/latest-v10.x/docs/api/)
Code Style | [![Code Style](https://badgen.net/badge/style/prettier/purple)](https://lossless.cloud)
PackagePhobia (total standalone install weight) | [![PackagePhobia](https://badgen.net/packagephobia/install/@losslessone_private/spark)](https://lossless.cloud)
PackagePhobia (package size on registry) | [![PackagePhobia](https://badgen.net/packagephobia/publish/@losslessone_private/spark)](https://lossless.cloud)
BundlePhobia (total size when bundled) | [![BundlePhobia](https://badgen.net/bundlephobia/minzip/@losslessone_private/spark)](https://lossless.cloud)
```sh
npm install @serve.zone/spark --save
```
## Usage
## Contribution
### Getting Started
To use `@serve.zone/spark` in your project, you need to include and initiate it in your TypeScript project. Ensure you have TypeScript and the necessary build tools set up in your project.
We are always happy for code contributions. If you are not the code contributing type that is ok. Still, maintaining Open Source repositories takes considerable time and thought. If you like the quality of what we do and our modules are useful to you we would appreciate a little monthly contribution: You can [contribute one time](https://lossless.link/contribute-onetime) or [contribute monthly](https://lossless.link/contribute). :)
First, import `@serve.zone/spark`:
```typescript
import { Spark } from '@serve.zone/spark';
```
## Contribution
### Initializing Spark
Create an instance of the `Spark` class to start using Spark. This instance will serve as the main entry point for interacting with the Spark functionalities.
We are always happy for code contributions. If you are not the code contributing type that is ok. Still, maintaining Open Source repositories takes considerable time and thought. If you like the quality of what we do and our modules are useful to you we would appreciate a little monthly contribution: You can [contribute one time](https://lossless.link/contribute-onetime) or [contribute monthly](https://lossless.link/contribute). :)
```typescript
const sparkInstance = new Spark();
```
For further information read the linked docs at the top of this readme.
### Running Spark as a Daemon
To run Spark as a daemon, which is useful for maintaining and configuring servers on the base OS level, use the CLI feature bundled with Spark. This should ideally be handled outside of your code through a command-line terminal but can also be automated within your Node.js scripts if required.
## Legal
> MIT licensed | **©** [Task Venture Capital GmbH](https://task.vc)
| By using this npm module you agree to our [privacy policy](https://lossless.gmbH/privacy)
```shell
spark installdaemon
```
The command above sets up Spark as a system service, enabling it to run and maintain server configurations automatically.
### Updating Spark or Maintained Services
Spark can self-update and manage updates for its maintained services. Trigger an update check and process by calling the `updateServices` method on the Spark instance.
```typescript
await sparkInstance.sparkUpdateManager.updateServices();
```
### Managing Configuration and Logging
Spark allows for extensive configuration and logging customization. Use the `SparkLocalConfig` and logging features to tailor Spark's operation to your needs.
```typescript
// Accessing the local configuration
const localConfig = sparkInstance.sparkLocalConfig;
// Utilizing the logger for custom log messages
import { logger } from '@serve.zone/spark';
logger.log('info', 'Custom log message');
```
### Advanced Usage
`@serve.zone/spark` offers a suite of tools for detailed server and service management, including but not limited to task scheduling, daemon management, and service updates. Explore the `SparkTaskManager` for scheduling specific tasks, `SparkUpdateManager` for handling service updates, and `SparkLocalConfig` for configuration.
### Example: Scheduling Custom Tasks
```typescript
import { SparkTaskManager } from '@serve.zone/spark';
const sparkInstance = new Spark();
const myTask = {
name: 'customTask',
taskFunction: async () => {
console.log('Running custom task');
},
};
sparkInstance.sparkTaskManager.taskmanager.addAndScheduleTask(myTask, '* * * * * *');
```
The example above creates a simple task that logs a message every second, demonstrating how to use Spark's task manager for custom scheduled tasks.
### Detailed Service Management
For advanced configurations, including Docker and service management:
- Use `SparkUpdateManager` to handle Docker image updates, service creation, and management.
- Access and modify Docker and service configurations through Spark's integration with configuration files and environment variables.
```typescript
// Managing Docker services with Spark
await sparkInstance.sparkUpdateManager.dockerHost.someDockerMethod();
// Example: Creating a Docker service
const newServiceDefinition = {...};
await sparkInstance.sparkUpdateManager.createService(newServiceDefinition);
```
### CLI Commands
Spark provides several CLI commands to interact with and manage the system services:
#### Installing Spark as a Daemon
```shell
spark installdaemon
```
Sets up Spark as a system service to maintain server configurations automatically.
#### Updating the Daemon
```shell
spark updatedaemon
```
Updates the daemon service if a new version is available.
#### Running Spark as Daemon
```shell
spark asdaemon
```
Runs Spark in daemon mode, which is suitable for executing automated tasks.
#### Viewing Logs
```shell
spark logs
```
Shows the logs of the Spark daemon service.
#### Cleaning Up Services
```shell
spark prune
```
Stops and cleans up all Docker services (stacks, networks, secrets, etc.) and prunes the Docker system.
### Programmatic Daemon Management
You can also manage the daemon programmatically as shown in the following examples:
```typescript
import { SmartDaemon } from '@push.rocks/smartdaemon';
import { Spark } from '@serve.zone/spark';
const sparkInstance = new Spark();
const smartDaemon = new SmartDaemon();
const startDaemon = async () => {
const sparkService = await smartDaemon.addService({
name: 'spark',
version: sparkInstance.sparkInfo.projectInfo.version,
command: 'spark asdaemon',
description: 'Spark daemon service',
workingDir: '/path/to/project',
});
await sparkService.save();
await sparkService.enable();
await sparkService.start();
};
const updateDaemon = async () => {
const sparkService = await smartDaemon.addService({
name: 'spark',
version: sparkInstance.sparkInfo.projectInfo.version,
command: 'spark asdaemon',
description: 'Spark daemon service',
workingDir: '/path/to/project',
});
await sparkService.reload();
};
startDaemon();
updateDaemon();
```
This illustrates how to initiate and update the Spark daemon using the `SmartDaemon` class from `@push.rocks/smartdaemon`.
### Configuration Management
Extensive configuration management is possible through the `SparkLocalConfig` and other configuration classes. This feature allows you to make your application's behavior adaptable based on different environments and requirements.
```typescript
// Example on setting local config
import { SparkLocalConfig } from '@serve.zone/spark';
const localConfig = new SparkLocalConfig(sparkInstance);
await localConfig.kvStore.set('someKey', 'someValue');
// Retrieving a value from local config
const someConfigValue = await localConfig.kvStore.get('someKey');
console.log(someConfigValue); // Outputs: someValue
```
### Detailed Log Management
Logging is a crucial aspect of any automation tool, and `@serve.zone/spark` offers rich logging functionality through its built-in logging library.
```typescript
import { logger, Spark } from '@serve.zone/spark';
const sparkInstance = new Spark();
logger.log('info', 'Spark instance created.');
// Using logger in various levels of severity
logger.log('debug', 'This is a debug message');
logger.log('warn', 'This is a warning message');
logger.log('error', 'This is an error message');
logger.log('ok', 'This is a success message');
```
### Real-World Scenarios
#### Automated System Update and Restart
In real-world scenarios, you might want to automate system updates and reboots to ensure your services are running the latest security patches and features.
```typescript
import { Spark } from '@serve.zone/spark';
import { SmartShell } from '@push.rocks/smartshell';
const sparkInstance = new Spark();
const shell = new SmartShell({ executor: 'bash' });
const updateAndRestart = async () => {
await shell.exec('apt-get update && apt-get upgrade -y');
console.log('System updated.');
await shell.exec('reboot');
};
sparkInstance.sparkTaskManager.taskmanager.addAndScheduleTask(
{ name: 'updateAndRestart', taskFunction: updateAndRestart },
'0 3 * * 7' // Every Sunday at 3 AM
);
```
This example demonstrates creating and scheduling a task to update and restart the server every Sunday at 3 AM using Spark's task management capabilities.
#### Integrating with Docker for Service Deployment
Spark's tight integration with Docker makes it an excellent tool for deploying containerized applications across your infrastructure.
```typescript
import { Spark } from '@serve.zone/spark';
import { DockerHost } from '@apiclient.xyz/docker';
const sparkInstance = new Spark();
const dockerHost = new DockerHost({});
const deployService = async () => {
const image = await dockerHost.pullImage('my-docker-repo/my-service:latest');
const newService = await dockerHost.createService({
name: 'my-service',
image,
ports: ['80:8080'],
environmentVariables: {
NODE_ENV: 'production',
},
});
console.log(`Service ${newService.name} deployed.`);
};
deployService();
```
This example demonstrates how to pull a Docker image and deploy it as a new service in your infrastructure using Spark's Docker integration.
### Managing Secrets
Managing secrets and sensitive data is crucial in any configuration and automation tool. Spark's integration with Docker allows you to handle secrets securely.
```typescript
import { Spark, SparkUpdateManager } from '@serve.zone/spark';
import { DockerSecret } from '@apiclient.xyz/docker';
const sparkInstance = new Spark();
const updateManager = new SparkUpdateManager(sparkInstance);
const createDockerSecret = async () => {
const secret = await DockerSecret.createSecret(updateManager.dockerHost, {
name: 'dbPassword',
contentArg: 'superSecretPassword',
});
console.log(`Secret ${secret.Spec.Name} created.`);
};
createDockerSecret();
```
This example shows how to create a Docker secret using Spark's `SparkUpdateManager` class, ensuring that sensitive information is securely stored and managed.
### Conclusion
`@serve.zone/spark` is a comprehensive toolkit for orchestrating and managing server environments and Docker-based services. By leveraging its CLI and programmatic interfaces, you can automate and streamline server operations, configurations, updates, and task scheduling, ensuring your infrastructure is responsive, updated, and maintained efficiently.

View File

@@ -3,6 +3,6 @@
 */
export const commitinfo = {
  name: '@serve.zone/spark',
  version: '1.0.83',
  description: 'sparks the servezone services'
  version: '1.0.88',
  description: 'A comprehensive tool for maintaining and configuring servers, integrating with Docker and supporting advanced task scheduling, targeted at the Servezone infrastructure.'
}

View File

@@ -7,6 +7,9 @@ export class SparkLocalConfig {
  constructor(sparkRefArg: Spark) {
    this.sparkRef = sparkRefArg;
    this.kvStore = new plugins.npmextra.KeyValueStore('custom', 'spark');
    this.kvStore = new plugins.npmextra.KeyValueStore({
      typeArg: 'userHomeDir',
      identityArg: 'spark',
    });
  }
}
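The hunk above swaps the positional `KeyValueStore('custom', 'spark')` constructor for an options object. As a minimal sketch of what consuming code looks like after this change, assuming the `kvStore.set`/`kvStore.get` usage already shown in the readme diff (the key name here is purely illustrative):

```typescript
import * as npmextra from '@push.rocks/npmextra';

// same options shape as the constructor in the hunk above
const kvStore = new npmextra.KeyValueStore({
  typeArg: 'userHomeDir', // persist the store under the user's home directory
  identityArg: 'spark', // namespace the data for spark
});

// 'lastUpdateCheck' is an illustrative key, not one spark itself uses
await kvStore.set('lastUpdateCheck', new Date().toISOString());
const lastCheck = await kvStore.get('lastUpdateCheck');
console.log(lastCheck);
```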

View File

@@ -9,7 +9,7 @@ export class SparkUpdateManager {
  public smartupdate: plugins.smartupdate.SmartUpdate;
  constructor(sparkrefArg: Spark) {
    this.sparkRef = sparkrefArg;
    this.dockerHost = new plugins.docker.DockerHost();
    this.dockerHost = new plugins.docker.DockerHost({});
    this.smartupdate = new plugins.smartupdate.SmartUpdate();
  }
@@ -27,9 +27,6 @@ export class SparkUpdateManager {
        plugins.path.join(paths.homeDir, 'serve.zone/spark/spark.json')
      ))
    ) {
      const sparkJson = plugins.smartfile.fs.toObjectSync(
        plugins.path.join(paths.homeDir, 'serve.zone/spark/spark.json')
      );
      const services: Array<{
        name: string;
        image: string;
@@ -38,16 +35,45 @@
        environment: string;
        secretJson: any;
      }> = [];
      for (const serviceKey of Object.keys(sparkJson.services)) {
        services.push({
          name: serviceKey,
          image: sparkJson.services[serviceKey].image,
          url: sparkJson.services[serviceKey].url,
          environment: sparkJson.services[serviceKey].environment,
          port: sparkJson.services[serviceKey].port,
          secretJson: sparkJson.services[serviceKey].secretJson,
        });
      }
      // lets add coreflow
      services.push({
        name: `coreflow`,
        image: `code.foss.global/serve.zone/coreflow`,
        url: `coreflow`,
        environment: `production`,
        port: `3000`,
        secretJson: {
          SERVEZONE_PORT: `3000`,
          SERVEZONE_ENVIRONMENT: `production`,
        },
      });
      services.push({
        name: `coretraffic`,
        image: `code.foss.global/serve.zone/coretraffic`,
        url: `coreflow`,
        environment: `production`,
        port: `3000`,
        secretJson: {
          SERVEZONE_PORT: `3000`,
          SERVEZONE_ENVIRONMENT: `production`,
        },
      });
      services.push({
        name: `corelog`,
        image: `code.foss.global/serve.zone/corelog`,
        url: `coreflow`,
        environment: `production`,
        port: `3000`,
        secretJson: {
          SERVEZONE_PORT: `3000`,
          SERVEZONE_ENVIRONMENT: `production`,
        },
      });
      // lets add coretraffic
      for (const service of services) {
        const existingService = await plugins.docker.DockerService.getServiceByName(
          this.dockerHost,
@@ -61,6 +87,7 @@ export class SparkUpdateManager {
        const needsUpdate: boolean = await existingService.needsUpdate();
        if (!needsUpdate) {
          logger.log('info', `not needing update.`);
          // we simply return here to end the function
          return;
        }
        logger.log('ok', `${service.name} needs to be updated!`);
@@ -74,7 +101,9 @@
        const newServiceImage = await plugins.docker.DockerImage.createFromRegistry(
          this.dockerHost,
          {
            imageUrl: service.image,
            creationObject: {
              imageUrl: service.image,
            },
          }
        );
        const newServiceSecret = await plugins.docker.DockerSecret.createSecret(this.dockerHost, {
@@ -92,8 +121,9 @@
          secrets: [newServiceSecret],
          ports: [`${service.port}:${service.secretJson.SERVEZONE_PORT}`],
        });
        logger.log('success', 'updated all services!');
        logger.log('ok', `updated service >>${newService.Spec.Name}<<!`);
      }
      logger.log('success', `updated ${services.length} services!`);
    }
  }
}
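Stripped of the hardcoded service definitions, the update pass above reduces to one check per service. The following is a hedged sketch of that flow, reusing only the `@apiclient.xyz/docker` calls that appear in the diff; the secret and service creation steps are elided, and `serviceDef` is an illustrative stand-in for one entry of the `services` array:

```typescript
import * as docker from '@apiclient.xyz/docker';

const dockerHost = new docker.DockerHost({}); // same constructor as in the diff

const checkAndUpdate = async (serviceDef: { name: string; image: string }) => {
  // look up the running service by name
  const existingService = await docker.DockerService.getServiceByName(
    dockerHost,
    serviceDef.name,
  );
  // skip work when the service exists and is current, mirroring the early return above
  if (existingService && !(await existingService.needsUpdate())) {
    return;
  }
  // pull a fresh image through the new creationObject wrapper
  const newServiceImage = await docker.DockerImage.createFromRegistry(dockerHost, {
    creationObject: { imageUrl: serviceDef.image },
  });
  // ...then recreate the secret and the service itself, as SparkUpdateManager does
};
```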

View File

@@ -1,15 +1,7 @@
import * as plugins from './spark.plugins.js';
import * as paths from './spark.paths.js';
import { commitinfo } from './00_commitinfo_data.js';
const projectInfoNpm = new plugins.projectinfo.ProjectinfoNpm(paths.packageDir);
export const logger = new plugins.smartlog.Smartlog({
  logContext: {
    environment: 'production',
    runtime: 'node',
    zone: 'baremetal',
    company: null,
    companyunit: null,
    containerName: 'spark',
  }
});
export const logger = plugins.smartlog.Smartlog.createForCommitinfo(commitinfo);
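The refactor above replaces the hand-assembled `logContext` with `Smartlog.createForCommitinfo`, deriving the logger's identity from the generated `commitinfo` object shown a few files up. A hedged sketch of the resulting pattern, using direct imports instead of the `plugins` barrel (the log line itself is illustrative):

```typescript
import * as smartlog from '@push.rocks/smartlog';
import { commitinfo } from './00_commitinfo_data.js';

// commitinfo carries name, version and description, as in 00_commitinfo_data.ts above
export const logger = smartlog.Smartlog.createForCommitinfo(commitinfo);

// same level strings as the readme examples ('info', 'ok', 'warn', 'error')
logger.log('info', `starting ${commitinfo.name}@${commitinfo.version}`);
```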

View File

@@ -3,6 +3,11 @@ import * as path from 'path';
export { path };
// @serve.zone scope
import * as servezoneInterfaces from '@serve.zone/interfaces';
export { servezoneInterfaces };
// @apiclient.xyz scope
import * as docker from '@apiclient.xyz/docker';