Compare commits
No commits in common. "master" and "v1.0.87" have entirely different histories.
changelog.md (40 lines changed)
@@ -1,40 +0,0 @@
-# Changelog
-
-## 2024-12-20 - 1.2.2 - fix(core)
-Refactored configuration management classes and improved service update handling
-
-- Replaced SparkLocalConfig with SparkConfig for configuration management.
-- Improved service handling and update check logic.
-- Consolidated service definition and update logic for better maintainability.
-
-## 2024-12-19 - 1.2.1 - fix(taskmanager)
-Remove checkinSlackTask from SparkTaskManager for streamlined task management
-
-- checkinSlackTask has been removed from the task manager class.
-- Removal of the slack check-in task allows the system to focus on essential update tasks.
-
-## 2024-12-18 - 1.2.0 - feat(core)
-Initial commit of the Spark project with core functionalities for server management and integration with Docker.
-
-- Add core functionalities for server maintenance and configuration.
-- Integrate Docker for advanced task scheduling and service management.
-- Provide CLI commands for daemon management and task execution.
-
-## 2024-12-18 - 1.1.0 - feat(core)
-Update package dependencies and improve API integration.
-
-- Updated devDependencies and dependencies in package.json.
-- Integrated new package @serve.zone/api.
-- Updated identityArg in SparkLocalConfig for userHomeDir kvStore.
-
-## 2024-06-13 - 1.0.85 to 1.0.90 - Core Updates
-Routine updates and fixes to core functionality.
-
-- Updated core component throughout versions for enhanced stability
-- Incremental improvements applied on versions 1.0.85 to 1.0.90
-
-## 2024-05-08 - 1.0.82 to 1.0.85 - Core Enhancements
-Consistent updates made to improve core operations.
-
-- Updates focused on core functionality for improved performance
-- Series of updates applied from versions 1.0.82 to 1.0.85
@@ -5,7 +5,7 @@
     "githost": "gitlab.com",
     "gitscope": "losslessone/services/initzone",
     "gitrepo": "spark",
-    "description": "A comprehensive tool for maintaining and configuring servers, integrating with Docker and supporting advanced task scheduling, targeted at the Servezone infrastructure and used by @serve.zone/cloudly as a cluster node server system manager.",
+    "description": "A tool to maintain and configure servers on the base OS level for the Servezone infrastructure.",
     "npmPackagename": "@losslessone_private/spark",
     "license": "MIT",
    "projectDomain": "https://lossless.one",
@@ -20,15 +20,7 @@
       "continuous deployment",
       "deployment automation",
       "service orchestration",
-      "node.js",
-      "task scheduling",
-      "CLI",
-      "logging",
-      "server maintenance",
-      "serve.zone",
-      "cluster management",
-      "system manager",
-      "server configuration"
+      "node.js"
     ]
   }
 },
@@ -36,8 +28,5 @@
   "npmGlobalTools": [],
   "npmAccessLevel": "private",
   "npmRegistryUrl": "verdaccio.lossless.one"
-  },
-  "tsdoc": {
-    "legal": "\n## License and Legal Information\n\nThis repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. \n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or if you require further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n"
   }
 }
package.json (35 lines changed)
@@ -1,8 +1,8 @@
 {
   "name": "@serve.zone/spark",
-  "version": "1.2.2",
+  "version": "1.0.87",
   "private": false,
-  "description": "A comprehensive tool for maintaining and configuring servers, integrating with Docker and supporting advanced task scheduling, targeted at the Servezone infrastructure and used by @serve.zone/cloudly as a cluster node server system manager.",
+  "description": "A tool to maintain and configure servers on the base OS level for the Servezone infrastructure.",
   "main": "dist_ts/index.js",
   "typings": "dist_ts/index.d.ts",
   "author": "Task Venture Capital GmbH",
@@ -16,30 +16,29 @@
     "spark": "./cli.js"
   },
   "devDependencies": {
-    "@git.zone/tsbuild": "^2.2.0",
-    "@git.zone/tsrun": "^1.3.3",
+    "@git.zone/tsbuild": "^2.1.80",
+    "@git.zone/tsrun": "^1.2.39",
     "@git.zone/tstest": "^1.0.60",
-    "@push.rocks/tapbundle": "^5.5.3",
-    "@types/node": "22.10.2"
+    "@push.rocks/tapbundle": "^5.0.4",
+    "@types/node": "20.14.2"
   },
   "dependencies": {
-    "@apiclient.xyz/docker": "^1.2.7",
-    "@push.rocks/npmextra": "^5.1.2",
+    "@apiclient.xyz/docker": "^1.2.2",
+    "@push.rocks/npmextra": "^5.0.17",
     "@push.rocks/projectinfo": "^5.0.1",
-    "@push.rocks/qenv": "^6.1.0",
+    "@push.rocks/qenv": "^6.0.5",
     "@push.rocks/smartcli": "^4.0.11",
     "@push.rocks/smartdaemon": "^2.0.3",
     "@push.rocks/smartdelay": "^3.0.5",
-    "@push.rocks/smartfile": "^11.0.23",
+    "@push.rocks/smartfile": "^11.0.20",
     "@push.rocks/smartjson": "^5.0.20",
     "@push.rocks/smartlog": "^3.0.7",
     "@push.rocks/smartlog-destination-local": "^9.0.0",
     "@push.rocks/smartpath": "^5.0.5",
-    "@push.rocks/smartshell": "^3.2.2",
+    "@push.rocks/smartshell": "^3.0.5",
     "@push.rocks/smartupdate": "^2.0.4",
     "@push.rocks/taskbuffer": "^3.0.10",
-    "@serve.zone/api": "^4.5.1",
-    "@serve.zone/interfaces": "^4.5.1"
+    "@serve.zone/interfaces": "^1.0.74"
   },
   "files": [
     "ts/**/*",
@@ -68,14 +67,6 @@
     "continuous deployment",
     "deployment automation",
     "service orchestration",
-    "node.js",
-    "task scheduling",
-    "CLI",
-    "logging",
-    "server maintenance",
-    "serve.zone",
-    "cluster management",
-    "system manager",
-    "server configuration"
+    "node.js"
   ]
 }
pnpm-lock.yaml (generated, 5720 lines changed)
File diff suppressed because it is too large.
@@ -1,2 +1,2 @@
-- make sure to mention that this package is part of serve.zone and **spark's main purpose** is the be used by @serve.zone/cloudly as a cluster node server system manager.
+- this package is part of serve.zone
 - it is used to maintain and configure servers on the base OS level
readme.md (227 lines changed)
@@ -1,35 +1,31 @@
 # @serve.zone/spark
-A comprehensive tool for maintaining and configuring servers, integrating with Docker and supporting advanced task scheduling, targeted at the serve.zone infrastructure. It's mainly designed to be utilized by @serve.zone/cloudly as a cluster node server system manager, maintaining and configuring servers on the base OS level.
+sparks the servezone services
 
 ## Install
 To install `@serve.zone/spark`, run the following command in your terminal:
 
 ```sh
 npm install @serve.zone/spark --save
 ```
 
-Ensure you have both Node.js and npm installed on your machine.
-
 ## Usage
 
 ### Getting Started
 To use `@serve.zone/spark` in your project, you need to include and initiate it in your TypeScript project. Ensure you have TypeScript and the necessary build tools set up in your project.
 
 First, import `@serve.zone/spark`:
 
 ```typescript
 import { Spark } from '@serve.zone/spark';
 ```
 
 ### Initializing Spark
-Create an instance of the `Spark` class to start using Spark. This instance will serve as the main entry point for interacting with Spark functionalities.
+Create an instance of the `Spark` class to start using Spark. This instance will serve as the main entry point for interacting with the Spark functionalities.
 
 ```typescript
 const sparkInstance = new Spark();
 ```
 
 ### Running Spark as a Daemon
-To run Spark as a daemon, which is useful for maintaining and configuring servers at the OS level, you can use the CLI feature bundled with Spark. This should ideally be handled outside of your code through a command-line terminal but can also be automated within your Node.js scripts if required.
+To run Spark as a daemon, which is useful for maintaining and configuring servers on the base OS level, use the CLI feature bundled with Spark. This should ideally be handled outside of your code through a command-line terminal but can also be automated within your Node.js scripts if required.
 
 ```shell
 spark installdaemon
@@ -45,7 +41,7 @@ await sparkInstance.sparkUpdateManager.updateServices();
 ```
 
 ### Managing Configuration and Logging
-Spark allows extensive configuration and logging customization. Use the `SparkLocalConfig` and logging features to tailor Spark's operation to your needs.
+Spark allows for extensive configuration and logging customization. Use the `SparkLocalConfig` and logging features to tailor Spark's operation to your needs.
 
 ```typescript
 // Accessing the local configuration
@@ -58,7 +54,7 @@ logger.log('info', 'Custom log message');
 ```
 
 ### Advanced Usage
-`@serve.zone/spark` offers tools for detailed server and service management, including but not limited to task scheduling, daemon management, and service updates. Explore the `SparkTaskManager` for scheduling specific tasks, `SparkUpdateManager` for handling service updates, and `SparkLocalConfig` for configuration.
+`@serve.zone/spark` offers a suite of tools for detailed server and service management, including but not limited to task scheduling, daemon management, and service updates. Explore the `SparkTaskManager` for scheduling specific tasks, `SparkUpdateManager` for handling service updates, and `SparkLocalConfig` for configuration.
 
 ### Example: Scheduling Custom Tasks
 ```typescript
@@ -77,8 +73,8 @@ sparkInstance.sparkTaskManager.taskmanager.addAndScheduleTask(myTask, '* * * * *
 
 The example above creates a simple task that logs a message every second, demonstrating how to use Spark's task manager for custom scheduled tasks.
 
-### Detailed Service Management
-For advanced configurations, including Docker and service management, you can utilize the following patterns:
+### Advanced Configuration
+For advanced configurations, including Docker and service management:
 
 - Use `SparkUpdateManager` to handle Docker image updates, service creation, and management.
 - Access and modify Docker and service configurations through Spark's integration with configuration files and environment variables.
@@ -92,209 +88,6 @@ const newServiceDefinition = {...};
 await sparkInstance.sparkUpdateManager.createService(newServiceDefinition);
 ```
 
-### CLI Commands
-Spark provides several CLI commands to interact with and manage the system services:
-
-#### Installing Spark as a Daemon
-```shell
-spark installdaemon
-```
-
-Sets up Spark as a system service to maintain server configurations automatically.
-
-#### Updating the Daemon
-```shell
-spark updatedaemon
-```
-
-Updates the daemon service if a new version is available.
-
-#### Running Spark as Daemon
-```shell
-spark asdaemon
-```
-
-Runs Spark in daemon mode, which is suitable for executing automated tasks.
-
-#### Viewing Logs
-```shell
-spark logs
-```
-
-Views the logs of the Spark daemon service.
-
-#### Cleaning Up Services
-```shell
-spark prune
-```
-
-Stops and cleans up all Docker services (stacks, networks, secrets, etc.) and prunes the Docker system.
-
-### Programmatic Daemon Management
-You can also manage the daemon programmatically:
-
-```typescript
-import { SmartDaemon } from '@push.rocks/smartdaemon';
-import { Spark } from '@serve.zone/spark';
-
-const sparkInstance = new Spark();
-const smartDaemon = new SmartDaemon();
-
-const startDaemon = async () => {
-  const sparkService = await smartDaemon.addService({
-    name: 'spark',
-    version: sparkInstance.sparkInfo.projectInfo.version,
-    command: 'spark asdaemon',
-    description: 'Spark daemon service',
-    workingDir: '/path/to/project',
-  });
-  await sparkService.save();
-  await sparkService.enable();
-  await sparkService.start();
-};
-
-const updateDaemon = async () => {
-  const sparkService = await smartDaemon.addService({
-    name: 'spark',
-    version: sparkInstance.sparkInfo.projectInfo.version,
-    command: 'spark asdaemon',
-    description: 'Spark daemon service',
-    workingDir: '/path/to/project',
-  });
-  await sparkService.reload();
-};
-
-startDaemon();
-updateDaemon();
-```
-
-This illustrates how to initiate and update the Spark daemon using the `SmartDaemon` class from `@push.rocks/smartdaemon`.
-
-### Configuration Management
-Extensive configuration management is possible through the `SparkLocalConfig` and other configuration classes. This feature allows you to make your application's behavior adaptable based on different environments and requirements.
-
-```typescript
-// Example on setting local config
-import { SparkLocalConfig } from '@serve.zone/spark';
-
-const localConfig = new SparkLocalConfig(sparkInstance);
-await localConfig.kvStore.set('someKey', 'someValue');
-
-// Retrieving a value from local config
-const someConfigValue = await localConfig.kvStore.get('someKey');
-
-console.log(someConfigValue); // Outputs: someValue
-```
-
-### Detailed Log Management
-Logging is a crucial aspect of any automation tool, and `@serve.zone/spark` offers rich logging functionality through its built-in logging library.
-
-```typescript
-import { logger, Spark } from '@serve.zone/spark';
-
-const sparkInstance = new Spark();
-
-logger.log('info', 'Spark instance created.');
-
-// Using logger in various levels of severity
-logger.log('debug', 'This is a debug message');
-logger.log('warn', 'This is a warning message');
-logger.log('error', 'This is an error message');
-logger.log('ok', 'This is a success message');
-```
-
-### Real-World Scenarios
-
-#### Automated System Update and Restart
-In real-world scenarios, you might want to automate system updates and reboots to ensure your services are running the latest security patches and features.
-
-```typescript
-import { Spark } from '@serve.zone/spark';
-import { SmartShell } from '@push.rocks/smartshell';
-
-const sparkInstance = new Spark();
-const shell = new SmartShell({ executor: 'bash' });
-
-const updateAndRestart = async () => {
-  await shell.exec('apt-get update && apt-get upgrade -y');
-  console.log('System updated.');
-  await shell.exec('reboot');
-};
-
-sparkInstance.sparkTaskManager.taskmanager.addAndScheduleTask(
-  { name: 'updateAndRestart', taskFunction: updateAndRestart },
-  '0 3 * * 7' // Every Sunday at 3 AM
-);
-```
-
-This example demonstrates creating and scheduling a task to update and restart the server every Sunday at 3 AM using Spark's task management capabilities.
-
-#### Integrating with Docker for Service Deployment
-Spark's tight integration with Docker makes it an excellent tool for deploying containerized applications across your infrastructure.
-
-```typescript
-import { Spark } from '@serve.zone/spark';
-import { DockerHost } from '@apiclient.xyz/docker';
-
-const sparkInstance = new Spark();
-const dockerHost = new DockerHost({});
-
-const deployService = async () => {
-  const image = await dockerHost.pullImage('my-docker-repo/my-service:latest');
-  const newService = await dockerHost.createService({
-    name: 'my-service',
-    image,
-    ports: ['80:8080'],
-    environmentVariables: {
-      NODE_ENV: 'production',
-    },
-  });
-  console.log(`Service ${newService.name} deployed.`);
-};
-
-deployService();
-```
-
-This example demonstrates how to pull a Docker image and deploy it as a new service in your infrastructure using Spark's Docker integration.
-
-### Managing Secrets
-Managing secrets and sensitive data is crucial in any configuration and automation tool. Spark's integration with Docker allows you to handle secrets securely.
-
-```typescript
-import { Spark, SparkUpdateManager } from '@serve.zone/spark';
-import { DockerSecret } from '@apiclient.xyz/docker';
-
-const sparkInstance = new Spark();
-const updateManager = new SparkUpdateManager(sparkInstance);
-
-const createDockerSecret = async () => {
-  const secret = await DockerSecret.createSecret(updateManager.dockerHost, {
-    name: 'dbPassword',
-    contentArg: 'superSecretPassword',
-  });
-  console.log(`Secret ${secret.Spec.Name} created.`);
-};
-
-createDockerSecret();
-```
-
-This example shows how to create a Docker secret using Spark's `SparkUpdateManager` class, ensuring that sensitive information is securely stored and managed.
-
-## License and Legal Information
-
-This repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository.
-
-**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.
-
-### Trademarks
-
-This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.
-
-### Company Information
-
-Task Venture Capital GmbH
-Registered at District court Bremen HRB 35230 HB, Germany
-
-For any legal inquiries or if you require further information, please contact us via email at hello@task.vc.
-
-By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.
+### Conclusion
+`@serve.zone/spark` provides a comprehensive toolkit for orchestrating and managing server environments and Docker-based services. By leveraging its CLI and programmatic interfaces, you can automate and streamline server operations, configurations, updates, and task scheduling, ensuring your infrastructure is responsive, updated, and maintained efficiently.
@@ -1,8 +1,8 @@
 /**
- * autocreated commitinfo by @push.rocks/commitinfo
+ * autocreated commitinfo by @pushrocks/commitinfo
  */
 export const commitinfo = {
   name: '@serve.zone/spark',
-  version: '1.2.2',
-  description: 'A comprehensive tool for maintaining and configuring servers, integrating with Docker and supporting advanced task scheduling, targeted at the Servezone infrastructure and used by @serve.zone/cloudly as a cluster node server system manager.'
+  version: '1.0.87',
+  description: 'A tool to maintain and configure servers on the base OS level for the Servezone infrastructure.'
 }
ts/spark.classes.config.ts
@@ -3,12 +3,7 @@ import { Spark } from './index.js';
 
 export class SparkConfig {
   public sparkRef: Spark;
-  public kvStore: plugins.npmextra.KeyValueStore;
   constructor(sparkRefArg: Spark) {
     this.sparkRef = sparkRefArg;
-    this.kvStore = new plugins.npmextra.KeyValueStore({
-      typeArg: 'userHomeDir',
-      identityArg: 'servezone_spark',
-    });
   }
 }
ts/spark.classes.localconfig.ts (new file, 15 lines)
@@ -0,0 +1,15 @@
+import * as plugins from './spark.plugins.js';
+import { Spark } from './index.js';
+
+export class SparkLocalConfig {
+  public sparkRef: Spark;
+  private kvStore: plugins.npmextra.KeyValueStore;
+
+  constructor(sparkRefArg: Spark) {
+    this.sparkRef = sparkRefArg;
+    this.kvStore = new plugins.npmextra.KeyValueStore({
+      typeArg: 'userHomeDir',
+      identityArg: 'spark',
+    });
+  }
+}
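For context: the `KeyValueStore` constructed here persists key/value pairs under the user's home directory. A minimal usage sketch, reusing the exact constructor arguments above; the `writeKey`/`readKey` calls mirror what the master-side CLI hunk further down does with its kvStore, and the standalone setup is illustrative only:

```typescript
import * as npmextra from '@push.rocks/npmextra';

const demo = async () => {
  // same constructor arguments as in SparkLocalConfig above
  const kvStore = new npmextra.KeyValueStore({
    typeArg: 'userHomeDir', // persist under the current user's home directory
    identityArg: 'spark', // namespace for this store on disk
  });

  // writeKey/readKey are the same calls the CLI code in this diff relies on
  await kvStore.writeKey('mode', 'cloudly');
  const mode = await kvStore.readKey('mode'); // -> 'cloudly'
  console.log(mode);
};
```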
ts/spark.classes.spark.ts
@@ -1,23 +1,23 @@
 import * as plugins from './spark.plugins.js';
 import { SparkTaskManager } from './spark.classes.taskmanager.js';
 import { SparkInfo } from './spark.classes.info.js';
-import { SparkServicesManager } from './spark.classes.updatemanager.js';
+import { SparkUpdateManager } from './spark.classes.updatemanager.js';
 import { logger } from './spark.logging.js';
-import { SparkConfig } from './spark.classes.config.js';
+import { SparkLocalConfig } from './spark.classes.localconfig.js';
 
 export class Spark {
   public smartdaemon: plugins.smartdaemon.SmartDaemon;
-  public sparkConfig: SparkConfig;
+  public sparkLocalConfig: SparkLocalConfig;
   public sparkTaskManager: SparkTaskManager;
   public sparkInfo: SparkInfo;
-  public sparkUpdateManager: SparkServicesManager;
+  public sparkUpdateManager: SparkUpdateManager;
 
   constructor() {
     this.smartdaemon = new plugins.smartdaemon.SmartDaemon();
-    this.sparkConfig = new SparkConfig(this);
+    this.sparkLocalConfig = new SparkLocalConfig(this);
     this.sparkInfo = new SparkInfo(this);
     this.sparkTaskManager = new SparkTaskManager(this);
-    this.sparkUpdateManager = new SparkServicesManager(this);
+    this.sparkUpdateManager = new SparkUpdateManager(this);
   }
 
   public async daemonStart() {
ts/spark.classes.taskmanager.ts
@@ -8,14 +8,25 @@ export class SparkTaskManager {
   public taskmanager: plugins.taskbuffer.TaskManager;
 
   // tasks
+  public checkinSlackTask: plugins.taskbuffer.Task;
   public updateSpark: plugins.taskbuffer.Task;
   public updateHost: plugins.taskbuffer.Task;
-  public updateServices: plugins.taskbuffer.Task;
+  public updateCloudly: plugins.taskbuffer.Task;
 
   constructor(sparkRefArg: Spark) {
     this.sparkRef = sparkRefArg;
     this.taskmanager = new plugins.taskbuffer.TaskManager();
 
+    // checkinOnSlack
+    this.checkinSlackTask = new plugins.taskbuffer.Task({
+      name: 'checkinSlack',
+      taskFunction: async () => {
+        logger.log('ok', 'running hourly checkin now');
+
+        logger.log('info', 'completed hourly checkin');
+      },
+    });
+
     // updateSpark
     this.updateSpark = new plugins.taskbuffer.Task({
       name: 'updateSpark',
@@ -56,10 +67,7 @@ export class SparkTaskManager {
       },
     });
 
-    /**
-     * only being run when mode is cloudly
-     */
-    this.updateServices = new plugins.taskbuffer.Task({
+    this.updateCloudly = new plugins.taskbuffer.Task({
       name: 'updateCloudly',
       taskFunction: async () => {
         logger.log('info', 'now running updateCloudly task');
@@ -72,9 +80,10 @@ export class SparkTaskManager {
   * start the taskmanager
   */
  public async start() {
-    this.taskmanager.addAndScheduleTask(this.updateServices, '30 */2 * * * *');
+    this.taskmanager.addAndScheduleTask(this.checkinSlackTask, '0 0 * * * *');
     this.taskmanager.addAndScheduleTask(this.updateSpark, '0 * * * * *');
     this.taskmanager.addAndScheduleTask(this.updateHost, '0 0 0 * * *');
+    this.taskmanager.addAndScheduleTask(this.updateCloudly, '30 */2 * * * *');
     this.taskmanager.start();
   }
 
@@ -82,9 +91,10 @@ export class SparkTaskManager {
   * stops the taskmanager
   */
  public async stop() {
+    this.taskmanager.descheduleTask(this.checkinSlackTask);
     this.taskmanager.descheduleTask(this.updateSpark);
     this.taskmanager.descheduleTask(this.updateHost);
-    this.taskmanager.descheduleTask(this.updateServices);
+    this.taskmanager.descheduleTask(this.updateCloudly);
     this.taskmanager.stop();
   }
 }
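The schedules passed to `addAndScheduleTask` are six-field cron expressions with a leading seconds field: `0 0 * * * *` fires at second 0, minute 0 of every hour (the checkin schedule), `0 * * * * *` at second 0 of every minute (updateSpark), and `30 */2 * * * *` at second 30 of every second minute (updateCloudly). A minimal sketch of the same pattern, assuming a hypothetical `heartbeat` task and the taskbuffer API used in this file:

```typescript
import * as taskbuffer from '@push.rocks/taskbuffer';

// hypothetical task for illustration; mirrors the Task shape used above
const heartbeat = new taskbuffer.Task({
  name: 'heartbeat',
  taskFunction: async () => {
    console.log('still alive');
  },
});

const taskManager = new taskbuffer.TaskManager();
// schedule hourly, at second 0 of minute 0 (same cadence as checkinSlackTask)
taskManager.addAndScheduleTask(heartbeat, '0 0 * * * *');
taskManager.start();
```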
ts/spark.classes.updatemanager.ts
@@ -3,26 +3,10 @@ import * as paths from './spark.paths.js';
 import { Spark } from './spark.classes.spark.js';
 import { logger } from './spark.logging.js';
 
-/**
- * this class takes care of updating the services that are managed by spark
- */
-export class SparkServicesManager {
+export class SparkUpdateManager {
   public sparkRef: Spark;
   public dockerHost: plugins.docker.DockerHost;
   public smartupdate: plugins.smartupdate.SmartUpdate;
 
-  /**
-   * the services that are managed by spark
-   */
-  services: Array<{
-    name: string;
-    image: string;
-    url: string;
-    port: string;
-    environment: string;
-    secretJson: any;
-  }> = [];
-
   constructor(sparkrefArg: Spark) {
     this.sparkRef = sparkrefArg;
     this.dockerHost = new plugins.docker.DockerHost({});
@@ -37,7 +21,60 @@ export class SparkServicesManager {
   }
 
   public async updateServices() {
-    for (const service of this.services) {
+    if (
+      plugins.smartfile.fs.isDirectory(plugins.path.join(paths.homeDir, 'serve.zone/spark')) &&
+      (await plugins.smartfile.fs.fileExists(
+        plugins.path.join(paths.homeDir, 'serve.zone/spark/spark.json')
+      ))
+    ) {
+      const services: Array<{
+        name: string;
+        image: string;
+        url: string;
+        port: string;
+        environment: string;
+        secretJson: any;
+      }> = [];
+      // lets add coreflow
+      services.push({
+        name: `coreflow`,
+        image: `code.foss.global/serve.zone/coreflow`,
+        url: `coreflow`,
+        environment: `production`,
+        port: `3000`,
+        secretJson: {
+          SERVEZONE_PORT: `3000`,
+          SERVEZONE_ENVIRONMENT: `production`,
+        },
+      });
+
+      services.push({
+        name: `coretraffic`,
+        image: `code.foss.global/serve.zone/coretraffic`,
+        url: `coreflow`,
+        environment: `production`,
+        port: `3000`,
+        secretJson: {
+          SERVEZONE_PORT: `3000`,
+          SERVEZONE_ENVIRONMENT: `production`,
+        },
+      });
+
+      services.push({
+        name: `corelog`,
+        image: `code.foss.global/serve.zone/corelog`,
+        url: `coreflow`,
+        environment: `production`,
+        port: `3000`,
+        secretJson: {
+          SERVEZONE_PORT: `3000`,
+          SERVEZONE_ENVIRONMENT: `production`,
+        },
+      });
+
+      // lets add coretraffic
+
+      for (const service of services) {
       const existingService = await plugins.docker.DockerService.getServiceByName(
         this.dockerHost,
         service.name
@@ -49,11 +86,10 @@
       if (existingService) {
         const needsUpdate: boolean = await existingService.needsUpdate();
         if (!needsUpdate) {
-          logger.log('info', `service >>${service.name}<< not needing update.`);
+          logger.log('info', `not needing update.`);
           // we simply return here to end the functions
           return;
         }
-        // continuing here means we need to update the service
         logger.log('ok', `${service.name} needs to be updated!`);
         await existingService.remove();
         await existingServiceSecret.remove();
@@ -76,7 +112,6 @@
         version: await newServiceImage.getVersion(),
         labels: {},
       });
-
       const newService = await plugins.docker.DockerService.createService(this.dockerHost, {
         image: newServiceImage,
         labels: {},
@@ -88,7 +123,7 @@
       });
       logger.log('ok', `updated service >>${newService.Spec.Name}<<!`);
     }
-    logger.log('success', `updated ${this.services.length} services!`);
+      logger.log('success', `updated ${services.length} services!`);
+    }
   }
 }
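The v1.0.87 side gates all service reconciliation on a `spark.json` marker under the user's home directory. A standalone sketch of that gate, assuming `paths.homeDir` (defined in spark.paths.ts, which is not part of this diff) resolves to the current user's home directory:

```typescript
import * as path from 'path';
import * as smartfile from '@push.rocks/smartfile';

const sparkConfigPresent = async (): Promise<boolean> => {
  // assumption: paths.homeDir resolves to the current user's home directory
  const homeDir = process.env.HOME ?? '/root';
  const sparkDir = path.join(homeDir, 'serve.zone/spark');
  const sparkJson = path.join(sparkDir, 'spark.json');

  // mirrors the gate in updateServices(): only reconcile Docker services
  // when both the directory and its spark.json marker exist
  return (
    smartfile.fs.isDirectory(sparkDir) &&
    (await smartfile.fs.fileExists(sparkJson))
  );
};
```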
@@ -45,50 +45,6 @@ export const runCli = async () => {
   smartcliInstance.addCommand('asdaemon').subscribe(async (argvArg) => {
     logger.log('success', 'looks like we are running as daemon now');
     logger.log('info', 'starting spark in daemon mode');
-
-    // lets determine the mode if specified
-    let mode = argvArg.mode;
-    if (mode === 'cloudly') {
-      await sparkInstance.sparkConfig.kvStore.writeKey('mode', 'cloudly');
-    } else if (mode === 'coreflow-node') {
-      await sparkInstance.sparkConfig.kvStore.writeKey('mode', 'coreflow-node');
-    } else if (mode) {
-      logger.log('error', 'unknown mode specified');
-      process.exit(1);
-    } else {
-      // mode is not specified by cli, lets get it from the config
-      mode = await sparkInstance.sparkConfig.kvStore.readKey('mode');
-    }
-
-    if (!mode) {
-      logger.log('error', 'no mode specified by either cli or config');
-      process.exit(1);
-    } else if (mode === 'cloudly') {
-      sparkInstance.sparkUpdateManager.services.push({
-        name: `coreflow`,
-        image: `code.foss.global/serve.zone/cloudly`,
-        url: `cloudly`,
-        environment: `production`,
-        port: `3000`,
-        secretJson: {
-          SERVEZONE_PORT: `3000`,
-          SERVEZONE_ENVIRONMENT: `production`,
-        },
-      });
-    } else if (mode === 'coreflow-node') {
-      sparkInstance.sparkUpdateManager.services.push({
-        name: `coreflow`,
-        image: `code.foss.global/serve.zone/coreflow`,
-        url: `coreflow`,
-        environment: `production`,
-        port: `3000`,
-        secretJson: {
-          SERVEZONE_PORT: `3000`,
-          SERVEZONE_ENVIRONMENT: `production`,
-        },
-      });
-    }
-
     await sparkInstance.daemonStart();
   });
 
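The removed master-branch block resolves the daemon mode from the CLI argument first and persists it, so later starts can omit the flag. A condensed sketch of that resolution order, assuming smartcli surfaces `--mode <value>` as `argvArg.mode` (as the removed code implies); the helper name is hypothetical:

```typescript
// resolution order used by the removed master-branch code:
// 1. explicit CLI argument, persisted for later runs
// 2. previously persisted value from the kvStore
// 3. neither present -> hard error
async function resolveMode(
  argvArg: { mode?: string },
  kvStore: {
    writeKey: (key: string, value: string) => Promise<void>;
    readKey: (key: string) => Promise<string | undefined>;
  }
): Promise<string> {
  let mode = argvArg.mode;
  if (mode === 'cloudly' || mode === 'coreflow-node') {
    await kvStore.writeKey('mode', mode); // remember for the next start
  } else if (mode) {
    throw new Error('unknown mode specified');
  } else {
    mode = await kvStore.readKey('mode');
  }
  if (!mode) {
    throw new Error('no mode specified by either cli or config');
  }
  return mode;
}
```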
ts/spark.plugins.ts
@@ -5,9 +5,8 @@ export { path };
 
 // @serve.zone scope
 import * as servezoneInterfaces from '@serve.zone/interfaces';
-import * as servezoneApi from '@serve.zone/api';
 
-export { servezoneInterfaces, servezoneApi };
+export { servezoneInterfaces };
 
 // @apiclient.xyz scope
 import * as docker from '@apiclient.xyz/docker';