fix(deps): update Docker API usage to DockerHost facade, bump dependencies, and adjust tests/docs

This commit is contained in:
2026-02-04 11:36:05 +00:00
parent 6117b20cc5
commit 31dc8eee22
11 changed files with 933 additions and 968 deletions

1
.serena/.gitignore vendored
View File

@@ -1 +0,0 @@
/cache

View File

@@ -1,47 +0,0 @@
# Spark Project Overview
## Project Purpose
Spark is a comprehensive tool for maintaining and configuring servers, integrating with Docker and supporting advanced task scheduling, targeted at the Servezone infrastructure and used by @serve.zone/cloudly as a cluster node server system manager.
## Tech Stack
- **Language**: TypeScript
- **Runtime**: Node.js (currently)
- **Package Manager**: pnpm
- **Build Tool**: @git.zone/tsbuild
- **Test Framework**: @git.zone/tstest with @push.rocks/tapbundle
- **CLI Framework**: @push.rocks/smartcli
- **Version**: 1.2.2
## Directory Structure
```
spark/
├── ts/ # TypeScript source files
├── test/ # Test files (single test.nonci.ts)
├── dist_ts/ # Compiled TypeScript output
├── cli.js # CLI entry point
├── cli.child.ts # Child process CLI
├── cli.ts.js # TypeScript CLI wrapper
└── package.json # Dependencies and scripts
```
## Key Dependencies
- **@serve.zone/api**: API client for Servezone
- **@serve.zone/interfaces**: Interface definitions
- **@apiclient.xyz/docker**: Docker API client
- **@push.rocks/*** packages: Various utilities (smartlog, smartfile, smartcli, smartdaemon, etc.)
## Main Components
1. **CLI** (spark.cli.ts): Command-line interface with commands like installdaemon, updatedaemon, asdaemon
2. **Spark** (spark.classes.spark.ts): Main application class
3. **TaskManager** (spark.classes.taskmanager.ts): Task scheduling
4. **UpdateManager** (spark.classes.updatemanager.ts): Service updates
5. **Config** (spark.classes.config.ts): Configuration management
## Commands
- `pnpm build`: Build the TypeScript code
- `pnpm test`: Run tests
- `spark installdaemon`: Install as system daemon
- `spark updatedaemon`: Update daemon service
- `spark asdaemon`: Run as daemon
- `spark logs`: View daemon logs
- `spark prune`: Clean up resources

View File

@@ -1,71 +0,0 @@
# language of the project (csharp, python, rust, java, typescript, go, cpp, or ruby)
# * For C, use cpp
# * For JavaScript, use typescript
# Special requirements:
# * csharp: Requires the presence of a .sln file in the project folder.
language: typescript
# the encoding used by text files in the project
# For a list of possible encodings, see https://docs.python.org/3.11/library/codecs.html#standard-encodings
encoding: "utf-8"
# whether to use the project's gitignore file to ignore files
# Added on 2025-04-07
ignore_all_files_in_gitignore: true
# list of additional paths to ignore
# same syntax as gitignore, so you can use * and **
# Was previously called `ignored_dirs`, please update your config if you are using that.
# Added (renamed) on 2025-04-07
ignored_paths: []
# whether the project is in read-only mode
# If set to true, all editing tools will be disabled and attempts to use them will result in an error
# Added on 2025-04-18
read_only: false
# list of tool names to exclude. We recommend not excluding any tools, see the readme for more details.
# Below is the complete list of tools for convenience.
# To make sure you have the latest list of tools, and to view their descriptions,
# execute `uv run scripts/print_tool_overview.py`.
#
# * `activate_project`: Activates a project by name.
# * `check_onboarding_performed`: Checks whether project onboarding was already performed.
# * `create_text_file`: Creates/overwrites a file in the project directory.
# * `delete_lines`: Deletes a range of lines within a file.
# * `delete_memory`: Deletes a memory from Serena's project-specific memory store.
# * `execute_shell_command`: Executes a shell command.
# * `find_referencing_code_snippets`: Finds code snippets in which the symbol at the given location is referenced.
# * `find_referencing_symbols`: Finds symbols that reference the symbol at the given location (optionally filtered by type).
# * `find_symbol`: Performs a global (or local) search for symbols with/containing a given name/substring (optionally filtered by type).
# * `get_current_config`: Prints the current configuration of the agent, including the active and available projects, tools, contexts, and modes.
# * `get_symbols_overview`: Gets an overview of the top-level symbols defined in a given file.
# * `initial_instructions`: Gets the initial instructions for the current project.
# Should only be used in settings where the system prompt cannot be set,
# e.g. in clients you have no control over, like Claude Desktop.
# * `insert_after_symbol`: Inserts content after the end of the definition of a given symbol.
# * `insert_at_line`: Inserts content at a given line in a file.
# * `insert_before_symbol`: Inserts content before the beginning of the definition of a given symbol.
# * `list_dir`: Lists files and directories in the given directory (optionally with recursion).
# * `list_memories`: Lists memories in Serena's project-specific memory store.
# * `onboarding`: Performs onboarding (identifying the project structure and essential tasks, e.g. for testing or building).
# * `prepare_for_new_conversation`: Provides instructions for preparing for a new conversation (in order to continue with the necessary context).
# * `read_file`: Reads a file within the project directory.
# * `read_memory`: Reads the memory with the given name from Serena's project-specific memory store.
# * `remove_project`: Removes a project from the Serena configuration.
# * `replace_lines`: Replaces a range of lines within a file with new content.
# * `replace_symbol_body`: Replaces the full definition of a symbol.
# * `restart_language_server`: Restarts the language server, may be necessary when edits not through Serena happen.
# * `search_for_pattern`: Performs a search for a pattern in the project.
# * `summarize_changes`: Provides instructions for summarizing the changes made to the codebase.
# * `switch_modes`: Activates modes by providing a list of their names
# * `think_about_collected_information`: Thinking tool for pondering the completeness of collected information.
# * `think_about_task_adherence`: Thinking tool for determining whether the agent is still on track with the current task.
# * `think_about_whether_you_are_done`: Thinking tool for determining whether the task is truly completed.
# * `write_memory`: Writes a named memory (for future reference) to Serena's project-specific memory store.
excluded_tools: []
# initial prompt for the project. It will always be given to the LLM upon activating the project
# (contrary to the memories, which are loaded on demand).
initial_prompt: ""
project_name: "spark"

View File

@@ -1,5 +1,14 @@
# Changelog
## 2026-02-04 - 1.2.5 - fix(deps)
update Docker API usage to DockerHost facade, bump dependencies, and adjust tests/docs
- Bumped @serve.zone/api and @serve.zone/interfaces to ^5.3.0 and @apiclient.xyz/docker to ^5.1.0 in deno.json
- Replaced plugins.docker.* calls with dockerHost facade methods (getServiceByName, getSecretByName, createFromRegistry → createImageFromRegistry, createSecret, createService) in updatemanager and added conditional secret removal to avoid errors
- Updated a number of @push.rocks package versions in deno.json
- Adjusted test/test.ts to use the Deno.test object form and disabled sanitizeResources/sanitizeOps to accommodate smartdaemon signal listeners
- Bumped install example version in readme to v1.2.4 and cleaned up project Serena metadata files
## 2025-10-23 - 1.2.4 - fix(deno)
Update deno configuration and add local Claude settings

View File

@@ -37,27 +37,27 @@
"@std/path": "jsr:@std/path@^1.0.0",
"@std/fmt": "jsr:@std/fmt@^1.0.0",
"@std/assert": "jsr:@std/assert@^1.0.0",
"@serve.zone/interfaces": "npm:@serve.zone/interfaces@^4.5.1",
"@serve.zone/api": "npm:@serve.zone/api@^4.5.1",
"@apiclient.xyz/docker": "npm:@apiclient.xyz/docker@^1.2.7",
"@push.rocks/npmextra": "npm:@push.rocks/npmextra@^5.1.2",
"@push.rocks/projectinfo": "npm:@push.rocks/projectinfo@^5.0.1",
"@push.rocks/qenv": "npm:@push.rocks/qenv@^6.1.0",
"@push.rocks/smartcli": "npm:@push.rocks/smartcli@^4.0.11",
"@push.rocks/smartdaemon": "npm:@push.rocks/smartdaemon@^2.0.3",
"@serve.zone/interfaces": "npm:@serve.zone/interfaces@^5.3.0",
"@serve.zone/api": "npm:@serve.zone/api@^5.3.0",
"@apiclient.xyz/docker": "npm:@apiclient.xyz/docker@^5.1.0",
"@push.rocks/npmextra": "npm:@push.rocks/npmextra@^5.3.3",
"@push.rocks/projectinfo": "npm:@push.rocks/projectinfo@^5.0.2",
"@push.rocks/qenv": "npm:@push.rocks/qenv@^6.1.3",
"@push.rocks/smartcli": "npm:@push.rocks/smartcli@^4.0.20",
"@push.rocks/smartdaemon": "npm:@push.rocks/smartdaemon@^2.1.0",
"@push.rocks/smartdelay": "npm:@push.rocks/smartdelay@^3.0.5",
"@push.rocks/smartfile": "npm:@push.rocks/smartfile@^11.0.23",
"@push.rocks/smartjson": "npm:@push.rocks/smartjson@^5.0.20",
"@push.rocks/smartlog": "npm:@push.rocks/smartlog@^3.0.7",
"@push.rocks/smartlog-destination-local": "npm:@push.rocks/smartlog-destination-local@^9.0.0",
"@push.rocks/smartlog": "npm:@push.rocks/smartlog@^3.1.10",
"@push.rocks/smartlog-destination-local": "npm:@push.rocks/smartlog-destination-local@^9.0.2",
"@push.rocks/smartpath": "npm:@push.rocks/smartpath@^5.0.5",
"@push.rocks/smartshell": "npm:@push.rocks/smartshell@^3.2.2",
"@push.rocks/smartupdate": "npm:@push.rocks/smartupdate@^2.0.4",
"@push.rocks/smartshell": "npm:@push.rocks/smartshell@^3.3.0",
"@push.rocks/smartupdate": "npm:@push.rocks/smartupdate@^2.0.6",
"@push.rocks/taskbuffer": "npm:@push.rocks/taskbuffer@^3.0.10",
"@push.rocks/smartexpect": "npm:@push.rocks/smartexpect@^1.0.15",
"@push.rocks/smartrx": "npm:@push.rocks/smartrx@^3.0.10",
"@push.rocks/smartpromise": "npm:@push.rocks/smartpromise@^4.0.0",
"@push.rocks/smartstring": "npm:@push.rocks/smartstring@^4.0.0",
"@push.rocks/smarttime": "npm:@push.rocks/smarttime@^4.0.0"
"@push.rocks/smartpromise": "npm:@push.rocks/smartpromise@^4.2.3",
"@push.rocks/smartstring": "npm:@push.rocks/smartstring@^4.1.0",
"@push.rocks/smarttime": "npm:@push.rocks/smarttime@^4.1.1"
}
}

1651
deno.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,2 +1,40 @@
- make sure to mention that this package is part of serve.zone and **spark's main purpose** is to be used by @serve.zone/cloudly as a cluster node server system manager.
- it is used to maintain and configure servers on the base OS level
# Project Memory: @serve.zone/spark
## Project Overview
- **Purpose**: Cluster node server system manager used by @serve.zone/cloudly
- **Runtime**: Deno 2.x - this is a Deno-based project, NOT Node.js
- **Configuration**: Uses `deno.json` for dependencies and tasks
- **Main entry**: `mod.ts` for CLI, exports from `ts/index.ts`
## Architecture
- **SparkConfig**: Key-value store for configuration (`npmextra.KeyValueStore`)
- **SparkTaskManager**: Cron-based task scheduling (`taskbuffer.TaskManager`)
- **SparkServicesManager**: Docker service management via `@apiclient.xyz/docker`
- **SmartDaemon**: Systemd integration for daemon mode
## Key Dependencies (Updated Feb 2026)
- `@apiclient.xyz/docker@^5.1.0` - Docker API (Facade pattern with DockerHost as entry point)
- `@serve.zone/api@^5.3.0` - Serve.zone API client
- `@serve.zone/interfaces@^5.3.0` - Type definitions
- `@push.rocks/taskbuffer@^3.0.10` - Task scheduling
- `@push.rocks/smartdaemon@^2.1.0` - Systemd service management
- `@push.rocks/smartcli@^4.0.20` - CLI framework
## Docker API Notes (v5.x)
The `@apiclient.xyz/docker` package uses a Facade pattern. All operations go through `DockerHost`:
- `dockerHost.getServiceByName(name)` - Get service by name
- `dockerHost.getSecretByName(name)` - Get secret by name
- `dockerHost.createImageFromRegistry({ imageUrl })` - Pull image
- `dockerHost.createSecret({ name, contentArg, version, labels })` - Create secret
- `dockerHost.createService({ name, image, networks, secrets, ports, ... })` - Create service
- `dockerHost.activateSwarm()` - Initialize swarm mode
## Testing
- Tests use Deno's built-in test runner (`deno task test`)
- Test files in `test/` directory
- The first test disables resource/ops sanitization due to signal listeners from smartdaemon
## Build
- No build step needed for the TypeScript code
- Binary compilation: `deno task compile:all` or `bash scripts/compile-all.sh`
- npm package uses postinstall script to download pre-compiled binaries

View File

@@ -36,7 +36,7 @@ npm install -g @serve.zone/spark
### Specific Version
```bash
curl -sSL https://code.foss.global/serve.zone/spark/raw/branch/master/install.sh | sudo bash -s -- --version v1.2.2
curl -sSL https://code.foss.global/serve.zone/spark/raw/branch/master/install.sh | sudo bash -s -- --version v1.2.4
```
### Manual Installation
@@ -45,7 +45,7 @@ Download the binary for your platform from the [releases page](https://code.foss
```bash
# Example for Linux x64
wget https://code.foss.global/serve.zone/spark/releases/download/v1.2.2/spark-linux-x64
wget https://code.foss.global/serve.zone/spark/releases/download/v1.2.4/spark-linux-x64
chmod +x spark-linux-x64
sudo mv spark-linux-x64 /usr/local/bin/spark
```

View File

@@ -3,10 +3,15 @@ import * as spark from '../ts/index.ts';
let testSpark: spark.Spark;
Deno.test('should create a spark instance', () => {
testSpark = new spark.Spark();
assert(testSpark instanceof spark.Spark);
assertExists(testSpark);
Deno.test({
name: 'should create a spark instance',
fn: () => {
testSpark = new spark.Spark();
assert(testSpark instanceof spark.Spark);
assertExists(testSpark);
},
sanitizeResources: false,
sanitizeOps: false,
});
Deno.test('should have spark info', () => {

View File

@@ -3,6 +3,6 @@
*/
export const commitinfo = {
name: '@serve.zone/spark',
version: '1.2.4',
version: '1.2.5',
description: 'A comprehensive tool for maintaining and configuring servers, integrating with Docker and supporting advanced task scheduling, targeted at the Servezone infrastructure and used by @serve.zone/cloudly as a cluster node server system manager.'
}

View File

@@ -38,14 +38,8 @@ export class SparkServicesManager {
public async updateServices() {
for (const service of this.services) {
const existingService = await plugins.docker.DockerService.getServiceByName(
this.dockerHost,
service.name
);
const existingServiceSecret = await plugins.docker.DockerSecret.getSecretByName(
this.dockerHost,
`${service.name}Secret`
);
const existingService = await this.dockerHost.getServiceByName(service.name);
const existingServiceSecret = await this.dockerHost.getSecretByName(`${service.name}Secret`);
if (existingService) {
const needsUpdate: boolean = await existingService.needsUpdate();
if (!needsUpdate) {
@@ -56,28 +50,25 @@ export class SparkServicesManager {
// continuing here means we need to update the service
logger.log('ok', `${service.name} needs to be updated!`);
await existingService.remove();
await existingServiceSecret.remove();
if (existingServiceSecret) {
await existingServiceSecret.remove();
}
}
if (!existingService && existingServiceSecret) {
await existingServiceSecret.remove();
}
const newServiceImage = await plugins.docker.DockerImage.createFromRegistry(
this.dockerHost,
{
creationObject: {
imageUrl: service.image,
},
}
);
const newServiceSecret = await plugins.docker.DockerSecret.createSecret(this.dockerHost, {
const newServiceImage = await this.dockerHost.createImageFromRegistry({
imageUrl: service.image,
});
const newServiceSecret = await this.dockerHost.createSecret({
name: `${service.name}Secret`,
contentArg: plugins.smartjson.stringify(service.secretJson),
version: await newServiceImage.getVersion(),
labels: {},
});
const newService = await plugins.docker.DockerService.createService(this.dockerHost, {
const newService = await this.dockerHost.createService({
image: newServiceImage,
labels: {},
name: service.name,
@@ -88,7 +79,7 @@ export class SparkServicesManager {
});
logger.log('ok', `updated service >>${newService.Spec.Name}<<!`);
}
logger.log('success', `updated ${this.services.length} services!`);
}
}