Compare commits
15 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | 91ed2a5b5d |  |
|  | f7c9be25b7 |  |
|  | 67f97e6115 |  |
|  | da4b5ff9a3 |  |
|  | 526b4f46dd |  |
|  | 34cac57de8 |  |
|  | 5badfc72f4 |  |
|  | a72227ce0d |  |
|  | bf4f405bc3 |  |
|  | 6cc1df8bdb |  |
|  | ccae0efa07 |  |
|  | 875971407e |  |
|  | 47b186ebd1 |  |
|  | d306f5e184 |  |
|  | da443d0734 |  |
.gitea/release-template.md (Normal file, 26 lines added)
@@ -0,0 +1,26 @@
## SPARK {{VERSION}}

Pre-compiled binaries for multiple platforms.

### Installation

#### Option 1: Via installer script (recommended)
```bash
curl -sSL https://code.foss.global/serve.zone/spark/raw/branch/master/install.sh | sudo bash
```

#### Option 2: Direct binary download
Download the appropriate binary for your platform from the assets below and make it executable.

### Supported Platforms
- Linux x86_64 (x64)
- Linux ARM64 (aarch64)
- macOS x86_64 (Intel)
- macOS ARM64 (Apple Silicon)
- Windows x86_64

### Checksums
SHA256 checksums are provided in `SHA256SUMS.txt` for binary verification.

### What is SPARK?
SPARK is a comprehensive tool for maintaining and configuring servers, integrating with Docker and supporting advanced task scheduling, targeted at the Servezone infrastructure and used by @serve.zone/cloudly as a cluster node server system manager.
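Since the template points users at `SHA256SUMS.txt`, a typical verification flow on the user's machine might look like the following sketch. The version tag is only an example; the URLs follow the release-asset pattern that install.sh (later in this comparison) constructs.

```bash
# Fetch a binary plus the checksum file from the same release, then verify.
# GNU coreutils' --ignore-missing skips entries for binaries you did not download.
VERSION=v1.2.3
BASE="https://code.foss.global/serve.zone/spark/releases/download/$VERSION"
curl -sSLO "$BASE/spark-linux-x64"
curl -sSLO "$BASE/SHA256SUMS.txt"
sha256sum --check --ignore-missing SHA256SUMS.txt
```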
.gitea/workflows/ci.yml (Normal file, 85 lines added)
@@ -0,0 +1,85 @@
name: CI

on:
  push:
    branches:
      - master
      - main
  pull_request:
    branches:
      - master
      - main

jobs:
  check:
    name: Type Check & Lint
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set up Deno
        uses: denoland/setup-deno@v1
        with:
          deno-version: v2.x

      - name: Check TypeScript types
        run: deno check mod.ts

      - name: Lint code
        run: deno lint
        continue-on-error: true

      - name: Format check
        run: deno fmt --check
        continue-on-error: true

  build:
    name: Build Test (Current Platform)
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set up Deno
        uses: denoland/setup-deno@v1
        with:
          deno-version: v2.x

      - name: Compile for current platform
        run: |
          echo "Testing compilation for Linux x86_64..."
          deno compile --allow-all --no-check \
            --output spark-test \
            --target x86_64-unknown-linux-gnu mod.ts

      - name: Test binary execution
        run: |
          chmod +x spark-test
          ./spark-test --version || echo "Version command may not work yet - OK for now"
          ./spark-test help || echo "Help command may not work yet - OK for now"

  build-all:
    name: Build All Platforms
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set up Deno
        uses: denoland/setup-deno@v1
        with:
          deno-version: v2.x

      - name: Compile all platform binaries
        run: bash scripts/compile-all.sh

      - name: Upload all binaries as artifact
        uses: actions/upload-artifact@v3
        with:
          name: spark-binaries.zip
          path: dist/binaries/*
          retention-days: 30
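The build-all job defers entirely to `scripts/compile-all.sh`, so the same multi-platform build can be reproduced on a workstation. A minimal sketch, assuming Deno v2.x is on PATH:

```bash
# Mirror the build-all CI job locally:
bash scripts/compile-all.sh
ls -lh dist/binaries/   # this directory is what the CI job uploads as the artifact
```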
.gitea/workflows/release.yml (Normal file, 249 lines added)
@@ -0,0 +1,249 @@
name: Release

on:
  push:
    tags:
      - 'v*'

jobs:
  build-and-release:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Set up Deno
        uses: denoland/setup-deno@v1
        with:
          deno-version: v2.x

      - name: Get version from tag
        id: version
        run: |
          VERSION=${GITHUB_REF#refs/tags/}
          echo "version=$VERSION" >> $GITHUB_OUTPUT
          echo "version_number=${VERSION#v}" >> $GITHUB_OUTPUT
          echo "Building version: $VERSION"

      - name: Verify deno.json version matches tag
        run: |
          DENO_VERSION=$(grep -o '"version": "[^"]*"' deno.json | cut -d'"' -f4)
          TAG_VERSION="${{ steps.version.outputs.version_number }}"
          echo "deno.json version: $DENO_VERSION"
          echo "Tag version: $TAG_VERSION"
          if [ "$DENO_VERSION" != "$TAG_VERSION" ]; then
            echo "ERROR: Version mismatch!"
            echo "deno.json has version $DENO_VERSION but tag is $TAG_VERSION"
            exit 1
          fi

      - name: Compile binaries for all platforms
        run: |
          echo "================================================"
          echo " SPARK Release Compilation"
          echo " Version: ${{ steps.version.outputs.version }}"
          echo "================================================"
          echo ""

          # Clean up old binaries and create fresh directory
          rm -rf dist/binaries
          mkdir -p dist/binaries
          echo "→ Cleaned old binaries from dist/binaries"
          echo ""

          # Linux x86_64
          echo "→ Compiling for Linux x86_64..."
          deno compile --allow-all --no-check \
            --output dist/binaries/spark-linux-x64 \
            --target x86_64-unknown-linux-gnu mod.ts
          echo " ✓ Linux x86_64 complete"

          # Linux ARM64
          echo "→ Compiling for Linux ARM64..."
          deno compile --allow-all --no-check \
            --output dist/binaries/spark-linux-arm64 \
            --target aarch64-unknown-linux-gnu mod.ts
          echo " ✓ Linux ARM64 complete"

          # macOS x86_64
          echo "→ Compiling for macOS x86_64..."
          deno compile --allow-all --no-check \
            --output dist/binaries/spark-macos-x64 \
            --target x86_64-apple-darwin mod.ts
          echo " ✓ macOS x86_64 complete"

          # macOS ARM64
          echo "→ Compiling for macOS ARM64..."
          deno compile --allow-all --no-check \
            --output dist/binaries/spark-macos-arm64 \
            --target aarch64-apple-darwin mod.ts
          echo " ✓ macOS ARM64 complete"

          # Windows x86_64
          echo "→ Compiling for Windows x86_64..."
          deno compile --allow-all --no-check \
            --output dist/binaries/spark-windows-x64.exe \
            --target x86_64-pc-windows-msvc mod.ts
          echo " ✓ Windows x86_64 complete"

          echo ""
          echo "All binaries compiled successfully!"
          ls -lh dist/binaries/

      - name: Generate SHA256 checksums
        run: |
          cd dist/binaries
          sha256sum * > SHA256SUMS.txt
          cat SHA256SUMS.txt
          cd ../..

      - name: Extract changelog for this version
        id: changelog
        run: |
          VERSION="${{ steps.version.outputs.version }}"

          # Check if changelog.md exists
          if [ ! -f changelog.md ]; then
            echo "No changelog.md found, using default release notes"
            cat > /tmp/release_notes.md << EOF
          ## SPARK $VERSION

          Pre-compiled binaries for multiple platforms.

          ### Installation

          Use the installation script:
          \`\`\`bash
          curl -sSL https://code.foss.global/serve.zone/spark/raw/branch/master/install.sh | sudo bash
          \`\`\`

          Or download the binary for your platform and make it executable.

          ### Supported Platforms
          - Linux x86_64 (x64)
          - Linux ARM64 (aarch64)
          - macOS x86_64 (Intel)
          - macOS ARM64 (Apple Silicon)
          - Windows x86_64

          ### Checksums
          SHA256 checksums are provided in SHA256SUMS.txt
          EOF
          else
            # Try to extract section for this version from changelog.md
            # This is a simple extraction - adjust based on your changelog format
            awk "/## \[$VERSION\]/,/## \[/" changelog.md | sed '$d' > /tmp/release_notes.md || cat > /tmp/release_notes.md << EOF
          ## SPARK $VERSION

          See changelog.md for full details.

          ### Installation

          Use the installation script:
          \`\`\`bash
          curl -sSL https://code.foss.global/serve.zone/spark/raw/branch/master/install.sh | sudo bash
          \`\`\`
          EOF
          fi

          echo "Release notes:"
          cat /tmp/release_notes.md

      - name: Delete existing release if it exists
        run: |
          VERSION="${{ steps.version.outputs.version }}"

          echo "Checking for existing release $VERSION..."

          # Try to get existing release by tag
          EXISTING_RELEASE_ID=$(curl -s \
            -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
            "https://code.foss.global/api/v1/repos/serve.zone/spark/releases/tags/$VERSION" \
            | jq -r '.id // empty')

          if [ -n "$EXISTING_RELEASE_ID" ]; then
            echo "Found existing release (ID: $EXISTING_RELEASE_ID), deleting..."
            curl -X DELETE -s \
              -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
              "https://code.foss.global/api/v1/repos/serve.zone/spark/releases/$EXISTING_RELEASE_ID"
            echo "Existing release deleted"
            sleep 2
          else
            echo "No existing release found, proceeding with creation"
          fi

      - name: Create Gitea Release
        run: |
          VERSION="${{ steps.version.outputs.version }}"
          RELEASE_NOTES=$(cat /tmp/release_notes.md)

          # Create the release
          echo "Creating release for $VERSION..."
          RELEASE_ID=$(curl -X POST -s \
            -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
            -H "Content-Type: application/json" \
            "https://code.foss.global/api/v1/repos/serve.zone/spark/releases" \
            -d "{
              \"tag_name\": \"$VERSION\",
              \"name\": \"SPARK $VERSION\",
              \"body\": $(jq -Rs . /tmp/release_notes.md),
              \"draft\": false,
              \"prerelease\": false
            }" | jq -r '.id')

          echo "Release created with ID: $RELEASE_ID"

          # Upload binaries as release assets
          for binary in dist/binaries/*; do
            filename=$(basename "$binary")
            echo "Uploading $filename..."
            curl -X POST -s \
              -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
              -H "Content-Type: application/octet-stream" \
              --data-binary "@$binary" \
              "https://code.foss.global/api/v1/repos/serve.zone/spark/releases/$RELEASE_ID/assets?name=$filename"
          done

          echo "All assets uploaded successfully"

      - name: Clean up old releases
        run: |
          echo "Cleaning up old releases (keeping only last 3)..."

          # Fetch all releases sorted by creation date
          RELEASES=$(curl -s -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
            "https://code.foss.global/api/v1/repos/serve.zone/spark/releases" | \
            jq -r 'sort_by(.created_at) | reverse | .[3:] | .[].id')

          # Delete old releases
          if [ -n "$RELEASES" ]; then
            echo "Found releases to delete:"
            for release_id in $RELEASES; do
              echo " Deleting release ID: $release_id"
              curl -X DELETE -s -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
                "https://code.foss.global/api/v1/repos/serve.zone/spark/releases/$release_id"
            done
            echo "Old releases deleted successfully"
          else
            echo "No old releases to delete (less than 4 releases total)"
          fi
          echo ""

      - name: Release Summary
        run: |
          echo "================================================"
          echo " Release ${{ steps.version.outputs.version }} Complete!"
          echo "================================================"
          echo ""
          echo "Binaries published:"
          ls -lh dist/binaries/
          echo ""
          echo "Release URL:"
          echo "https://code.foss.global/serve.zone/spark/releases/tag/${{ steps.version.outputs.version }}"
          echo ""
          echo "Installation command:"
          echo "curl -sSL https://code.foss.global/serve.zone/spark/raw/branch/master/install.sh | sudo bash"
          echo ""
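The changelog-extraction step is worth unpacking. The awk range pattern prints from the `## [vX.Y.Z]` heading of the tagged version up to the next `## [` heading, and `sed '$d'` drops that trailing heading. Two caveats: the changelog.md added in this same comparison uses `## DATE - X.Y.Z - scope` headings rather than `## [vX.Y.Z]`, and awk exits 0 even when nothing matches, so the `|| cat << EOF` fallback fires only on a hard failure and the notes can end up empty. A sketch of the step in isolation, under the assumed `## [vX.Y.Z]` format:

```bash
# Extract one version's section from a "## [vX.Y.Z]"-style changelog (assumed format):
VERSION=v1.2.3
awk "/## \[$VERSION\]/,/## \[/" changelog.md | sed '$d'
# With no match, awk prints nothing but still exits 0, so the workflow's
# heredoc fallback is not triggered in that case.
```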
.npmignore (Normal file, 55 lines added)
@@ -0,0 +1,55 @@
# Source code (not needed for binary distribution)
/ts/
/test/
mod.ts
*.ts
!*.d.ts

# Development files
.git/
.gitea/
.claude/
.serena/
.nogit/
.github/
deno.json
deno.lock
tsconfig.json

# Scripts not needed for npm
/scripts/compile-all.sh
install.sh
uninstall.sh
test.simple.ts

# Documentation files not needed for npm package
readme.plan.md
readme.hints.md
npm-publish-instructions.md
docs/

# IDE and editor files
.vscode/
.idea/
*.swp
*.swo
*~
.DS_Store

# Keep only the install-binary.js in scripts/
/scripts/*
!/scripts/install-binary.js

# Exclude all dist directory (binaries will be downloaded during install)
/dist/

# Logs and temporary files
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*

# Other
node_modules/
.env
.env.*
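Because package.json (later in this comparison) also declares a `files` allowlist, and npm's `files` field generally takes precedence over .npmignore for top-level inclusion (an assumption worth verifying against your npm version), the surest way to confirm what actually ships is a dry-run pack:

```bash
# Preview the exact tarball contents without publishing:
npm pack --dry-run
```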
.serena/.gitignore (vendored, Normal file, 1 line added)
@@ -0,0 +1 @@
/cache
.serena/memories/spark_project_overview.md (Normal file, 47 lines added)
@@ -0,0 +1,47 @@
# Spark Project Overview

## Project Purpose
Spark is a comprehensive tool for maintaining and configuring servers, integrating with Docker and supporting advanced task scheduling, targeted at the Servezone infrastructure and used by @serve.zone/cloudly as a cluster node server system manager.

## Tech Stack
- **Language**: TypeScript
- **Runtime**: Node.js (currently)
- **Package Manager**: pnpm
- **Build Tool**: @git.zone/tsbuild
- **Test Framework**: @git.zone/tstest with @push.rocks/tapbundle
- **CLI Framework**: @push.rocks/smartcli
- **Version**: 1.2.2

## Directory Structure
```
spark/
├── ts/            # TypeScript source files
├── test/          # Test files (single test.nonci.ts)
├── dist_ts/       # Compiled TypeScript output
├── cli.js         # CLI entry point
├── cli.child.ts   # Child process CLI
├── cli.ts.js      # TypeScript CLI wrapper
└── package.json   # Dependencies and scripts
```

## Key Dependencies
- **@serve.zone/api**: API client for Servezone
- **@serve.zone/interfaces**: Interface definitions
- **@apiclient.xyz/docker**: Docker API client
- **@push.rocks/*** packages: Various utilities (smartlog, smartfile, smartcli, smartdaemon, etc.)

## Main Components
1. **CLI** (spark.cli.ts): Command-line interface with commands like installdaemon, updatedaemon, asdaemon
2. **Spark** (spark.classes.spark.ts): Main application class
3. **TaskManager** (spark.classes.taskmanager.ts): Task scheduling
4. **UpdateManager** (spark.classes.updatemanager.ts): Service updates
5. **Config** (spark.classes.config.ts): Configuration management

## Commands
- `pnpm build`: Build the TypeScript code
- `pnpm test`: Run tests
- `spark installdaemon`: Install as system daemon
- `spark updatedaemon`: Update daemon service
- `spark asdaemon`: Run as daemon
- `spark logs`: View daemon logs
- `spark prune`: Clean up resources
.serena/project.yml (Normal file, 71 lines added)
@@ -0,0 +1,71 @@
# language of the project (csharp, python, rust, java, typescript, go, cpp, or ruby)
# * For C, use cpp
# * For JavaScript, use typescript
# Special requirements:
# * csharp: Requires the presence of a .sln file in the project folder.
language: typescript

# the encoding used by text files in the project
# For a list of possible encodings, see https://docs.python.org/3.11/library/codecs.html#standard-encodings
encoding: "utf-8"

# whether to use the project's gitignore file to ignore files
# Added on 2025-04-07
ignore_all_files_in_gitignore: true
# list of additional paths to ignore
# same syntax as gitignore, so you can use * and **
# Was previously called `ignored_dirs`, please update your config if you are using that.
# Added (renamed) on 2025-04-07
ignored_paths: []

# whether the project is in read-only mode
# If set to true, all editing tools will be disabled and attempts to use them will result in an error
# Added on 2025-04-18
read_only: false

# list of tool names to exclude. We recommend not excluding any tools, see the readme for more details.
# Below is the complete list of tools for convenience.
# To make sure you have the latest list of tools, and to view their descriptions,
# execute `uv run scripts/print_tool_overview.py`.
#
# * `activate_project`: Activates a project by name.
# * `check_onboarding_performed`: Checks whether project onboarding was already performed.
# * `create_text_file`: Creates/overwrites a file in the project directory.
# * `delete_lines`: Deletes a range of lines within a file.
# * `delete_memory`: Deletes a memory from Serena's project-specific memory store.
# * `execute_shell_command`: Executes a shell command.
# * `find_referencing_code_snippets`: Finds code snippets in which the symbol at the given location is referenced.
# * `find_referencing_symbols`: Finds symbols that reference the symbol at the given location (optionally filtered by type).
# * `find_symbol`: Performs a global (or local) search for symbols with/containing a given name/substring (optionally filtered by type).
# * `get_current_config`: Prints the current configuration of the agent, including the active and available projects, tools, contexts, and modes.
# * `get_symbols_overview`: Gets an overview of the top-level symbols defined in a given file.
# * `initial_instructions`: Gets the initial instructions for the current project.
#    Should only be used in settings where the system prompt cannot be set,
#    e.g. in clients you have no control over, like Claude Desktop.
# * `insert_after_symbol`: Inserts content after the end of the definition of a given symbol.
# * `insert_at_line`: Inserts content at a given line in a file.
# * `insert_before_symbol`: Inserts content before the beginning of the definition of a given symbol.
# * `list_dir`: Lists files and directories in the given directory (optionally with recursion).
# * `list_memories`: Lists memories in Serena's project-specific memory store.
# * `onboarding`: Performs onboarding (identifying the project structure and essential tasks, e.g. for testing or building).
# * `prepare_for_new_conversation`: Provides instructions for preparing for a new conversation (in order to continue with the necessary context).
# * `read_file`: Reads a file within the project directory.
# * `read_memory`: Reads the memory with the given name from Serena's project-specific memory store.
# * `remove_project`: Removes a project from the Serena configuration.
# * `replace_lines`: Replaces a range of lines within a file with new content.
# * `replace_symbol_body`: Replaces the full definition of a symbol.
# * `restart_language_server`: Restarts the language server, may be necessary when edits not through Serena happen.
# * `search_for_pattern`: Performs a search for a pattern in the project.
# * `summarize_changes`: Provides instructions for summarizing the changes made to the codebase.
# * `switch_modes`: Activates modes by providing a list of their names
# * `think_about_collected_information`: Thinking tool for pondering the completeness of collected information.
# * `think_about_task_adherence`: Thinking tool for determining whether the agent is still on track with the current task.
# * `think_about_whether_you_are_done`: Thinking tool for determining whether the task is truly completed.
# * `write_memory`: Writes a named memory (for future reference) to Serena's project-specific memory store.
excluded_tools: []

# initial prompt for the project. It will always be given to the LLM upon activating the project
# (contrary to the memories, which are loaded on demand).
initial_prompt: ""

project_name: "spark"
bin/spark-wrapper.js (Executable file, 108 lines added)
@@ -0,0 +1,108 @@
#!/usr/bin/env node

/**
 * SPARK npm wrapper
 * This script executes the appropriate pre-compiled binary based on the current platform
 */

import { spawn } from 'child_process';
import { fileURLToPath } from 'url';
import { dirname, join } from 'path';
import { existsSync } from 'fs';
import { platform, arch } from 'os';

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

/**
 * Get the binary name for the current platform
 */
function getBinaryName() {
  const plat = platform();
  const architecture = arch();

  // Map Node's platform/arch to our binary naming
  const platformMap = {
    'darwin': 'macos',
    'linux': 'linux',
    'win32': 'windows'
  };

  const archMap = {
    'x64': 'x64',
    'arm64': 'arm64'
  };

  const mappedPlatform = platformMap[plat];
  const mappedArch = archMap[architecture];

  if (!mappedPlatform || !mappedArch) {
    console.error(`Error: Unsupported platform/architecture: ${plat}/${architecture}`);
    console.error('Supported platforms: Linux, macOS, Windows');
    console.error('Supported architectures: x64, arm64');
    process.exit(1);
  }

  // Construct binary name
  let binaryName = `spark-${mappedPlatform}-${mappedArch}`;
  if (plat === 'win32') {
    binaryName += '.exe';
  }

  return binaryName;
}

/**
 * Execute the binary
 */
function executeBinary() {
  const binaryName = getBinaryName();
  const binaryPath = join(__dirname, '..', 'dist', 'binaries', binaryName);

  // Check if binary exists
  if (!existsSync(binaryPath)) {
    console.error(`Error: Binary not found at ${binaryPath}`);
    console.error('This might happen if:');
    console.error('1. The postinstall script failed to run');
    console.error('2. The platform is not supported');
    console.error('3. The package was not installed correctly');
    console.error('');
    console.error('Try reinstalling the package:');
    console.error('  npm uninstall -g @serve.zone/spark');
    console.error('  npm install -g @serve.zone/spark');
    process.exit(1);
  }

  // Spawn the binary with all arguments passed through
  const child = spawn(binaryPath, process.argv.slice(2), {
    stdio: 'inherit',
    shell: false
  });

  // Handle child process events
  child.on('error', (err) => {
    console.error(`Error executing spark: ${err.message}`);
    process.exit(1);
  });

  child.on('exit', (code, signal) => {
    if (signal) {
      process.kill(process.pid, signal);
    } else {
      process.exit(code || 0);
    }
  });

  // Forward signals to child process
  const signals = ['SIGINT', 'SIGTERM', 'SIGHUP'];
  signals.forEach(signal => {
    process.on(signal, () => {
      if (!child.killed) {
        child.kill(signal);
      }
    });
  });
}

// Execute
executeBinary();
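The wrapper's `getBinaryName()` boils down to a platform/arch lookup table. A hypothetical one-liner mirroring that mapping for the current machine (assumes Node is on PATH):

```bash
node -e 'const p={darwin:"macos",linux:"linux",win32:"windows"}[process.platform];
console.log(`spark-${p}-${process.arch}${process.platform==="win32"?".exe":""}`)'
```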
changelog.md (Normal file, 47 lines added)
@@ -0,0 +1,47 @@
# Changelog

## 2025-10-23 - 1.2.3 - fix(package)
Add .claude local settings with development permissions

- Add .claude/settings.local.json to define local development permissions for Claude tooling
- No runtime or library code modified; this is a local configuration file
- Patch bump recommended

## 2024-12-20 - 1.2.2 - fix(core)
Refactored configuration management classes and improved service update handling

- Replaced SparkLocalConfig with SparkConfig for configuration management.
- Improved service handling and update check logic.
- Consolidated service definition and update logic for better maintainability.

## 2024-12-19 - 1.2.1 - fix(taskmanager)
Remove checkinSlackTask from SparkTaskManager for streamlined task management

- checkinSlackTask has been removed from the task manager class.
- Removal of the slack check-in task allows the system to focus on essential update tasks.

## 2024-12-18 - 1.2.0 - feat(core)
Initial commit of the Spark project with core functionalities for server management and integration with Docker.

- Add core functionalities for server maintenance and configuration.
- Integrate Docker for advanced task scheduling and service management.
- Provide CLI commands for daemon management and task execution.

## 2024-12-18 - 1.1.0 - feat(core)
Update package dependencies and improve API integration.

- Updated devDependencies and dependencies in package.json.
- Integrated new package @serve.zone/api.
- Updated identityArg in SparkLocalConfig for userHomeDir kvStore.

## 2024-06-13 - 1.0.85 to 1.0.90 - Core Updates
Routine updates and fixes to core functionality.

- Updated core component throughout versions for enhanced stability
- Incremental improvements applied on versions 1.0.85 to 1.0.90

## 2024-05-08 - 1.0.82 to 1.0.85 - Core Enhancements
Consistent updates made to improve core operations.

- Updates focused on core functionality for improved performance
- Series of updates applied from versions 1.0.82 to 1.0.85
(deleted file, 4 lines)
@@ -1,4 +0,0 @@
-#!/usr/bin/env node
-process.env.CLI_CALL = 'true';
-import * as cliTool from './ts/index.js';
-cliTool.runCli();
cli.js (deleted, 4 lines)
@@ -1,4 +0,0 @@
-#!/usr/bin/env node
-process.env.CLI_CALL = 'true';
-const cliTool = await import('./dist_ts/index.js');
-cliTool.runCli();
(deleted file, 5 lines)
@@ -1,5 +0,0 @@
-#!/usr/bin/env node
-process.env.CLI_CALL = 'true';
-
-import * as tsrun from '@git.zone/tsrun';
-tsrun.runPath('./cli.child.js', import.meta.url);
deno.json (Normal file, 62 lines added)
@@ -0,0 +1,62 @@
{
  "name": "@serve.zone/spark",
  "version": "1.2.3",
  "exports": "./mod.ts",
  "tasks": {
    "dev": "deno run --allow-all mod.ts",
    "compile": "deno task compile:all",
    "compile:all": "bash scripts/compile-all.sh",
    "test": "deno test --allow-all test/",
    "test:watch": "deno test --allow-all --watch test/",
    "check": "deno check mod.ts",
    "fmt": "deno fmt",
    "lint": "deno lint"
  },
  "lint": {
    "rules": {
      "tags": [
        "recommended"
      ]
    }
  },
  "fmt": {
    "useTabs": false,
    "lineWidth": 100,
    "indentWidth": 2,
    "semiColons": true,
    "singleQuote": true
  },
  "compilerOptions": {
    "lib": [
      "deno.window"
    ],
    "strict": true
  },
  "imports": {
    "@std/path": "jsr:@std/path@^1.0.0",
    "@std/fmt": "jsr:@std/fmt@^1.0.0",
    "@std/assert": "jsr:@std/assert@^1.0.0",
    "@serve.zone/interfaces": "npm:@serve.zone/interfaces@^4.5.1",
    "@serve.zone/api": "npm:@serve.zone/api@^4.5.1",
    "@apiclient.xyz/docker": "npm:@apiclient.xyz/docker@^1.2.7",
    "@push.rocks/npmextra": "npm:@push.rocks/npmextra@^5.1.2",
    "@push.rocks/projectinfo": "npm:@push.rocks/projectinfo@^5.0.1",
    "@push.rocks/qenv": "npm:@push.rocks/qenv@^6.1.0",
    "@push.rocks/smartcli": "npm:@push.rocks/smartcli@^4.0.11",
    "@push.rocks/smartdaemon": "npm:@push.rocks/smartdaemon@^2.0.3",
    "@push.rocks/smartdelay": "npm:@push.rocks/smartdelay@^3.0.5",
    "@push.rocks/smartfile": "npm:@push.rocks/smartfile@^11.0.23",
    "@push.rocks/smartjson": "npm:@push.rocks/smartjson@^5.0.20",
    "@push.rocks/smartlog": "npm:@push.rocks/smartlog@^3.0.7",
    "@push.rocks/smartlog-destination-local": "npm:@push.rocks/smartlog-destination-local@^9.0.0",
    "@push.rocks/smartpath": "npm:@push.rocks/smartpath@^5.0.5",
    "@push.rocks/smartshell": "npm:@push.rocks/smartshell@^3.2.2",
    "@push.rocks/smartupdate": "npm:@push.rocks/smartupdate@^2.0.4",
    "@push.rocks/taskbuffer": "npm:@push.rocks/taskbuffer@^3.0.10",
    "@push.rocks/smartexpect": "npm:@push.rocks/smartexpect@^1.0.15",
    "@push.rocks/smartrx": "npm:@push.rocks/smartrx@^3.0.10",
    "@push.rocks/smartpromise": "npm:@push.rocks/smartpromise@^4.0.0",
    "@push.rocks/smartstring": "npm:@push.rocks/smartstring@^4.0.0",
    "@push.rocks/smarttime": "npm:@push.rocks/smarttime@^4.0.0"
  }
}
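The `tasks` map above gives the day-to-day entry points; for example:

```bash
deno task check     # type-check mod.ts
deno task lint      # run the recommended lint rules
deno task test      # deno test --allow-all test/
deno task compile   # cross-compile all platform binaries via scripts/compile-all.sh
```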
install.sh (Executable file, 292 lines added)
@@ -0,0 +1,292 @@
#!/bin/bash

# SPARK Installer Script
# Downloads and installs pre-compiled SPARK binary from releases
#
# Usage:
#   Direct piped installation (recommended):
#     curl -sSL https://code.foss.global/serve.zone/spark/raw/branch/main/install.sh | sudo bash
#
#   With version specification:
#     curl -sSL https://code.foss.global/serve.zone/spark/raw/branch/main/install.sh | sudo bash -s -- --version v1.2.2
#
# Options:
#   -h, --help           Show this help message
#   --version VERSION    Install specific version (e.g., v1.2.2)
#   --install-dir DIR    Installation directory (default: /opt/spark)

set -e

# Default values
SHOW_HELP=0
SPECIFIED_VERSION=""
INSTALL_DIR="/opt/spark"
GITEA_BASE_URL="https://code.foss.global"
GITEA_REPO="serve.zone/spark"

# Parse command line arguments
while [[ $# -gt 0 ]]; do
  case $1 in
    -h|--help)
      SHOW_HELP=1
      shift
      ;;
    --version)
      SPECIFIED_VERSION="$2"
      shift 2
      ;;
    --install-dir)
      INSTALL_DIR="$2"
      shift 2
      ;;
    *)
      echo "Unknown option: $1"
      echo "Use -h or --help for usage information"
      exit 1
      ;;
  esac
done

if [ $SHOW_HELP -eq 1 ]; then
  echo "SPARK Installer Script"
  echo "Downloads and installs pre-compiled SPARK binary"
  echo ""
  echo "Usage: $0 [options]"
  echo ""
  echo "Options:"
  echo "  -h, --help           Show this help message"
  echo "  --version VERSION    Install specific version (e.g., v1.2.2)"
  echo "  --install-dir DIR    Installation directory (default: /opt/spark)"
  echo ""
  echo "Examples:"
  echo "  # Install latest version"
  echo "  curl -sSL https://code.foss.global/serve.zone/spark/raw/branch/main/install.sh | sudo bash"
  echo ""
  echo "  # Install specific version"
  echo "  curl -sSL https://code.foss.global/serve.zone/spark/raw/branch/main/install.sh | sudo bash -s -- --version v1.2.2"
  exit 0
fi

# Check if running as root
if [ "$EUID" -ne 0 ]; then
  echo "Please run as root (sudo bash install.sh or pipe to sudo bash)"
  exit 1
fi

# Helper function to detect OS and architecture
detect_platform() {
  local os=$(uname -s)
  local arch=$(uname -m)

  # Map OS
  case "$os" in
    Linux)
      os_name="linux"
      ;;
    Darwin)
      os_name="macos"
      ;;
    MINGW*|MSYS*|CYGWIN*)
      os_name="windows"
      ;;
    *)
      echo "Error: Unsupported operating system: $os"
      echo "Supported: Linux, macOS, Windows"
      exit 1
      ;;
  esac

  # Map architecture
  case "$arch" in
    x86_64|amd64)
      arch_name="x64"
      ;;
    aarch64|arm64)
      arch_name="arm64"
      ;;
    *)
      echo "Error: Unsupported architecture: $arch"
      echo "Supported: x86_64/amd64 (x64), aarch64/arm64 (arm64)"
      exit 1
      ;;
  esac

  # Construct binary name
  if [ "$os_name" = "windows" ]; then
    echo "spark-${os_name}-${arch_name}.exe"
  else
    echo "spark-${os_name}-${arch_name}"
  fi
}

# Get latest release version from Gitea API
get_latest_version() {
  echo "Fetching latest release version from Gitea..." >&2

  local api_url="${GITEA_BASE_URL}/api/v1/repos/${GITEA_REPO}/releases/latest"
  local response=$(curl -sSL "$api_url" 2>/dev/null)

  if [ $? -ne 0 ] || [ -z "$response" ]; then
    echo "Error: Failed to fetch latest release information from Gitea API" >&2
    echo "URL: $api_url" >&2
    exit 1
  fi

  # Extract tag_name from JSON response
  local version=$(echo "$response" | grep -o '"tag_name":"[^"]*"' | cut -d'"' -f4)

  if [ -z "$version" ]; then
    echo "Error: Could not determine latest version from API response" >&2
    exit 1
  fi

  echo "$version"
}

# Main installation process
echo "================================================"
echo " SPARK Installation Script"
echo "================================================"
echo ""

# Detect platform
BINARY_NAME=$(detect_platform)
echo "Detected platform: $BINARY_NAME"
echo ""

# Determine version to install
if [ -n "$SPECIFIED_VERSION" ]; then
  VERSION="$SPECIFIED_VERSION"
  echo "Installing specified version: $VERSION"
else
  VERSION=$(get_latest_version)
  echo "Installing latest version: $VERSION"
fi
echo ""

# Construct download URL
DOWNLOAD_URL="${GITEA_BASE_URL}/${GITEA_REPO}/releases/download/${VERSION}/${BINARY_NAME}"
echo "Download URL: $DOWNLOAD_URL"
echo ""

# Check if service is running and stop it
SERVICE_WAS_RUNNING=0
if systemctl is-enabled --quiet spark 2>/dev/null || systemctl is-active --quiet spark 2>/dev/null; then
  SERVICE_WAS_RUNNING=1
  if systemctl is-active --quiet spark 2>/dev/null; then
    echo "Stopping SPARK service..."
    systemctl stop spark
  fi
fi

# Also check for smartdaemon_spark service (legacy)
if systemctl is-enabled --quiet smartdaemon_spark 2>/dev/null || systemctl is-active --quiet smartdaemon_spark 2>/dev/null; then
  if systemctl is-active --quiet smartdaemon_spark 2>/dev/null; then
    echo "Stopping legacy smartdaemon_spark service..."
    systemctl stop smartdaemon_spark
    systemctl disable smartdaemon_spark 2>/dev/null
  fi
fi

# Clean installation directory - ensure only binary exists
if [ -d "$INSTALL_DIR" ]; then
  echo "Cleaning installation directory: $INSTALL_DIR"
  rm -rf "$INSTALL_DIR"
fi

# Create fresh installation directory
echo "Creating installation directory: $INSTALL_DIR"
mkdir -p "$INSTALL_DIR"

# Download binary
echo "Downloading SPARK binary..."
TEMP_FILE="$INSTALL_DIR/spark.download"
curl -sSL "$DOWNLOAD_URL" -o "$TEMP_FILE"

if [ $? -ne 0 ]; then
  echo "Error: Failed to download binary from $DOWNLOAD_URL"
  echo ""
  echo "Please check:"
  echo "  1. Your internet connection"
  echo "  2. The specified version exists: ${GITEA_BASE_URL}/${GITEA_REPO}/releases"
  echo "  3. The platform binary is available for this release"
  rm -f "$TEMP_FILE"
  exit 1
fi

# Check if download was successful (file exists and not empty)
if [ ! -s "$TEMP_FILE" ]; then
  echo "Error: Downloaded file is empty or does not exist"
  rm -f "$TEMP_FILE"
  exit 1
fi

# Move to final location
BINARY_PATH="$INSTALL_DIR/spark"
mv "$TEMP_FILE" "$BINARY_PATH"

if [ $? -ne 0 ] || [ ! -f "$BINARY_PATH" ]; then
  echo "Error: Failed to move binary to $BINARY_PATH"
  rm -f "$TEMP_FILE" 2>/dev/null
  exit 1
fi

# Make executable
chmod +x "$BINARY_PATH"

if [ $? -ne 0 ]; then
  echo "Error: Failed to make binary executable"
  exit 1
fi

echo "Binary installed successfully to: $BINARY_PATH"
echo ""

# Check if /usr/local/bin is in PATH
if [[ ":$PATH:" == *":/usr/local/bin:"* ]]; then
  BIN_DIR="/usr/local/bin"
else
  BIN_DIR="/usr/bin"
fi

# Create symlink for global access
ln -sf "$BINARY_PATH" "$BIN_DIR/spark"
echo "Symlink created: $BIN_DIR/spark -> $BINARY_PATH"

echo ""

# Restart service if it was running before update
if [ $SERVICE_WAS_RUNNING -eq 1 ]; then
  echo "Restarting SPARK service..."
  systemctl start spark
  echo "Service restarted successfully."
  echo ""
fi

echo "================================================"
echo " SPARK Installation Complete!"
echo "================================================"
echo ""
echo "Installation details:"
echo "  Binary location: $BINARY_PATH"
echo "  Symlink location: $BIN_DIR/spark"
echo "  Version: $VERSION"
echo ""

# Check if configuration exists
if [ -f "/etc/spark/config.json" ]; then
  echo "Configuration: /etc/spark/config.json (preserved)"
  echo ""
  echo "Your existing configuration has been preserved."
  if [ $SERVICE_WAS_RUNNING -eq 1 ]; then
    echo "The service has been restarted with your current settings."
  else
    echo "Start the service with: sudo spark installdaemon"
  fi
else
  echo "Get started:"
  echo "  spark --version"
  echo "  spark help"
  echo "  spark installdaemon  # Install as system daemon"
fi
echo ""
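After a piped install, a quick sanity check follows the script's own "Get started" hints; the systemctl line is only meaningful once `sudo spark installdaemon` has been run:

```bash
spark --version
spark help
systemctl status spark --no-pager
```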
mod.ts (Normal file, 46 lines added)
@@ -0,0 +1,46 @@
#!/usr/bin/env -S deno run --allow-all

/**
 * Spark - Server Configuration and Management Tool
 *
 * A comprehensive tool for maintaining and configuring servers, integrating
 * with Docker and supporting advanced task scheduling, targeted at the Servezone
 * infrastructure and used by @serve.zone/cloudly as a cluster node server system manager.
 *
 * Required Permissions:
 * - --allow-net: API communication, Docker access
 * - --allow-read: Configuration files, project files
 * - --allow-write: Logs, configuration updates
 * - --allow-run: systemctl, Docker commands
 * - --allow-env: Environment variables
 * - --allow-sys: System information
 *
 * @module
 */

import * as cli from './ts/spark.cli.ts';

/**
 * Main entry point for the Spark application
 * Sets up the CLI environment and executes the requested command
 */
async function main(): Promise<void> {
  // Set environment variable to indicate CLI call
  Deno.env.set('CLI_CALL', 'true');

  // Execute the CLI
  await cli.runCli();
}

// Execute main and handle errors
if (import.meta.main) {
  try {
    await main();
  } catch (error) {
    console.error(`Error: ${error instanceof Error ? error.message : String(error)}`);
    Deno.exit(1);
  }
}

// Export for library usage
export * from './ts/spark.classes.spark.ts';
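During development the module can be run straight from source. The first form mirrors the `dev` task in deno.json; the second grants only the permissions enumerated in the module header:

```bash
deno run --allow-all mod.ts help
deno run --allow-net --allow-read --allow-write --allow-run --allow-env --allow-sys mod.ts help
```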
npmextra.json
@@ -1,39 +1 @@
-{
-  "gitzone": {
-    "projectType": "npm",
-    "module": {
-      "githost": "gitlab.com",
-      "gitscope": "losslessone/services/initzone",
-      "gitrepo": "spark",
-      "description": "A comprehensive tool for maintaining and configuring servers, integrating with Docker and supporting advanced task scheduling, targeted at the Servezone infrastructure.",
-      "npmPackagename": "@losslessone_private/spark",
-      "license": "MIT",
-      "projectDomain": "https://lossless.one",
-      "keywords": [
-        "server management",
-        "devops",
-        "automation",
-        "docker",
-        "configuration management",
-        "daemon service",
-        "continuous integration",
-        "continuous deployment",
-        "deployment automation",
-        "service orchestration",
-        "node.js",
-        "task scheduling",
-        "CLI",
-        "logging",
-        "server maintenance"
-      ]
-    }
-  },
-  "npmci": {
-    "npmGlobalTools": [],
-    "npmAccessLevel": "private",
-    "npmRegistryUrl": "verdaccio.lossless.one"
-  },
-  "tsdoc": {
-    "legal": "\n## License and Legal Information\n\nThis repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. \n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or if you require further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n"
-  }
-}
+{}
package.json (111 changes)
@@ -1,61 +1,7 @@
 {
   "name": "@serve.zone/spark",
-  "version": "1.0.89",
-  "private": false,
-  "description": "A comprehensive tool for maintaining and configuring servers, integrating with Docker and supporting advanced task scheduling, targeted at the Servezone infrastructure.",
+  "version": "1.2.3",
+  "description": "A comprehensive tool for maintaining and configuring servers, integrating with Docker and supporting advanced task scheduling, targeted at the Servezone infrastructure and used by @serve.zone/cloudly as a cluster node server system manager.",
-  "main": "dist_ts/index.js",
-  "typings": "dist_ts/index.d.ts",
-  "author": "Task Venture Capital GmbH",
-  "license": "MIT",
-  "scripts": {
-    "test": "(tstest test/ --web)",
-    "build": "(tsbuild --web --allowimplicitany)",
-    "buildDocs": "tsdoc"
-  },
-  "bin": {
-    "spark": "./cli.js"
-  },
-  "devDependencies": {
-    "@git.zone/tsbuild": "^2.1.80",
-    "@git.zone/tsrun": "^1.2.39",
-    "@git.zone/tstest": "^1.0.60",
-    "@push.rocks/tapbundle": "^5.0.4",
-    "@types/node": "20.14.2"
-  },
-  "dependencies": {
-    "@apiclient.xyz/docker": "^1.2.2",
-    "@push.rocks/npmextra": "^5.0.17",
-    "@push.rocks/projectinfo": "^5.0.1",
-    "@push.rocks/qenv": "^6.0.5",
-    "@push.rocks/smartcli": "^4.0.11",
-    "@push.rocks/smartdaemon": "^2.0.3",
-    "@push.rocks/smartdelay": "^3.0.5",
-    "@push.rocks/smartfile": "^11.0.20",
-    "@push.rocks/smartjson": "^5.0.20",
-    "@push.rocks/smartlog": "^3.0.7",
-    "@push.rocks/smartlog-destination-local": "^9.0.0",
-    "@push.rocks/smartpath": "^5.0.5",
-    "@push.rocks/smartshell": "^3.0.5",
-    "@push.rocks/smartupdate": "^2.0.4",
-    "@push.rocks/taskbuffer": "^3.0.10",
-    "@serve.zone/interfaces": "^1.0.74"
-  },
-  "files": [
-    "ts/**/*",
-    "ts_web/**/*",
-    "dist/**/*",
-    "dist_*/**/*",
-    "dist_ts/**/*",
-    "dist_ts_web/**/*",
-    "assets/**/*",
-    "cli.js",
-    "npmextra.json",
-    "readme.md"
-  ],
-  "browserslist": [
-    "last 1 chrome versions"
-  ],
-  "type": "module",
   "keywords": [
     "server management",
     "devops",
@@ -67,10 +13,57 @@
     "continuous deployment",
     "deployment automation",
     "service orchestration",
-    "node.js",
+    "deno",
     "task scheduling",
     "CLI",
     "logging",
-    "server maintenance"
-  ]
+    "server maintenance",
+    "serve.zone",
+    "cluster management",
+    "system manager",
+    "server configuration"
+  ],
+  "homepage": "https://code.foss.global/serve.zone/spark",
+  "bugs": {
+    "url": "https://code.foss.global/serve.zone/spark/issues"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://code.foss.global/serve.zone/spark.git"
+  },
+  "author": "Serve Zone",
+  "license": "MIT",
+  "type": "module",
+  "bin": {
+    "spark": "./bin/spark-wrapper.js"
+  },
+  "scripts": {
+    "postinstall": "node scripts/install-binary.js",
+    "prepublishOnly": "echo 'Publishing SPARK binaries to npm...'",
+    "test": "echo 'Tests are run with Deno: deno task test'",
+    "build": "echo 'no build needed'"
+  },
+  "files": [
+    "bin/",
+    "scripts/install-binary.js",
+    "readme.md",
+    "license",
+    "changelog.md"
+  ],
+  "engines": {
+    "node": ">=14.0.0"
+  },
+  "os": [
+    "darwin",
+    "linux",
+    "win32"
+  ],
+  "cpu": [
+    "x64",
+    "arm64"
+  ],
+  "publishConfig": {
+    "access": "public",
+    "registry": "https://registry.npmjs.org/"
+  }
 }
pnpm-lock.yaml (generated, 7606 changes)
File diff suppressed because it is too large
readme.hints.md
@@ -1,2 +1,2 @@
-- this package is part of serve.zone and that spark's main purpose is the be used by @serve.zone/cloudly as a cluster node server system manager.
+- make sure to mention that this package is part of serve.zone and **spark's main purpose** is the be used by @serve.zone/cloudly as a cluster node server system manager.
 - it is used to maintain and configure servers on the base OS level
751
readme.md
751
readme.md
@@ -1,291 +1,556 @@
# @serve.zone/spark 🔥

> **A powerful Deno-powered server management tool for the modern infrastructure**

Spark is a comprehensive tool for maintaining and configuring servers at the OS level, with deep Docker integration and advanced task scheduling capabilities. Built for the serve.zone infrastructure, Spark serves as the backbone for [@serve.zone/cloudly](https://code.foss.global/serve.zone/cloudly) cluster management, handling everything from daemon orchestration to container lifecycle management.

## ✨ Features

- 🚀 **Standalone Binary** - No runtime dependencies, just download and run
- 🐳 **Docker Integration** - Native support for Docker services, stacks, secrets, and networks
- ⚙️ **Daemon Management** - Systemd integration for reliable service operation
- 📅 **Task Scheduling** - Cron-like task scheduling for automation
- 🔄 **Auto-Updates** - Self-updating capabilities for zero-downtime deployments
- 🔐 **Secure Secrets** - Docker secrets management for sensitive data
- 📊 **Comprehensive Logging** - Built-in logging with multiple severity levels
- 🎯 **Mode Support** - Cloudly and CoreFlow node operation modes

## 🚀 Installation

### Quick Install (Recommended)

Install the latest version via our installation script:

```bash
curl -sSL https://code.foss.global/serve.zone/spark/raw/branch/master/install.sh | sudo bash
```

### npm Install

Install via npm (automatically downloads the correct binary for your platform):

```bash
npm install -g @serve.zone/spark
```

### Specific Version

```bash
curl -sSL https://code.foss.global/serve.zone/spark/raw/branch/master/install.sh | sudo bash -s -- --version v1.2.2
```

### Manual Installation

Download the binary for your platform from the [releases page](https://code.foss.global/serve.zone/spark/releases) and make it executable:

```bash
# Example for Linux x64
wget https://code.foss.global/serve.zone/spark/releases/download/v1.2.2/spark-linux-x64
chmod +x spark-linux-x64
sudo mv spark-linux-x64 /usr/local/bin/spark
```
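When installing manually, you may also want to verify the download before moving the binary into place. A minimal sketch, assuming checksums are published alongside the release as `SHA256SUMS.txt` (the asset name is an assumption, not confirmed by this readme):

```bash
# Fetch the checksums file next to the binary (assumed asset name)
wget https://code.foss.global/serve.zone/spark/releases/download/v1.2.2/SHA256SUMS.txt
# Verify just the file we downloaded; entries for other binaries are skipped
sha256sum --ignore-missing -c SHA256SUMS.txt
```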
### Supported Platforms

- 🐧 Linux (x86_64, ARM64)
- 🍎 macOS (Intel, Apple Silicon)
- 🪟 Windows (x86_64)

## 🎯 Quick Start

### Install as System Daemon

Set up Spark to run as a systemd service:

```bash
sudo spark installdaemon
```

This command (the result can be inspected as sketched after this list):
- Creates a systemd service unit
- Enables automatic startup on boot
- Starts the Spark daemon immediately
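Under the hood this is plain systemd plumbing, so standard tooling works for inspection (the unit name `smartdaemon_spark` is the one referenced in the troubleshooting section below):

```bash
# Show the generated unit file, its enablement state, and its runtime status
systemctl cat smartdaemon_spark
systemctl is-enabled smartdaemon_spark
systemctl status smartdaemon_spark
```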
### Configure Operation Mode

Spark supports different operation modes for various use cases:

```bash
# For Cloudly cluster management
sudo spark asdaemon --mode cloudly

# For CoreFlow node management
sudo spark asdaemon --mode coreflow-node
```

### View Logs

Monitor Spark daemon activity in real-time:

```bash
sudo spark logs
```

## 📖 CLI Reference

### Core Commands

#### `spark installdaemon`

Installs Spark as a system daemon service. This sets up a systemd unit that automatically starts on boot.

```bash
sudo spark installdaemon
```

#### `spark updatedaemon`

Updates the daemon service configuration to the current Spark version.

```bash
sudo spark updatedaemon
```

#### `spark asdaemon [--mode MODE]`

Runs Spark in daemon mode. Requires a mode to be specified (either via the `--mode` flag or from saved configuration).

```bash
sudo spark asdaemon --mode cloudly
```

**Available modes:**
- `cloudly` - Manages Cloudly services
- `coreflow-node` - Manages CoreFlow node services

#### `spark logs`

Displays real-time logs from the Spark daemon service.

```bash
sudo spark logs
```

#### `spark prune`

Performs a complete cleanup of Docker resources and restarts services. Use with caution!

```bash
sudo spark prune
```

This command (roughly equivalent to the manual sequence sketched after this list):
1. Stops the Spark daemon
2. Removes all Docker stacks
3. Removes all Docker services
4. Removes all Docker secrets
5. Removes specified Docker networks
6. Prunes the Docker system
7. Restarts Docker
8. Restarts the Spark daemon
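For orientation, a rough manual equivalent using standard Docker and systemd tooling (a sketch only; these are not the exact commands Spark runs, and the unit name is taken from the troubleshooting section):

```bash
sudo systemctl stop smartdaemon_spark
# Remove all stacks, services, and secrets
docker stack ls --format '{{.Name}}' | xargs -r docker stack rm
docker service ls -q | xargs -r docker service rm
docker secret ls -q | xargs -r docker secret rm
# Prune everything that is now unused, then bounce Docker and Spark
docker system prune -af
sudo systemctl restart docker
sudo systemctl start smartdaemon_spark
```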
### Advanced Usage

#### Check Version

```bash
spark --version
```

#### Get Help

```bash
spark help
```

## 🔧 Programmatic Usage

While Spark is primarily designed as a CLI tool and daemon, you can also use it as a library in your Deno projects.

### Import from Deno

```typescript
import { Spark } from 'https://code.foss.global/serve.zone/spark/raw/branch/master/mod.ts';
```

### Basic Usage

```typescript
import { Spark } from './mod.ts';

// Create a Spark instance
const spark = new Spark();

// Start the daemon programmatically
await spark.daemonStart();
```

### Task Scheduling

Schedule automated tasks using the built-in task manager:

```typescript
import { Spark } from './mod.ts';

const spark = new Spark();

// Define a custom task
const backupTask = {
  name: 'daily-backup',
  taskFunction: async () => {
    console.log('Running backup...');
    // Your backup logic here
  },
};

// Schedule it to run daily at 2 AM
spark.sparkTaskManager.taskmanager.addAndScheduleTask(
  backupTask,
  '0 2 * * *'
);
```

### Service Management

Manage Docker services programmatically:

```typescript
import { Spark } from './mod.ts';

const spark = new Spark();

// Add a service to manage
spark.sparkUpdateManager.services.push({
  name: 'my-app',
  image: 'code.foss.global/myorg/myapp',
  url: 'myapp',
  environment: 'production',
  port: '3000',
  secretJson: {
    API_KEY: 'secret-value',
    DATABASE_URL: 'postgresql://...',
  },
});

// Start managing services
await spark.sparkUpdateManager.start();
```

### Configuration Management

Access and modify Spark's configuration:

```typescript
import { Spark } from './mod.ts';

const spark = new Spark();

// Write configuration
await spark.sparkConfig.kvStore.writeKey('mode', 'cloudly');

// Read configuration
const mode = await spark.sparkConfig.kvStore.readKey('mode');
console.log(`Current mode: ${mode}`);
```

### Logging

Use Spark's built-in logger for consistent logging:

```typescript
import { logger } from './ts/spark.logging.ts';

// Log at different levels
logger.log('info', 'Application starting...');
logger.log('ok', 'Service deployed successfully');
logger.log('warn', 'High memory usage detected');
logger.log('error', 'Failed to connect to database');
logger.log('success', 'Backup completed');
```

## 🏗️ Architecture

### Core Components

```
┌─────────────────────────────────────────┐
│            Spark Instance               │
├─────────────────────────────────────────┤
│                                         │
│   ┌─────────────────────────────────┐   │
│   │        SparkConfig              │   │
│   │  - KV Store                     │   │
│   │  - Mode Configuration           │   │
│   └─────────────────────────────────┘   │
│                                         │
│   ┌─────────────────────────────────┐   │
│   │      SparkTaskManager           │   │
│   │  - Cron Scheduling              │   │
│   │  - Task Execution               │   │
│   └─────────────────────────────────┘   │
│                                         │
│   ┌─────────────────────────────────┐   │
│   │      SparkServicesManager       │   │
│   │  - Docker Integration           │   │
│   │  - Service Updates              │   │
│   │  - Secret Management            │   │
│   └─────────────────────────────────┘   │
│                                         │
│   ┌─────────────────────────────────┐   │
│   │        SmartDaemon              │   │
│   │  - Systemd Integration          │   │
│   │  - Service Lifecycle            │   │
│   └─────────────────────────────────┘   │
│                                         │
└─────────────────────────────────────────┘
```

### Key Classes

- **`Spark`** - Main orchestrator class that coordinates all components
- **`SparkConfig`** - Handles configuration storage and retrieval
- **`SparkTaskManager`** - Manages scheduled tasks and automation
- **`SparkServicesManager`** - Manages Docker services and updates
- **`SparkInfo`** - Provides project and version information

## 🔄 Update Management

Spark includes self-updating capabilities:

```typescript
import { Spark } from './mod.ts';

const spark = new Spark();

// Check for and apply updates
await spark.sparkUpdateManager.updateServices();
```

The update manager (a sketch of the flow follows this list):
- Pulls latest Docker images
- Manages service rollouts
- Handles zero-downtime deployments
- Manages Docker secrets securely
## 🐳 Docker Integration

### Service Definition

```typescript
const serviceDefinition = {
  name: 'api-server',
  image: 'code.foss.global/myorg/api',
  url: 'api',
  environment: 'production',
  port: '8080',
  secretJson: {
    JWT_SECRET: 'your-jwt-secret',
    DB_PASSWORD: 'your-db-password',
  },
};

spark.sparkUpdateManager.services.push(serviceDefinition);
```

### Stack Management

Spark manages Docker stacks for complex multi-service deployments:

```bash
# View running stacks
docker stack ls

# Spark manages these automatically
```
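For comparison, deploying a stack by hand with the plain Docker CLI looks like this (standard Docker commands, shown only to illustrate what Spark automates; the compose file name is hypothetical):

```bash
# Deploy or update a stack from a compose file
docker stack deploy -c docker-compose.yml mystack

# Inspect the services the stack created
docker stack services mystack
```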
## 🛠️ Development

### Prerequisites

- [Deno](https://deno.land/) v2.x or later

### Running from Source

```bash
# Clone the repository
git clone https://code.foss.global/serve.zone/spark.git
cd spark

# Run directly
deno run --allow-all mod.ts

# Run tests
deno test --allow-all test/

# Type check
deno check mod.ts

# Format code
deno fmt

# Lint
deno lint
```

### Building Binaries

Compile for all supported platforms:

```bash
bash scripts/compile-all.sh
```

Binaries will be output to `dist/binaries/`.

### Compile for Specific Platform

```bash
# Linux x64
deno compile --allow-all --output spark-linux-x64 --target x86_64-unknown-linux-gnu mod.ts

# macOS ARM64
deno compile --allow-all --output spark-macos-arm64 --target aarch64-apple-darwin mod.ts
```

## 🔐 Security

### Permissions

Spark requires the following Deno permissions (an explicit invocation is sketched after this list):

- `--allow-net` - API communication, Docker socket access
- `--allow-read` - Configuration files, project files
- `--allow-write` - Logs, configuration updates
- `--allow-run` - systemctl, Docker commands
- `--allow-env` - Environment variables
- `--allow-sys` - System information
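If you prefer not to grant the blanket `--allow-all`, the list above translates into an explicit invocation (a sketch; scopes may need widening depending on your setup):

```bash
deno run \
  --allow-net \
  --allow-read \
  --allow-write \
  --allow-run \
  --allow-env \
  --allow-sys \
  mod.ts
```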
### Secrets Management

Always use Docker secrets for sensitive data:

```typescript
const serviceWithSecrets = {
  name: 'secure-app',
  image: 'myapp:latest',
  secretJson: {
    API_KEY: Deno.env.get('API_KEY')!,
    DB_PASSWORD: Deno.env.get('DB_PASSWORD')!,
  },
};
```

## 🐛 Troubleshooting

### Service Won't Start

Check the daemon status:
```bash
sudo systemctl status smartdaemon_spark
```

View recent logs:
```bash
sudo journalctl -u smartdaemon_spark -n 100
```

### Docker Issues

Verify Docker is running:
```bash
sudo systemctl status docker
```

Check Docker socket permissions:
```bash
sudo ls -la /var/run/docker.sock
```

### Configuration Issues

Check current mode:
```bash
# Evaluate a short inline module (deno eval runs with permissions enabled)
deno eval "
import { Spark } from './mod.ts';
const s = new Spark();
const mode = await s.sparkConfig.kvStore.readKey('mode');
console.log('Mode:', mode);
"
```

## 📝 Examples

### Automated System Maintenance

```typescript
import { Spark } from './mod.ts';

const spark = new Spark();

// Schedule weekly system updates
const updateTask = {
  name: 'system-update',
  taskFunction: async () => {
    const shell = new Deno.Command('bash', {
      args: ['-c', 'apt-get update && apt-get upgrade -y'],
    });
    await shell.output();
  },
};

// Every Sunday at 3 AM
spark.sparkTaskManager.taskmanager.addAndScheduleTask(
  updateTask,
  '0 3 * * 0'
);

await spark.daemonStart();
```

### Multi-Service Deployment

```typescript
import { Spark } from './mod.ts';

const spark = new Spark();

// Add multiple services
const services = [
  {
    name: 'frontend',
    image: 'code.foss.global/myorg/frontend',
    url: 'frontend',
    port: '80',
    environment: 'production',
  },
  {
    name: 'backend',
    image: 'code.foss.global/myorg/backend',
    url: 'backend',
    port: '3000',
    environment: 'production',
    secretJson: {
      DATABASE_URL: Deno.env.get('DATABASE_URL')!,
    },
  },
  {
    name: 'worker',
    image: 'code.foss.global/myorg/worker',
    url: 'worker',
    environment: 'production',
  },
];

services.forEach(svc => spark.sparkUpdateManager.services.push(svc));

await spark.daemonStart();
```

## 🤝 Support

For issues, questions, or contributions:

- 🐛 [Report Issues](https://code.foss.global/serve.zone/spark/issues)
- 📖 [View Source](https://code.foss.global/serve.zone/spark)

## License and Legal Information

This repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository.

**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.
@@ -295,9 +560,9 @@ This project is owned and maintained by Task Venture Capital GmbH. The names and
### Company Information

Task Venture Capital GmbH
Registered at District court Bremen HRB 35230 HB, Germany

For any legal inquiries or if you require further information, please contact us via email at hello@task.vc.

By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.
72
scripts/compile-all.sh
Executable file
@@ -0,0 +1,72 @@
#!/bin/bash
set -e

# Get version from deno.json
VERSION=$(grep -o '"version": *"[^"]*"' deno.json | cut -d'"' -f4)
BINARY_DIR="dist/binaries"

echo "================================================"
echo "  SPARK Compilation Script"
echo "  Version: ${VERSION}"
echo "================================================"
echo ""
echo "Compiling for all supported platforms..."
echo ""

# Clean up old binaries and create fresh directory
rm -rf "$BINARY_DIR"
mkdir -p "$BINARY_DIR"
echo "→ Cleaned old binaries from $BINARY_DIR"
echo ""

# Linux x86_64
echo "→ Compiling for Linux x86_64..."
deno compile --allow-all --no-check --output "$BINARY_DIR/spark-linux-x64" \
  --target x86_64-unknown-linux-gnu mod.ts
echo "  ✓ Linux x86_64 complete"
echo ""

# Linux ARM64
echo "→ Compiling for Linux ARM64..."
deno compile --allow-all --no-check --output "$BINARY_DIR/spark-linux-arm64" \
  --target aarch64-unknown-linux-gnu mod.ts
echo "  ✓ Linux ARM64 complete"
echo ""

# macOS x86_64
echo "→ Compiling for macOS x86_64..."
deno compile --allow-all --no-check --output "$BINARY_DIR/spark-macos-x64" \
  --target x86_64-apple-darwin mod.ts
echo "  ✓ macOS x86_64 complete"
echo ""

# macOS ARM64
echo "→ Compiling for macOS ARM64..."
deno compile --allow-all --no-check --output "$BINARY_DIR/spark-macos-arm64" \
  --target aarch64-apple-darwin mod.ts
echo "  ✓ macOS ARM64 complete"
echo ""

# Windows x86_64
echo "→ Compiling for Windows x86_64..."
deno compile --allow-all --no-check --output "$BINARY_DIR/spark-windows-x64.exe" \
  --target x86_64-pc-windows-msvc mod.ts
echo "  ✓ Windows x86_64 complete"
echo ""

echo "================================================"
echo "  Compilation Summary"
echo "================================================"
echo ""
ls -lh "$BINARY_DIR/" | tail -n +2
echo ""
echo "✓ All binaries compiled successfully!"
echo ""
echo "Binary location: $BINARY_DIR/"
echo ""
echo "To create a release:"
echo "  1. Test the binaries on their respective platforms"
echo "  2. Create a git tag: git tag v${VERSION}"
echo "  3. Push the tag: git push origin v${VERSION}"
echo "  4. Upload the binaries to the release"
echo ""
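If checksums should accompany the binaries, a plausible follow-up step after running the script would be (a hypothetical addition; the committed script does not do this itself):

```bash
# Write SHA256 checksums for every compiled binary into one file
(cd dist/binaries && sha256sum * > SHA256SUMS.txt)
```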
231
scripts/install-binary.js
Executable file
@@ -0,0 +1,231 @@
#!/usr/bin/env node

/**
 * SPARK npm postinstall script
 * Downloads the appropriate binary for the current platform from Gitea releases
 */

import { platform, arch } from 'os';
import { existsSync, mkdirSync, chmodSync, unlinkSync, createWriteStream } from 'fs';
import { join, dirname } from 'path';
import { fileURLToPath } from 'url';
import https from 'https';
import { pipeline } from 'stream';

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

// Configuration
const REPO_BASE = 'https://code.foss.global/serve.zone/spark';
const VERSION = process.env.npm_package_version || '1.2.2';

function getBinaryInfo() {
  const plat = platform();
  const architecture = arch();

  const platformMap = {
    'darwin': 'macos',
    'linux': 'linux',
    'win32': 'windows'
  };

  const archMap = {
    'x64': 'x64',
    'arm64': 'arm64'
  };

  const mappedPlatform = platformMap[plat];
  const mappedArch = archMap[architecture];

  if (!mappedPlatform || !mappedArch) {
    return { supported: false, platform: plat, arch: architecture };
  }

  let binaryName = `spark-${mappedPlatform}-${mappedArch}`;
  if (plat === 'win32') {
    binaryName += '.exe';
  }

  return {
    supported: true,
    platform: mappedPlatform,
    arch: mappedArch,
    binaryName,
    originalPlatform: plat
  };
}

function downloadFile(url, destination) {
  return new Promise((resolve, reject) => {
    console.log(`Downloading from: ${url}`);

    // Follow redirects
    const download = (url, redirectCount = 0) => {
      if (redirectCount > 5) {
        reject(new Error('Too many redirects'));
        return;
      }

      https.get(url, (response) => {
        if (response.statusCode === 301 || response.statusCode === 302) {
          console.log(`Following redirect to: ${response.headers.location}`);
          download(response.headers.location, redirectCount + 1);
          return;
        }

        if (response.statusCode !== 200) {
          reject(new Error(`Failed to download: ${response.statusCode} ${response.statusMessage}`));
          return;
        }

        const totalSize = parseInt(response.headers['content-length'], 10);
        let downloadedSize = 0;
        let lastProgress = 0;

        response.on('data', (chunk) => {
          downloadedSize += chunk.length;
          if (!totalSize) return; // content-length header missing; skip progress reporting
          const progress = Math.round((downloadedSize / totalSize) * 100);

          // Only log every 10% to reduce noise
          if (progress >= lastProgress + 10) {
            console.log(`Download progress: ${progress}%`);
            lastProgress = progress;
          }
        });

        const file = createWriteStream(destination);

        pipeline(response, file, (err) => {
          if (err) {
            reject(err);
          } else {
            console.log('Download complete!');
            resolve();
          }
        });
      }).on('error', reject);
    };

    download(url);
  });
}

async function main() {
  console.log('===========================================');
  console.log('  SPARK - Binary Installation');
  console.log('===========================================');
  console.log('');

  const binaryInfo = getBinaryInfo();

  if (!binaryInfo.supported) {
    console.error(`❌ Error: Unsupported platform/architecture: ${binaryInfo.platform}/${binaryInfo.arch}`);
    console.error('');
    console.error('Supported platforms:');
    console.error('  • Linux (x64, arm64)');
    console.error('  • macOS (x64, arm64)');
    console.error('  • Windows (x64)');
    console.error('');
    console.error('If you believe your platform should be supported, please file an issue:');
    console.error('  https://code.foss.global/serve.zone/spark/issues');
    process.exit(1);
  }

  console.log(`Platform: ${binaryInfo.platform} (${binaryInfo.originalPlatform})`);
  console.log(`Architecture: ${binaryInfo.arch}`);
  console.log(`Binary: ${binaryInfo.binaryName}`);
  console.log(`Version: ${VERSION}`);
  console.log('');

  // Create dist/binaries directory if it doesn't exist
  const binariesDir = join(__dirname, '..', 'dist', 'binaries');
  if (!existsSync(binariesDir)) {
    console.log('Creating binaries directory...');
    mkdirSync(binariesDir, { recursive: true });
  }

  const binaryPath = join(binariesDir, binaryInfo.binaryName);

  // Check if binary already exists and skip download
  if (existsSync(binaryPath)) {
    console.log('✓ Binary already exists, skipping download');
  } else {
    // Construct download URL
    // Try release URL first, fall back to raw branch if needed
    const releaseUrl = `${REPO_BASE}/releases/download/v${VERSION}/${binaryInfo.binaryName}`;
    const fallbackUrl = `${REPO_BASE}/raw/branch/master/dist/binaries/${binaryInfo.binaryName}`;

    console.log('Downloading platform-specific binary...');
    console.log('This may take a moment depending on your connection speed.');
    console.log('');

    try {
      // Try downloading from the release
      await downloadFile(releaseUrl, binaryPath);
    } catch (err) {
      console.log(`Release download failed: ${err.message}`);
      console.log('Trying fallback URL...');

      try {
        // Try fallback URL
        await downloadFile(fallbackUrl, binaryPath);
      } catch (fallbackErr) {
        console.error(`❌ Error: Failed to download binary`);
        console.error(`   Primary URL: ${releaseUrl}`);
        console.error(`   Fallback URL: ${fallbackUrl}`);
        console.error('');
        console.error('This might be because:');
        console.error('1. The release has not been created yet');
        console.error('2. Network connectivity issues');
        console.error('3. The version specified does not exist');
        console.error('');
        console.error('You can try:');
        console.error('1. Installing from source: https://code.foss.global/serve.zone/spark');
        console.error('2. Downloading the binary manually from the releases page');
        console.error('3. Using the install script: curl -sSL https://code.foss.global/serve.zone/spark/raw/branch/master/install.sh | sudo bash');

        // Clean up partial download
        if (existsSync(binaryPath)) {
          unlinkSync(binaryPath);
        }

        process.exit(1);
      }
    }

    console.log(`✓ Binary downloaded successfully`);
  }

  // On Unix-like systems, ensure the binary is executable
  if (binaryInfo.originalPlatform !== 'win32') {
    try {
      console.log('Setting executable permissions...');
      chmodSync(binaryPath, 0o755);
      console.log('✓ Binary permissions updated');
    } catch (err) {
      console.error(`⚠️  Warning: Could not set executable permissions: ${err.message}`);
      console.error('   You may need to manually run:');
      console.error(`   chmod +x ${binaryPath}`);
    }
  }

  console.log('');
  console.log('✅ SPARK installation completed successfully!');
  console.log('');
  console.log('You can now use SPARK by running:');
  console.log('  spark --help');
  console.log('');
  console.log('For daemon setup, run:');
  console.log('  sudo spark installdaemon');
  console.log('');
  console.log('===========================================');
}

// Run the installation
main().catch(err => {
  console.error(`❌ Installation failed: ${err.message}`);
  process.exit(1);
});
26
test.simple.ts
Normal file
@@ -0,0 +1,26 @@
#!/usr/bin/env -S deno run --allow-all

// Simple test to verify basic Deno functionality
import * as path from '@std/path';

console.log('Testing Deno migration for Spark...');
console.log('');
console.log('✅ Deno runtime works');
console.log('✅ Standard library imports work');

// Test path functionality
const testPath = path.join('/opt', 'spark');
console.log(`✅ Path operations work: ${testPath}`);

// Test basic imports from plugins
import * as smartdelay from '@push.rocks/smartdelay';
console.log('✅ @push.rocks/smartdelay import works');

import * as smartlog from '@push.rocks/smartlog';
console.log('✅ @push.rocks/smartlog import works');

console.log('');
console.log('Basic Deno functionality confirmed!');
console.log('');
console.log('Note: Full application may require additional dependency resolution');
console.log('for complex packages like @serve.zone/api that have many transitive dependencies.');
@@ -1,11 +0,0 @@
import { expect, tap } from '@push.rocks/tapbundle';
import * as spark from '../ts/index.js';

let testSpark: spark.Spark;

tap.test('should create a spark instance', async () => {
  testSpark = new spark.Spark();
  expect(testSpark).toBeInstanceOf(spark.Spark);
});

tap.start();
30
test/test.ts
Normal file
@@ -0,0 +1,30 @@
import { assert, assertEquals, assertExists } from '@std/assert';
import * as spark from '../ts/index.ts';

let testSpark: spark.Spark;

Deno.test('should create a spark instance', () => {
  testSpark = new spark.Spark();
  assert(testSpark instanceof spark.Spark);
  assertExists(testSpark);
});

Deno.test('should have spark info', () => {
  assertExists(testSpark.sparkInfo);
  assertExists(testSpark.sparkInfo.projectInfo);
  assertEquals(typeof testSpark.sparkInfo.projectInfo.name, 'string');
});

Deno.test('should have spark config', () => {
  assertExists(testSpark.sparkConfig);
  assertExists(testSpark.sparkConfig.kvStore);
});

Deno.test('should have update manager', () => {
  assertExists(testSpark.sparkUpdateManager);
  assert(Array.isArray(testSpark.sparkUpdateManager.services));
});

Deno.test('should have task manager', () => {
  assertExists(testSpark.sparkTaskManager);
});
@@ -1,8 +1,8 @@
 /**
- * autocreated commitinfo by @pushrocks/commitinfo
+ * autocreated commitinfo by @push.rocks/commitinfo
 */
 export const commitinfo = {
   name: '@serve.zone/spark',
-  version: '1.0.89',
-  description: 'A comprehensive tool for maintaining and configuring servers, integrating with Docker and supporting advanced task scheduling, targeted at the Servezone infrastructure.'
+  version: '1.2.3',
+  description: 'A comprehensive tool for maintaining and configuring servers, integrating with Docker and supporting advanced task scheduling, targeted at the Servezone infrastructure and used by @serve.zone/cloudly as a cluster node server system manager.'
 }
@@ -1,6 +1,6 @@
-export * from './spark.classes.spark.js';
+export * from './spark.classes.spark.ts';
 
-import * as cli from './spark.cli.js';
+import * as cli from './spark.cli.ts';
 
 export const runCli = async () => {
   cli.runCli();
@@ -1,9 +1,14 @@
-import * as plugins from './spark.plugins.js';
-import { Spark } from './index.js';
+import * as plugins from './spark.plugins.ts';
+import { Spark } from './index.ts';
 
 export class SparkConfig {
   public sparkRef: Spark;
+  public kvStore: plugins.npmextra.KeyValueStore;
   constructor(sparkRefArg: Spark) {
     this.sparkRef = sparkRefArg;
+    this.kvStore = new plugins.npmextra.KeyValueStore({
+      typeArg: 'userHomeDir',
+      identityArg: 'servezone_spark',
+    });
   }
 }
@@ -1,6 +1,6 @@
-import * as plugins from './spark.plugins.js';
-import * as paths from './spark.paths.js';
-import { Spark } from './spark.classes.spark.js';
+import * as plugins from './spark.plugins.ts';
+import * as paths from './spark.paths.ts';
+import { Spark } from './spark.classes.spark.ts';
 
 export class SparkInfo {
   public sparkRef: Spark;
@@ -1,15 +0,0 @@
-import * as plugins from './spark.plugins.js';
-import { Spark } from './index.js';
-
-export class SparkLocalConfig {
-  public sparkRef: Spark;
-  private kvStore: plugins.npmextra.KeyValueStore;
-
-  constructor(sparkRefArg: Spark) {
-    this.sparkRef = sparkRefArg;
-    this.kvStore = new plugins.npmextra.KeyValueStore({
-      typeArg: 'userHomeDir',
-      identityArg: 'spark',
-    });
-  }
-}
@@ -1,23 +1,23 @@
-import * as plugins from './spark.plugins.js';
-import { SparkTaskManager } from './spark.classes.taskmanager.js';
-import { SparkInfo } from './spark.classes.info.js';
-import { SparkUpdateManager } from './spark.classes.updatemanager.js';
-import { logger } from './spark.logging.js';
-import { SparkLocalConfig } from './spark.classes.localconfig.js';
+import * as plugins from './spark.plugins.ts';
+import { SparkTaskManager } from './spark.classes.taskmanager.ts';
+import { SparkInfo } from './spark.classes.info.ts';
+import { SparkServicesManager } from './spark.classes.updatemanager.ts';
+import { logger } from './spark.logging.ts';
+import { SparkConfig } from './spark.classes.config.ts';
 
 export class Spark {
   public smartdaemon: plugins.smartdaemon.SmartDaemon;
-  public sparkLocalConfig: SparkLocalConfig;
+  public sparkConfig: SparkConfig;
   public sparkTaskManager: SparkTaskManager;
   public sparkInfo: SparkInfo;
-  public sparkUpdateManager: SparkUpdateManager;
+  public sparkUpdateManager: SparkServicesManager;
 
   constructor() {
     this.smartdaemon = new plugins.smartdaemon.SmartDaemon();
-    this.sparkLocalConfig = new SparkLocalConfig(this);
+    this.sparkConfig = new SparkConfig(this);
     this.sparkInfo = new SparkInfo(this);
     this.sparkTaskManager = new SparkTaskManager(this);
-    this.sparkUpdateManager = new SparkUpdateManager(this);
+    this.sparkUpdateManager = new SparkServicesManager(this);
   }
 
   public async daemonStart() {
@@ -1,32 +1,21 @@
-import * as plugins from './spark.plugins.js';
-import { Spark } from './index.js';
-import * as paths from './spark.paths.js';
-import { logger } from './spark.logging.js';
+import * as plugins from './spark.plugins.ts';
+import { Spark } from './index.ts';
+import * as paths from './spark.paths.ts';
+import { logger } from './spark.logging.ts';
 
 export class SparkTaskManager {
   public sparkRef: Spark;
   public taskmanager: plugins.taskbuffer.TaskManager;
 
   // tasks
-  public checkinSlackTask: plugins.taskbuffer.Task;
   public updateSpark: plugins.taskbuffer.Task;
   public updateHost: plugins.taskbuffer.Task;
-  public updateCloudly: plugins.taskbuffer.Task;
+  public updateServices: plugins.taskbuffer.Task;
 
   constructor(sparkRefArg: Spark) {
     this.sparkRef = sparkRefArg;
     this.taskmanager = new plugins.taskbuffer.TaskManager();
 
-    // checkinOnSlack
-    this.checkinSlackTask = new plugins.taskbuffer.Task({
-      name: 'checkinSlack',
-      taskFunction: async () => {
-        logger.log('ok', 'running hourly checkin now');
-        logger.log('info', 'completed hourly checkin');
-      },
-    });
-
     // updateSpark
     this.updateSpark = new plugins.taskbuffer.Task({
       name: 'updateSpark',
@@ -48,7 +37,7 @@ export class SparkTaskManager {
           logger.log('info', 'Cooling off before restart...');
           await plugins.smartdelay.delayFor(5000);
           logger.log('ok', '######## Trying to exit / Restart expected... ########');
-          process.exit(0);
+          Deno.exit(0);
         }
       },
     });
@@ -67,7 +56,10 @@ export class SparkTaskManager {
       },
     });
 
-    this.updateCloudly = new plugins.taskbuffer.Task({
+    /**
+     * only being run when mode is cloudly
+     */
+    this.updateServices = new plugins.taskbuffer.Task({
       name: 'updateCloudly',
       taskFunction: async () => {
         logger.log('info', 'now running updateCloudly task');
@@ -80,10 +72,9 @@ export class SparkTaskManager {
    * start the taskmanager
    */
   public async start() {
-    this.taskmanager.addAndScheduleTask(this.checkinSlackTask, '0 0 * * * *');
+    this.taskmanager.addAndScheduleTask(this.updateServices, '30 */2 * * * *');
     this.taskmanager.addAndScheduleTask(this.updateSpark, '0 * * * * *');
     this.taskmanager.addAndScheduleTask(this.updateHost, '0 0 0 * * *');
-    this.taskmanager.addAndScheduleTask(this.updateCloudly, '30 */2 * * * *');
     this.taskmanager.start();
   }
 
@@ -91,10 +82,9 @@ export class SparkTaskManager {
    * stops the taskmanager
    */
   public async stop() {
-    this.taskmanager.descheduleTask(this.checkinSlackTask);
     this.taskmanager.descheduleTask(this.updateSpark);
     this.taskmanager.descheduleTask(this.updateHost);
-    this.taskmanager.descheduleTask(this.updateCloudly);
+    this.taskmanager.descheduleTask(this.updateServices);
     this.taskmanager.stop();
   }
 }
@@ -1,12 +1,28 @@
-import * as plugins from './spark.plugins.js';
-import * as paths from './spark.paths.js';
-import { Spark } from './spark.classes.spark.js';
-import { logger } from './spark.logging.js';
+import * as plugins from './spark.plugins.ts';
+import * as paths from './spark.paths.ts';
+import { Spark } from './spark.classes.spark.ts';
+import { logger } from './spark.logging.ts';
 
-export class SparkUpdateManager {
+/**
+ * this class takes care of updating the services that are managed by spark
+ */
+export class SparkServicesManager {
   public sparkRef: Spark;
   public dockerHost: plugins.docker.DockerHost;
   public smartupdate: plugins.smartupdate.SmartUpdate;
 
+  /**
+   * the services that are managed by spark
+   */
+  services: Array<{
+    name: string;
+    image: string;
+    url: string;
+    port: string;
+    environment: string;
+    secretJson: any;
+  }> = [];
 
   constructor(sparkrefArg: Spark) {
     this.sparkRef = sparkrefArg;
     this.dockerHost = new plugins.docker.DockerHost({});
@@ -21,109 +37,58 @@ export class SparkUpdateManager {
|
|||||||
}
|
}
|
||||||
|
|
||||||
public async updateServices() {
|
public async updateServices() {
|
||||||
if (
|
for (const service of this.services) {
|
||||||
plugins.smartfile.fs.isDirectory(plugins.path.join(paths.homeDir, 'serve.zone/spark')) &&
|
const existingService = await plugins.docker.DockerService.getServiceByName(
|
||||||
(await plugins.smartfile.fs.fileExists(
|
this.dockerHost,
|
||||||
plugins.path.join(paths.homeDir, 'serve.zone/spark/spark.json')
|
service.name
|
||||||
))
|
);
|
||||||
) {
|
const existingServiceSecret = await plugins.docker.DockerSecret.getSecretByName(
|
||||||
const services: Array<{
|
this.dockerHost,
|
||||||
name: string;
|
`${service.name}Secret`
|
||||||
image: string;
|
);
|
||||||
url: string;
|
if (existingService) {
|
||||||
port: string;
|
const needsUpdate: boolean = await existingService.needsUpdate();
|
||||||
environment: string;
|
if (!needsUpdate) {
|
||||||
secretJson: any;
|
logger.log('info', `service >>${service.name}<< not needing update.`);
|
||||||
}> = [];
|
// we simply return here to end the functions
|
||||||
// lets add coreflow
|
return;
|
||||||
services.push({
|
|
||||||
name: `coreflow`,
|
|
||||||
image: `code.foss.global/serve.zone/coreflow`,
|
|
||||||
url: `coreflow`,
|
|
||||||
environment: `production`,
|
|
||||||
port: `3000`,
|
|
||||||
secretJson: {
|
|
||||||
SERVEZONE_PORT: `3000`,
|
|
||||||
SERVEZONE_ENVIRONMENT: `production`,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
services.push({
|
|
||||||
name: `coretraffic`,
|
|
||||||
image: `code.foss.global/serve.zone/coretraffic`,
|
|
||||||
url: `coreflow`,
|
|
||||||
environment: `production`,
|
|
||||||
port: `3000`,
|
|
||||||
secretJson: {
|
|
||||||
SERVEZONE_PORT: `3000`,
|
|
||||||
SERVEZONE_ENVIRONMENT: `production`,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
services.push({
|
|
||||||
name: `corelog`,
|
|
||||||
image: `code.foss.global/serve.zone/corelog`,
|
|
||||||
url: `coreflow`,
|
|
||||||
environment: `production`,
|
|
||||||
port: `3000`,
|
|
||||||
secretJson: {
|
|
||||||
SERVEZONE_PORT: `3000`,
|
|
||||||
SERVEZONE_ENVIRONMENT: `production`,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
// lets add coretraffic
|
|
||||||
|
|
||||||
for (const service of services) {
|
|
||||||
const existingService = await plugins.docker.DockerService.getServiceByName(
|
|
||||||
this.dockerHost,
|
|
||||||
service.name
|
|
||||||
);
|
|
||||||
const existingServiceSecret = await plugins.docker.DockerSecret.getSecretByName(
|
|
||||||
this.dockerHost,
|
|
||||||
`${service.name}Secret`
|
|
||||||
);
|
|
||||||
if (existingService) {
|
|
||||||
const needsUpdate: boolean = await existingService.needsUpdate();
|
|
||||||
if (!needsUpdate) {
|
|
||||||
logger.log('info', `not needing update.`);
|
|
||||||
// we simply return here to end the functions
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
logger.log('ok', `${service.name} needs to be updated!`);
|
|
||||||
await existingService.remove();
|
|
||||||
await existingServiceSecret.remove();
|
|
||||||
}
|
}
|
||||||
if (!existingService && existingServiceSecret) {
|
// continuing here means we need to update the service
|
||||||
await existingServiceSecret.remove();
|
logger.log('ok', `${service.name} needs to be updated!`);
|
||||||
}
|
await existingService.remove();
|
||||||
|
await existingServiceSecret.remove();
|
||||||
const newServiceImage = await plugins.docker.DockerImage.createFromRegistry(
|
|
||||||
this.dockerHost,
|
|
||||||
{
|
|
||||||
creationObject: {
|
|
||||||
imageUrl: service.image,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
);
|
|
||||||
const newServiceSecret = await plugins.docker.DockerSecret.createSecret(this.dockerHost, {
|
|
||||||
name: `${service.name}Secret`,
|
|
||||||
contentArg: plugins.smartjson.stringify(service.secretJson),
|
|
||||||
version: await newServiceImage.getVersion(),
|
|
||||||
labels: {},
|
|
||||||
});
|
|
||||||
const newService = await plugins.docker.DockerService.createService(this.dockerHost, {
|
|
||||||
image: newServiceImage,
|
|
||||||
labels: {},
|
|
||||||
name: service.name,
|
|
||||||
networkAlias: service.name,
|
|
||||||
networks: [],
|
|
||||||
secrets: [newServiceSecret],
|
|
||||||
ports: [`${service.port}:${service.secretJson.SERVEZONE_PORT}`],
|
|
||||||
});
|
|
||||||
logger.log('ok', `updated service >>${newService.Spec.Name}<<!`);
|
|
||||||
}
|
}
|
||||||
logger.log('success', `updated ${services.length} services!`);
|
if (!existingService && existingServiceSecret) {
|
||||||
|
await existingServiceSecret.remove();
|
||||||
|
}
|
||||||
|
|
||||||
|
const newServiceImage = await plugins.docker.DockerImage.createFromRegistry(
|
||||||
|
this.dockerHost,
|
||||||
|
{
|
||||||
|
creationObject: {
|
||||||
|
imageUrl: service.image,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
);
|
||||||
|
const newServiceSecret = await plugins.docker.DockerSecret.createSecret(this.dockerHost, {
|
||||||
|
name: `${service.name}Secret`,
|
||||||
|
contentArg: plugins.smartjson.stringify(service.secretJson),
|
||||||
|
version: await newServiceImage.getVersion(),
|
||||||
|
labels: {},
|
||||||
|
});
|
||||||
|
|
||||||
|
const newService = await plugins.docker.DockerService.createService(this.dockerHost, {
|
||||||
|
image: newServiceImage,
|
||||||
|
labels: {},
|
||||||
|
name: service.name,
|
||||||
|
networkAlias: service.name,
|
||||||
|
networks: [],
|
||||||
|
secrets: [newServiceSecret],
|
||||||
|
ports: [`${service.port}:${service.secretJson.SERVEZONE_PORT}`],
|
||||||
|
});
|
||||||
|
logger.log('ok', `updated service >>${newService.Spec.Name}<<!`);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
logger.log('success', `updated ${this.services.length} services!`);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
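Taken together, this rework drops the hard-coded coreflow/coretraffic/corelog list and the spark.json directory check: the daemon now updates whatever has been pushed into the public `services` array. A minimal sketch of the resulting flow, assuming a `Spark` instance wired up as elsewhere in the codebase (construction details are elided here and not part of this diff):

```typescript
// Sketch: driving the reworked SparkUpdateManager (assumes Spark wiring).
import { Spark } from './spark.classes.spark.ts';

const sparkInstance = new Spark();

// Register service entries before the daemon's update pass runs;
// the object shape matches the `services` array declared above.
sparkInstance.sparkUpdateManager.services.push({
  name: `coreflow`,
  image: `code.foss.global/serve.zone/coreflow`,
  url: `coreflow`,
  environment: `production`,
  port: `3000`,
  secretJson: {
    SERVEZONE_PORT: `3000`,
    SERVEZONE_ENVIRONMENT: `production`,
  },
});

// Each pass pulls the registry image and only recreates the Docker secret
// and service when existingService.needsUpdate() reports a newer image.
await sparkInstance.sparkUpdateManager.updateServices();
```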
@@ -1,7 +1,7 @@
-import * as plugins from './spark.plugins.js';
-import * as paths from './spark.paths.js';
-import { Spark } from './spark.classes.spark.js';
-import { logger } from './spark.logging.js';
+import * as plugins from './spark.plugins.ts';
+import * as paths from './spark.paths.ts';
+import { Spark } from './spark.classes.spark.ts';
+import { logger } from './spark.logging.ts';
 
 export const runCli = async () => {
   const smartshellInstance = new plugins.smartshell.Smartshell({
@@ -45,6 +45,50 @@ export const runCli = async () => {
   smartcliInstance.addCommand('asdaemon').subscribe(async (argvArg) => {
     logger.log('success', 'looks like we are running as daemon now');
     logger.log('info', 'starting spark in daemon mode');
+
+    // lets determine the mode if specified
+    let mode = argvArg.mode;
+    if (mode === 'cloudly') {
+      await sparkInstance.sparkConfig.kvStore.writeKey('mode', 'cloudly');
+    } else if (mode === 'coreflow-node') {
+      await sparkInstance.sparkConfig.kvStore.writeKey('mode', 'coreflow-node');
+    } else if (mode) {
+      logger.log('error', 'unknown mode specified');
+      Deno.exit(1);
+    } else {
+      // mode is not specified by cli, lets get it from the config
+      mode = await sparkInstance.sparkConfig.kvStore.readKey('mode');
+    }
+
+    if (!mode) {
+      logger.log('error', 'no mode specified by either cli or config');
+      Deno.exit(1);
+    } else if (mode === 'cloudly') {
+      sparkInstance.sparkUpdateManager.services.push({
+        name: `coreflow`,
+        image: `code.foss.global/serve.zone/cloudly`,
+        url: `cloudly`,
+        environment: `production`,
+        port: `3000`,
+        secretJson: {
+          SERVEZONE_PORT: `3000`,
+          SERVEZONE_ENVIRONMENT: `production`,
+        },
+      });
+    } else if (mode === 'coreflow-node') {
+      sparkInstance.sparkUpdateManager.services.push({
+        name: `coreflow`,
+        image: `code.foss.global/serve.zone/coreflow`,
+        url: `coreflow`,
+        environment: `production`,
+        port: `3000`,
+        secretJson: {
+          SERVEZONE_PORT: `3000`,
+          SERVEZONE_ENVIRONMENT: `production`,
+        },
+      });
+    }
+
     await sparkInstance.daemonStart();
   });
 
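The new `asdaemon` block gives a CLI-supplied `--mode` precedence over the persisted value: a known mode is written to the kvStore, an unknown one aborts, and an absent one falls back to the stored key. A condensed sketch of that precedence, assuming a kvStore with the `readKey`/`writeKey` methods used above (the helper name and error-throwing style are hypothetical, introduced only for illustration):

```typescript
// Hypothetical helper distilling the mode-resolution precedence above.
const KNOWN_MODES = ['cloudly', 'coreflow-node'];

async function resolveMode(
  cliMode: string | undefined,
  kvStore: {
    readKey(key: string): Promise<string>;
    writeKey(key: string, value: string): Promise<void>;
  },
): Promise<string> {
  if (cliMode && KNOWN_MODES.includes(cliMode)) {
    await kvStore.writeKey('mode', cliMode); // persist for future daemon starts
    return cliMode;
  }
  if (cliMode) {
    throw new Error('unknown mode specified');
  }
  const storedMode = await kvStore.readKey('mode'); // fall back to config
  if (!storedMode) {
    throw new Error('no mode specified by either cli or config');
  }
  return storedMode;
}
```

An invocation along the lines of `spark asdaemon --mode cloudly` would then persist the mode, so later daemon starts can omit the flag.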
@@ -1,6 +1,6 @@
-import * as plugins from './spark.plugins.js';
-import * as paths from './spark.paths.js';
-import { commitinfo } from './00_commitinfo_data.js';
+import * as plugins from './spark.plugins.ts';
+import * as paths from './spark.paths.ts';
+import { commitinfo } from './00_commitinfo_data.ts';
 
 const projectInfoNpm = new plugins.projectinfo.ProjectinfoNpm(paths.packageDir);
 
@@ -1,4 +1,4 @@
-import * as plugins from './spark.plugins.js';
+import * as plugins from './spark.plugins.ts';
 
 export const packageDir = plugins.path.join(plugins.smartpath.get.dirnameFromImportMetaUrl(import.meta.url), '../');
 export const homeDir = plugins.smartpath.get.home();
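For reference, `dirnameFromImportMetaUrl` resolves the module's own directory from `import.meta.url`. A rough plain-Deno equivalent, offered only as a sketch (the smartpath call above is the authoritative one):

```typescript
// Sketch: what spark.paths.ts derives, expressed with @std/path directly.
import * as path from '@std/path';

const dirname = path.dirname(path.fromFileUrl(import.meta.url));
export const packageDir = path.join(dirname, '../');
```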
@@ -1,12 +1,13 @@
-// node native scope
-import * as path from 'path';
+// std library scope
+import * as path from '@std/path';
 
 export { path };
 
 // @serve.zone scope
 import * as servezoneInterfaces from '@serve.zone/interfaces';
+import * as servezoneApi from '@serve.zone/api';
 
-export { servezoneInterfaces };
+export { servezoneInterfaces, servezoneApi };
 
 // @apiclient.xyz scope
 import * as docker from '@apiclient.xyz/docker';
@@ -1,14 +0,0 @@
-{
-  "compilerOptions": {
-    "experimentalDecorators": true,
-    "useDefineForClassFields": false,
-    "target": "ES2022",
-    "module": "NodeNext",
-    "moduleResolution": "NodeNext",
-    "esModuleInterop": true,
-    "verbatimModuleSyntax": true
-  },
-  "exclude": [
-    "dist_*/**/*.d.ts"
-  ]
-}
140
uninstall.sh
Executable file
@@ -0,0 +1,140 @@
#!/bin/bash

# SPARK Uninstaller Script
# Completely removes SPARK from the system

# Check if running as root
if [ "$EUID" -ne 0 ]; then
    echo "Please run as root (sudo ./uninstall.sh)"
    exit 1
fi

# This script can be called directly or through the CLI
# When called through the CLI, environment variables are set
# REMOVE_CONFIG=yes|no - whether to remove configuration files
# REMOVE_DATA=yes|no - whether to remove data files

# If not set through CLI, use defaults
REMOVE_CONFIG=${REMOVE_CONFIG:-"no"}
REMOVE_DATA=${REMOVE_DATA:-"no"}

echo "SPARK Uninstaller"
echo "================="
echo "This will completely remove SPARK from your system."

# Find the directory where this script is located
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"

# Step 1: Stop and disable the systemd service if it exists
if [ -f "/etc/systemd/system/spark.service" ]; then
    echo "Stopping SPARK service..."
    systemctl stop spark.service 2>/dev/null

    echo "Disabling SPARK service..."
    systemctl disable spark.service 2>/dev/null

    echo "Removing systemd service file..."
    rm -f /etc/systemd/system/spark.service

    echo "Reloading systemd daemon..."
    systemctl daemon-reload
fi

# Also check for legacy smartdaemon_spark service
if [ -f "/etc/systemd/system/smartdaemon_spark.service" ]; then
    echo "Stopping legacy smartdaemon_spark service..."
    systemctl stop smartdaemon_spark.service 2>/dev/null

    echo "Disabling legacy smartdaemon_spark service..."
    systemctl disable smartdaemon_spark.service 2>/dev/null

    echo "Removing legacy systemd service file..."
    rm -f /etc/systemd/system/smartdaemon_spark.service

    echo "Reloading systemd daemon..."
    systemctl daemon-reload
fi

# Step 2: Remove global symlinks
if [ -L "/usr/local/bin/spark" ]; then
    echo "Removing global symlink from /usr/local/bin/spark..."
    rm -f /usr/local/bin/spark
fi

if [ -L "/usr/bin/spark" ]; then
    echo "Removing global symlink from /usr/bin/spark..."
    rm -f /usr/bin/spark
fi

# Step 3: Remove configuration if requested
if [ "$REMOVE_CONFIG" = "yes" ]; then
    if [ -d "/etc/spark" ]; then
        echo "Removing configuration directory /etc/spark..."
        rm -rf /etc/spark
    fi
else
    if [ -d "/etc/spark" ]; then
        echo "Configuration preserved in /etc/spark (use --remove-config to remove)"
    fi
fi

# Step 4: Remove data if requested
if [ "$REMOVE_DATA" = "yes" ]; then
    if [ -d "/var/lib/spark" ]; then
        echo "Removing data directory /var/lib/spark..."
        rm -rf /var/lib/spark
    fi
    if [ -d "/var/log/spark" ]; then
        echo "Removing log directory /var/log/spark..."
        rm -rf /var/log/spark
    fi
else
    if [ -d "/var/lib/spark" ] || [ -d "/var/log/spark" ]; then
        echo "Data and logs preserved (use --remove-data to remove)"
    fi
fi

# Step 5: Remove installation directory
if [ -d "/opt/spark" ]; then
    echo "Removing installation directory /opt/spark..."
    rm -rf /opt/spark
fi

# Step 6: Clean up Docker containers and images if any
echo "Checking for SPARK-managed Docker resources..."
# List all containers with spark labels
SPARK_CONTAINERS=$(docker ps -aq --filter "label=com.servezone.spark" 2>/dev/null)
if [ -n "$SPARK_CONTAINERS" ]; then
    echo "Stopping and removing SPARK-managed containers..."
    docker stop $SPARK_CONTAINERS 2>/dev/null
    docker rm $SPARK_CONTAINERS 2>/dev/null
fi

echo ""
echo "================================================"
echo " SPARK Uninstallation Complete"
echo "================================================"
echo ""
echo "SPARK has been removed from your system."

if [ "$REMOVE_CONFIG" = "no" ] && [ -d "/etc/spark" ]; then
    echo ""
    echo "Configuration has been preserved in /etc/spark"
    echo "To remove it, run: sudo rm -rf /etc/spark"
fi

if [ "$REMOVE_DATA" = "no" ]; then
    if [ -d "/var/lib/spark" ] || [ -d "/var/log/spark" ]; then
        echo ""
        echo "Data and logs have been preserved in:"
        [ -d "/var/lib/spark" ] && echo "  - /var/lib/spark"
        [ -d "/var/log/spark" ] && echo "  - /var/log/spark"
        echo "To remove them, run:"
        [ -d "/var/lib/spark" ] && echo "  sudo rm -rf /var/lib/spark"
        [ -d "/var/log/spark" ] && echo "  sudo rm -rf /var/log/spark"
    fi
fi

echo ""
echo "Thank you for using SPARK!"
echo ""
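Under the defaults above (`REMOVE_CONFIG=no`, `REMOVE_DATA=no`), a plain `sudo ./uninstall.sh` removes the services, symlinks, and `/opt/spark` while preserving configuration and data; running it with both variables set to `yes` (or through the CLI flags `--remove-config`/`--remove-data` that the script's messages reference) performs a full purge.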