Compare commits
49 Commits
| SHA1 |
|---|
| d3b78054ad |
| d2ae35f0ce |
| a605477663 |
| ba98086548 |
| 0b3c22556b |
| 069e6e6c8f |
| 10598520d8 |
| 075b7946b1 |
| f47fca3304 |
| 575e010a6b |
| 60a5dc4663 |
| 36d80b1e27 |
| 465cf0ee72 |
| bd5cd5c0cb |
| b622565e34 |
| 56376121ab |
| e3359d1235 |
| f1eeec6922 |
| 69362bb529 |
| 857fcc50ba |
| 5d0df006eb |
| e6256502ce |
| d5dc141171 |
| 2538f5ae2c |
| 4613193dcc |
| 848b3afe54 |
| dd86bae942 |
| 4691c61544 |
| dfb2d3b340 |
| 6a19ab05e3 |
| 7b718da7a2 |
| ebaf545418 |
| 2cdfdaed55 |
| 2216804652 |
| 1b177037f5 |
| 9d6590927c |
| eaf401200c |
| e97a4d53ae |
| ca2b3b25a5 |
| 19703de50d |
| bcab4f274e |
| 64e947735f |
| 1e05c08002 |
| 167df321f9 |
| 49998c4c32 |
| 8045ec38df |
| 793fb18b43 |
| 09534fd899 |
| 5f3783a5e9 |
CI workflow

@@ -12,6 +12,8 @@ jobs:
   check:
     name: Type Check & Lint
     runs-on: ubuntu-latest
+    container:
+      image: code.foss.global/host.today/ht-docker-node:latest
 
     steps:
       - name: Checkout code
@@ -22,6 +24,9 @@ jobs:
         with:
           deno-version: v2.x
 
+      - name: Install dependencies
+        run: deno install --entrypoint mod.ts
+
       - name: Check TypeScript types
         run: deno check mod.ts
 
@@ -36,6 +41,8 @@ jobs:
   build:
     name: Build Test (Current Platform)
     runs-on: ubuntu-latest
+    container:
+      image: code.foss.global/host.today/ht-docker-node:latest
 
     steps:
       - name: Checkout code
@@ -46,10 +53,21 @@ jobs:
         with:
          deno-version: v2.x
 
+      - name: Set up Node.js
+        uses: actions/setup-node@v4
+        with:
+          node-version: '22'
+
+      - name: Enable corepack
+        run: corepack enable
+
+      - name: Install dependencies
+        run: pnpm install --ignore-scripts
+
       - name: Compile for current platform
         run: |
           echo "Testing compilation for Linux x86_64..."
-          deno compile --allow-all --no-check \
+          npx tsdeno compile --allow-all --no-check \
             --output onebox-test \
             --target x86_64-unknown-linux-gnu mod.ts
 
@@ -62,6 +80,8 @@ jobs:
   build-all:
     name: Build All Platforms
     runs-on: ubuntu-latest
+    container:
+      image: code.foss.global/host.today/ht-docker-node:latest
 
     steps:
       - name: Checkout code
@@ -72,8 +92,19 @@ jobs:
         with:
          deno-version: v2.x
 
+      - name: Set up Node.js
+        uses: actions/setup-node@v4
+        with:
+          node-version: '22'
+
+      - name: Enable corepack
+        run: corepack enable
+
+      - name: Install dependencies
+        run: pnpm install --ignore-scripts
+
       - name: Compile all platform binaries
-        run: bash scripts/compile-all.sh
+        run: mkdir -p dist/binaries && npx tsdeno compile
 
       - name: Upload all binaries as artifact
         uses: actions/upload-artifact@v3
npm publish workflow

@@ -8,6 +8,8 @@ on:
 jobs:
   npm-publish:
     runs-on: ubuntu-latest
+    container:
+      image: code.foss.global/host.today/ht-docker-node:latest
 
     steps:
       - name: Checkout code
release workflow

@@ -8,6 +8,8 @@ on:
 jobs:
   build-and-release:
     runs-on: ubuntu-latest
+    container:
+      image: code.foss.global/host.today/ht-docker-node:latest
 
     steps:
       - name: Checkout code
@@ -20,6 +22,17 @@ jobs:
         with:
          deno-version: v2.x
 
+      - name: Set up Node.js
+        uses: actions/setup-node@v4
+        with:
+          node-version: '22'
+
+      - name: Enable corepack
+        run: corepack enable
+
+      - name: Install dependencies
+        run: pnpm install --ignore-scripts
+
       - name: Get version from tag
         id: version
         run: |
@@ -41,57 +54,7 @@ jobs:
           fi
 
       - name: Compile binaries for all platforms
-        run: |
-          echo "================================================"
-          echo " Onebox Release Compilation"
-          echo " Version: ${{ steps.version.outputs.version }}"
-          echo "================================================"
-          echo ""
-
-          # Clean up old binaries and create fresh directory
-          rm -rf dist/binaries
-          mkdir -p dist/binaries
-          echo "-> Cleaned old binaries from dist/binaries"
-          echo ""
-
-          # Linux x86_64
-          echo "-> Compiling for Linux x86_64..."
-          deno compile --allow-all --no-check \
-            --output dist/binaries/onebox-linux-x64 \
-            --target x86_64-unknown-linux-gnu mod.ts
-          echo "  Done: Linux x86_64"
-
-          # Linux ARM64
-          echo "-> Compiling for Linux ARM64..."
-          deno compile --allow-all --no-check \
-            --output dist/binaries/onebox-linux-arm64 \
-            --target aarch64-unknown-linux-gnu mod.ts
-          echo "  Done: Linux ARM64"
-
-          # macOS x86_64
-          echo "-> Compiling for macOS x86_64..."
-          deno compile --allow-all --no-check \
-            --output dist/binaries/onebox-macos-x64 \
-            --target x86_64-apple-darwin mod.ts
-          echo "  Done: macOS x86_64"
-
-          # macOS ARM64
-          echo "-> Compiling for macOS ARM64..."
-          deno compile --allow-all --no-check \
-            --output dist/binaries/onebox-macos-arm64 \
-            --target aarch64-apple-darwin mod.ts
-          echo "  Done: macOS ARM64"
-
-          # Windows x86_64
-          echo "-> Compiling for Windows x86_64..."
-          deno compile --allow-all --no-check \
-            --output dist/binaries/onebox-windows-x64.exe \
-            --target x86_64-pc-windows-msvc mod.ts
-          echo "  Done: Windows x86_64"
-
-          echo ""
-          echo "All binaries compiled successfully!"
-          ls -lh dist/binaries/
+        run: mkdir -p dist/binaries && npx tsdeno compile
 
       - name: Generate SHA256 checksums
         run: |
changelog.md (124)

@@ -1,5 +1,129 @@
 # Changelog
 
+## 2026-03-16 - 1.14.4 - fix(repo)
+no changes to commit
+
+
+## 2026-03-16 - 1.14.3 - fix(repo)
+no changes to commit
+
+
+## 2026-03-16 - 1.14.2 - fix(repo)
+no changes to commit
+
+
+## 2026-03-16 - 1.14.1 - fix(repo)
+no changes to commit
+
+
+## 2026-03-16 - 1.14.0 - feat(daemon)
+auto-install Docker and initialize Swarm during daemon service setup
+
+- Adds a Docker availability check before installing the Onebox daemon service
+- Installs Docker automatically when it is missing using the standard installation script
+- Attempts to initialize Docker Swarm after installation and handles already-initialized environments gracefully
+
+## 2026-03-16 - 1.13.17 - fix(ci)
+remove forced container image pulling from Gitea workflow jobs
+
+- Drops the `--pull always` container option from CI, npm publish, and release workflows.
+- Keeps workflow container images unchanged while avoiding forced pulls on every job run.
+
+## 2026-03-16 - 1.13.16 - fix(ci)
+refresh workflow container images on every run and bump @apiclient.xyz/docker to ^5.1.1
+
+- add --pull always to CI, release, and npm publish workflow containers to avoid stale images
+- update @apiclient.xyz/docker from ^5.1.0 to ^5.1.1 in deno.json
+
+## 2026-03-15 - 1.13.15 - fix(repo)
+no changes to commit
+
+
+## 2026-03-15 - 1.13.14 - fix(repo)
+no changes to commit
+
+
+## 2026-03-15 - 1.13.13 - fix(repo)
+no changes to commit
+
+
+## 2026-03-15 - 1.13.12 - fix(ci)
+run pnpm install with --ignore-scripts in CI and release workflows
+
+- Update CI workflow dependency installation steps to skip lifecycle scripts during builds.
+- Apply the same install change to the release workflow for consistent automation behavior.
+
+## 2026-03-15 - 1.13.11 - fix(project)
+no changes to commit
+
+
+## 2026-03-15 - 1.13.10 - fix(deps)
+bump @git.zone/tsdeno to ^1.2.0
+
+- Updates the tsdeno development dependency from ^1.1.1 to ^1.2.0.
+
+## 2026-03-15 - 1.13.9 - fix(repo)
+no changes to commit
+
+
+## 2026-03-15 - 1.13.8 - fix(repo)
+no changes to commit
+
+
+## 2026-03-15 - 1.13.7 - fix(repo)
+no changes to commit
+
+
+## 2026-03-15 - 1.13.6 - fix(ci)
+correct workflow container image registry path
+
+- Update Gitea CI, release, and npm publish workflows to use the corrected ht-docker-node image path
+- Align all workflow container references from hosttoday to host.today to prevent pipeline image resolution issues
+
+## 2026-03-15 - 1.13.5 - fix(workflows)
+switch Gitea workflow containers from ht-docker-dbase to ht-docker-node
+
+- Updates the CI, release, and npm publish workflows to use the Node-focused container image consistently.
+- Aligns workflow runtime images with the project's Node and Deno build and publish steps.
+
+## 2026-03-15 - 1.13.4 - fix(ci)
+run workflows in the shared build container and enable corepack for pnpm installs
+
+- adds the ht-docker-dbase container image to CI, release, and npm publish workflows
+- enables corepack before pnpm install in build and release jobs to ensure package manager availability
+
+## 2026-03-15 - 1.13.3 - fix(build)
+replace custom Deno compile scripts with tsdeno-based binary builds in CI and release workflows
+
+- adds @git.zone/tsdeno as a dev dependency and configures compile targets in npmextra.json
+- updates CI and release workflows to install Node.js dependencies before running tsdeno compile
+- removes the legacy scripts/compile-all.sh script and points the compile task to tsdeno compile
+
+## 2026-03-15 - 1.13.2 - fix(scripts)
+install production dependencies before compiling binaries and exclude local node_modules from builds
+
+- Adds a dependency installation step using the application entrypoint before cross-platform compilation
+- Updates all deno compile targets to use --node-modules-dir=none to avoid bundling local node_modules
+
+## 2026-03-15 - 1.13.1 - fix(deno)
+remove nodeModulesDir from Deno configuration
+
+- Drops the explicit nodeModulesDir setting from deno.json.
+- Keeps the package version unchanged at 1.13.0 while simplifying runtime configuration.
+
+## 2026-03-15 - 1.13.0 - feat(install)
+improve installer with version selection, service restart handling, and upgrade documentation
+
+- Adds installer command-line options for help, specific version selection, and custom install directory.
+- Fetches the latest release from the Gitea API when no version is provided and installs the matching platform binary.
+- Preserves Onebox data directories, stops and restarts the systemd service during updates, and refreshes installation instructions in the README including upgrade usage.
+
+## 2026-03-15 - 1.12.1 - fix(package.json)
+update package metadata
+
+- Single metadata-only file changed (+1, -1)
+- No source code or runtime behavior modified; safe patch release
+
 ## 2026-03-15 - 1.12.0 - feat(cli,release)
 add self-upgrade command and automate CI, release, and npm publishing workflows
 
deno.json

@@ -1,12 +1,11 @@
 {
   "name": "@serve.zone/onebox",
-  "version": "1.12.0",
+  "version": "1.14.4",
   "exports": "./mod.ts",
-  "nodeModulesDir": "auto",
   "tasks": {
     "test": "deno test --allow-all test/",
     "test:watch": "deno test --allow-all --watch test/",
-    "compile": "bash scripts/compile-all.sh",
+    "compile": "tsdeno compile",
     "dev": "pnpm run watch"
   },
   "imports": {
@@ -17,7 +16,7 @@
     "@std/encoding": "jsr:@std/encoding@^1.0.10",
     "@db/sqlite": "jsr:@db/sqlite@0.12.0",
     "@push.rocks/smartdaemon": "npm:@push.rocks/smartdaemon@^2.1.0",
-    "@apiclient.xyz/docker": "npm:@apiclient.xyz/docker@^5.1.0",
+    "@apiclient.xyz/docker": "npm:@apiclient.xyz/docker@^5.1.1",
     "@apiclient.xyz/cloudflare": "npm:@apiclient.xyz/cloudflare@6.4.3",
     "@push.rocks/smartacme": "npm:@push.rocks/smartacme@^8.0.0",
     "@push.rocks/smartregistry": "npm:@push.rocks/smartregistry@^2.2.0",
install.sh (446)

@@ -1,192 +1,308 @@
 #!/bin/bash
-#
-# Onebox installer script
-#
-set -e
-
-# Configuration
-REPO_URL="https://code.foss.global/serve.zone/onebox"
-INSTALL_DIR="/opt/onebox"
-BIN_LINK="/usr/local/bin/onebox"
-
-# Colors
-RED='\033[0;31m'
-GREEN='\033[0;32m'
-YELLOW='\033[1;33m'
-NC='\033[0m' # No Color
-
-# Functions
-error() {
-  echo -e "${RED}Error: $1${NC}" >&2
-  exit 1
-}
-
-info() {
-  echo -e "${GREEN}$1${NC}"
-}
-
-warn() {
-  echo -e "${YELLOW}$1${NC}"
-}
-
-# Detect platform and architecture
-detect_platform() {
-  OS=$(uname -s | tr '[:upper:]' '[:lower:]')
-  ARCH=$(uname -m)
-
-  case "$OS" in
-    linux)
-      PLATFORM="linux"
-      ;;
-    darwin)
-      PLATFORM="macos"
-      ;;
-    *)
-      error "Unsupported operating system: $OS"
-      ;;
-  esac
-
-  case "$ARCH" in
-    x86_64|amd64)
-      ARCH="x64"
-      ;;
-    aarch64|arm64)
-      ARCH="arm64"
-      ;;
-    *)
-      error "Unsupported architecture: $ARCH"
-      ;;
-  esac
-
-  BINARY_NAME="onebox-${PLATFORM}-${ARCH}"
-}
-
-# Get latest version from Gitea API
-get_latest_version() {
-  info "Fetching latest version..."
-  VERSION=$(curl -s "${REPO_URL}/releases" | grep -o '"tag_name":"v[^"]*' | head -1 | cut -d'"' -f4 | cut -c2-)
-
-  if [ -z "$VERSION" ]; then
-    warn "Could not fetch latest version, using 'main' branch"
-    VERSION="main"
-  else
-    info "Latest version: v${VERSION}"
-  fi
-}
-
-# Check if running as root
-check_root() {
-  if [ "$EUID" -ne 0 ]; then
-    error "This script must be run as root (use sudo)"
-  fi
-}
-
-# Download binary
-download_binary() {
-  info "Downloading Onebox ${VERSION} for ${PLATFORM}-${ARCH}..."
-
-  # Create temp directory
-  TMP_DIR=$(mktemp -d)
-  TMP_FILE="${TMP_DIR}/${BINARY_NAME}"
-
-  # Try release download first
-  if [ "$VERSION" != "main" ]; then
-    DOWNLOAD_URL="${REPO_URL}/releases/download/v${VERSION}/${BINARY_NAME}"
-  else
-    DOWNLOAD_URL="${REPO_URL}/raw/branch/main/dist/binaries/${BINARY_NAME}"
-  fi
-
-  if ! curl -L -f -o "$TMP_FILE" "$DOWNLOAD_URL"; then
-    error "Failed to download binary from $DOWNLOAD_URL"
-  fi
-
-  # Verify download
-  if [ ! -f "$TMP_FILE" ] || [ ! -s "$TMP_FILE" ]; then
-    error "Downloaded file is empty or missing"
-  fi
-
-  info "✓ Download complete"
-}
-
-# Install binary
-install_binary() {
-  info "Installing Onebox to ${INSTALL_DIR}..."
-
-  # Create install directory
-  mkdir -p "$INSTALL_DIR"
-
-  # Copy binary
-  cp "$TMP_FILE" "${INSTALL_DIR}/onebox"
-  chmod +x "${INSTALL_DIR}/onebox"
-
-  # Create symlink
-  ln -sf "${INSTALL_DIR}/onebox" "$BIN_LINK"
-
-  # Cleanup temp files
-  rm -rf "$TMP_DIR"
-
-  info "✓ Installation complete"
-}
-
-# Initialize database and config
-initialize() {
-  info "Initializing Onebox..."
-
-  # Create data directory
-  mkdir -p /var/lib/onebox
-
-  # Create certbot directory for ACME challenges
-  mkdir -p /var/www/certbot
-
-  info "✓ Initialization complete"
-}
-
-# Print success message
-print_success() {
-  echo ""
-  info "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
-  info "  Onebox installed successfully!"
-  info "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
-  echo ""
-  echo "Next steps:"
-  echo ""
-  echo "1. Configure Cloudflare (optional):"
-  echo "   onebox config set cloudflareAPIKey <key>"
-  echo "   onebox config set cloudflareEmail <email>"
-  echo "   onebox config set cloudflareZoneID <zone-id>"
-  echo "   onebox config set serverIP <your-server-ip>"
-  echo ""
-  echo "2. Configure ACME email:"
-  echo "   onebox config set acmeEmail <your@email.com>"
-  echo ""
-  echo "3. Install daemon:"
-  echo "   onebox daemon install"
-  echo ""
-  echo "4. Start daemon:"
-  echo "   onebox daemon start"
-  echo ""
-  echo "5. Deploy your first service:"
-  echo "   onebox service add myapp --image nginx:latest --domain app.example.com"
-  echo ""
-  echo "Web UI: http://localhost:3000"
-  echo "Default credentials: admin / admin"
-  echo ""
-}
-
-# Main installation flow
-main() {
-  info "Onebox Installer"
-  echo ""
-
-  check_root
-  detect_platform
-  get_latest_version
-  download_binary
-  install_binary
-  initialize
-  print_success
-}
-
-# Run main function
-main
+#
+# Onebox Installer Script
+# Downloads and installs pre-compiled Onebox binary from Gitea releases
+#
+# Usage:
+#   Direct piped installation (recommended):
+#   curl -sSL https://code.foss.global/serve.zone/onebox/raw/branch/main/install.sh | sudo bash
+#
+#   With version specification:
+#   curl -sSL https://code.foss.global/serve.zone/onebox/raw/branch/main/install.sh | sudo bash -s -- --version v1.11.0
+#
+# Options:
+#   -h, --help          Show this help message
+#   --version VERSION   Install specific version (e.g., v1.11.0)
+#   --install-dir DIR   Installation directory (default: /opt/onebox)
+
+set -e
+
+# Default values
+SHOW_HELP=0
+SPECIFIED_VERSION=""
+INSTALL_DIR="/opt/onebox"
+GITEA_BASE_URL="https://code.foss.global"
+GITEA_REPO="serve.zone/onebox"
+SERVICE_NAME="smartdaemon_onebox"
+
+# Parse command line arguments
+while [[ $# -gt 0 ]]; do
+  case $1 in
+    -h|--help)
+      SHOW_HELP=1
+      shift
+      ;;
+    --version)
+      SPECIFIED_VERSION="$2"
+      shift 2
+      ;;
+    --install-dir)
+      INSTALL_DIR="$2"
+      shift 2
+      ;;
+    *)
+      echo "Unknown option: $1"
+      echo "Use -h or --help for usage information"
+      exit 1
+      ;;
+  esac
+done
+
+if [ $SHOW_HELP -eq 1 ]; then
+  echo "Onebox Installer Script"
+  echo "Downloads and installs pre-compiled Onebox binary"
+  echo ""
+  echo "Usage: $0 [options]"
+  echo ""
+  echo "Options:"
+  echo "  -h, --help          Show this help message"
+  echo "  --version VERSION   Install specific version (e.g., v1.11.0)"
+  echo "  --install-dir DIR   Installation directory (default: /opt/onebox)"
+  echo ""
+  echo "Examples:"
+  echo "  # Install latest version"
+  echo "  curl -sSL https://code.foss.global/serve.zone/onebox/raw/branch/main/install.sh | sudo bash"
+  echo ""
+  echo "  # Install specific version"
+  echo "  curl -sSL https://code.foss.global/serve.zone/onebox/raw/branch/main/install.sh | sudo bash -s -- --version v1.11.0"
+  exit 0
+fi
+
+# Check if running as root
+if [ "$EUID" -ne 0 ]; then
+  echo "Please run as root (sudo bash install.sh or pipe to sudo bash)"
+  exit 1
+fi
+
+# Helper function to detect OS and architecture
+detect_platform() {
+  local os=$(uname -s)
+  local arch=$(uname -m)
+
+  # Map OS
+  case "$os" in
+    Linux)
+      os_name="linux"
+      ;;
+    Darwin)
+      os_name="macos"
+      ;;
+    MINGW*|MSYS*|CYGWIN*)
+      os_name="windows"
+      ;;
+    *)
+      echo "Error: Unsupported operating system: $os"
+      echo "Supported: Linux, macOS, Windows"
+      exit 1
+      ;;
+  esac
+
+  # Map architecture
+  case "$arch" in
+    x86_64|amd64)
+      arch_name="x64"
+      ;;
+    aarch64|arm64)
+      arch_name="arm64"
+      ;;
+    *)
+      echo "Error: Unsupported architecture: $arch"
+      echo "Supported: x86_64/amd64 (x64), aarch64/arm64 (arm64)"
+      exit 1
+      ;;
+  esac
+
+  # Construct binary name
+  if [ "$os_name" = "windows" ]; then
+    echo "onebox-${os_name}-${arch_name}.exe"
+  else
+    echo "onebox-${os_name}-${arch_name}"
+  fi
+}
+
+# Get latest release version from Gitea API
+get_latest_version() {
+  echo "Fetching latest release version from Gitea..." >&2
+
+  local api_url="${GITEA_BASE_URL}/api/v1/repos/${GITEA_REPO}/releases/latest"
+  local response=$(curl -sSL "$api_url" 2>/dev/null)
+
+  if [ $? -ne 0 ] || [ -z "$response" ]; then
+    echo "Error: Failed to fetch latest release information from Gitea API" >&2
+    echo "URL: $api_url" >&2
+    exit 1
+  fi
+
+  # Extract tag_name from JSON response
+  local version=$(echo "$response" | grep -o '"tag_name":"[^"]*"' | cut -d'"' -f4)
+
+  if [ -z "$version" ]; then
+    echo "Error: Could not determine latest version from API response" >&2
+    exit 1
+  fi
+
+  echo "$version"
+}
+
+# Main installation process
+echo "================================================"
+echo " Onebox Installation Script"
+echo "================================================"
+echo ""
+
+# Detect platform
+BINARY_NAME=$(detect_platform)
+echo "Detected platform: $BINARY_NAME"
+echo ""
+
+# Determine version to install
+if [ -n "$SPECIFIED_VERSION" ]; then
+  VERSION="$SPECIFIED_VERSION"
+  echo "Installing specified version: $VERSION"
+else
+  VERSION=$(get_latest_version)
+  echo "Installing latest version: $VERSION"
+fi
+echo ""
+
+# Construct download URL
+DOWNLOAD_URL="${GITEA_BASE_URL}/${GITEA_REPO}/releases/download/${VERSION}/${BINARY_NAME}"
+echo "Download URL: $DOWNLOAD_URL"
+echo ""
+
+# Check if service is running and stop it
+SERVICE_WAS_RUNNING=0
+if systemctl is-enabled --quiet "$SERVICE_NAME" 2>/dev/null || systemctl is-active --quiet "$SERVICE_NAME" 2>/dev/null; then
+  SERVICE_WAS_RUNNING=1
+  if systemctl is-active --quiet "$SERVICE_NAME" 2>/dev/null; then
+    echo "Stopping Onebox service..."
+    systemctl stop "$SERVICE_NAME"
+  fi
+fi
+
+# Clean installation directory - ensure only binary exists
+if [ -d "$INSTALL_DIR" ]; then
+  echo "Cleaning installation directory: $INSTALL_DIR"
+  rm -rf "$INSTALL_DIR"
+fi
+
+# Create fresh installation directory
+echo "Creating installation directory: $INSTALL_DIR"
+mkdir -p "$INSTALL_DIR"
+
+# Download binary
+echo "Downloading Onebox binary..."
+TEMP_FILE="$INSTALL_DIR/onebox.download"
+curl -sSL "$DOWNLOAD_URL" -o "$TEMP_FILE"
+
+if [ $? -ne 0 ]; then
+  echo "Error: Failed to download binary from $DOWNLOAD_URL"
+  echo ""
+  echo "Please check:"
+  echo "  1. Your internet connection"
+  echo "  2. The specified version exists: ${GITEA_BASE_URL}/${GITEA_REPO}/releases"
+  echo "  3. The platform binary is available for this release"
+  rm -f "$TEMP_FILE"
+  exit 1
+fi
+
+# Check if download was successful (file exists and not empty)
+if [ ! -s "$TEMP_FILE" ]; then
+  echo "Error: Downloaded file is empty or does not exist"
+  rm -f "$TEMP_FILE"
+  exit 1
+fi
+
+# Move to final location
+BINARY_PATH="$INSTALL_DIR/onebox"
+mv "$TEMP_FILE" "$BINARY_PATH"
+
+if [ $? -ne 0 ] || [ ! -f "$BINARY_PATH" ]; then
+  echo "Error: Failed to move binary to $BINARY_PATH"
+  rm -f "$TEMP_FILE" 2>/dev/null
+  exit 1
+fi
+
+# Make executable
+chmod +x "$BINARY_PATH"
+
+if [ $? -ne 0 ]; then
+  echo "Error: Failed to make binary executable"
+  exit 1
+fi
+
+echo "Binary installed successfully to: $BINARY_PATH"
+echo ""
+
+# Check if /usr/local/bin is in PATH
+if [[ ":$PATH:" == *":/usr/local/bin:"* ]]; then
+  BIN_DIR="/usr/local/bin"
+else
+  BIN_DIR="/usr/bin"
+fi
+
+# Create symlink for global access
+ln -sf "$BINARY_PATH" "$BIN_DIR/onebox"
+echo "Symlink created: $BIN_DIR/onebox -> $BINARY_PATH"
+echo ""
+
+# Create data directories
+mkdir -p /var/lib/onebox
+mkdir -p /var/www/certbot
+
+# Restart service if it was running before update
+if [ $SERVICE_WAS_RUNNING -eq 1 ]; then
+  echo "Restarting Onebox service..."
+  systemctl restart "$SERVICE_NAME"
+  echo "Service restarted successfully."
+  echo ""
+fi
+
+echo "================================================"
+echo " Onebox Installation Complete!"
+echo "================================================"
+echo ""
+echo "Installation details:"
+echo "  Binary location: $BINARY_PATH"
+echo "  Symlink location: $BIN_DIR/onebox"
+echo "  Version: $VERSION"
+echo ""
+
+# Check if database exists (indicates existing installation)
+if [ -f "/var/lib/onebox/onebox.db" ]; then
+  echo "Data directory: /var/lib/onebox (preserved)"
+  echo ""
+  echo "Your existing data has been preserved."
+  if [ $SERVICE_WAS_RUNNING -eq 1 ]; then
+    echo "The service has been restarted with your current settings."
+  else
+    echo "Start the service with: onebox daemon start"
+  fi
+else
+  echo "Get started:"
+  echo ""
+  echo "  onebox --version"
+  echo "  onebox --help"
+  echo ""
+  echo "  1. Configure Cloudflare (optional):"
+  echo "     onebox config set cloudflareAPIKey <key>"
+  echo "     onebox config set cloudflareEmail <email>"
+  echo "     onebox config set cloudflareZoneID <zone-id>"
+  echo "     onebox config set serverIP <your-server-ip>"
+  echo ""
+  echo "  2. Configure ACME email:"
+  echo "     onebox config set acmeEmail <your@email.com>"
+  echo ""
+  echo "  3. Install daemon:"
+  echo "     onebox daemon install"
+  echo ""
+  echo "  4. Start daemon:"
+  echo "     onebox daemon start"
+  echo ""
+  echo "  5. Deploy your first service:"
+  echo "     onebox service add myapp --image nginx:latest --domain app.example.com"
+  echo ""
+  echo "  Web UI: http://localhost:3000"
+  echo "  Default credentials: admin / admin"
+fi
+echo ""
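
For quick reference, the invocations the rewritten installer supports, assembled from the script's own usage header, plus the Gitea API call its get_latest_version helper performs; the version tag shown is illustrative:

```bash
# One-line install of the latest release (recommended)
curl -sSL https://code.foss.global/serve.zone/onebox/raw/branch/main/install.sh | sudo bash

# Pin a specific version and a custom install directory
curl -sSL https://code.foss.global/serve.zone/onebox/raw/branch/main/install.sh \
  | sudo bash -s -- --version v1.11.0 --install-dir /opt/onebox

# What get_latest_version queries under the hood (Gitea releases API)
curl -sSL "https://code.foss.global/api/v1/repos/serve.zone/onebox/releases/latest" \
  | grep -o '"tag_name":"[^"]*"' | cut -d'"' -f4
```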
npmextra.json

@@ -11,6 +11,26 @@
       }
     ]
   },
+  "@git.zone/tsdeno": {
+    "compileTargets": [
+      {
+        "name": "onebox-linux-x64",
+        "entryPoint": "mod.ts",
+        "outDir": "dist/binaries",
+        "target": "x86_64-unknown-linux-gnu",
+        "permissions": ["--allow-all"],
+        "noCheck": true
+      },
+      {
+        "name": "onebox-linux-arm64",
+        "entryPoint": "mod.ts",
+        "outDir": "dist/binaries",
+        "target": "aarch64-unknown-linux-gnu",
+        "permissions": ["--allow-all"],
+        "noCheck": true
+      }
+    ]
+  },
   "@git.zone/tswatch": {
     "bundles": [
       {
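
These targets pair with the workflow change above: CI now runs a single command instead of a per-platform script. A minimal local sketch, assuming tsdeno compile picks up the compileTargets from npmextra.json (the exact lookup is tsdeno's own behavior, not verified here):

```bash
# Install dev dependencies (provides @git.zone/tsdeno), then build all
# targets declared in npmextra.json into dist/binaries/, mirroring the
# "Compile all platform binaries" step in the workflows above.
corepack enable
pnpm install --ignore-scripts
mkdir -p dist/binaries
npx tsdeno compile
ls -lh dist/binaries/   # expect onebox-linux-x64 and onebox-linux-arm64
```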
package.json

@@ -1,6 +1,6 @@
 {
   "name": "@serve.zone/onebox",
-  "version": "1.12.0",
+  "version": "1.14.4",
   "description": "Self-hosted container platform with automatic SSL and DNS - a mini Heroku for single servers",
   "main": "mod.ts",
   "type": "module",
@@ -61,6 +61,7 @@
   },
   "devDependencies": {
     "@git.zone/tsbundle": "^2.9.0",
+    "@git.zone/tsdeno": "^1.2.0",
     "@git.zone/tswatch": "^3.2.0"
   }
 }
pnpm-lock.yaml (929, generated)

File diff suppressed because it is too large.
readme.md (16)

@@ -47,10 +47,11 @@ For reporting bugs, issues, or security vulnerabilities, please visit [community
 ### Installation
 
 ```bash
-# Download the latest release for your platform
-curl -sSL https://code.foss.global/serve.zone/onebox/releases/latest/download/onebox-linux-x64 -o onebox
-chmod +x onebox
-sudo mv onebox /usr/local/bin/
+# One-line install (recommended)
+curl -sSL https://code.foss.global/serve.zone/onebox/raw/branch/main/install.sh | sudo bash
+
+# Install a specific version
+curl -sSL https://code.foss.global/serve.zone/onebox/raw/branch/main/install.sh | sudo bash -s -- --version v1.11.0
 
 # Or install from npm
 pnpm install -g @serve.zone/onebox
@@ -242,6 +243,13 @@ onebox config set cloudflareZoneID your-zone-id
 onebox status
 ```
 
+### Upgrade
+
+```bash
+# Upgrade to the latest version (requires root)
+sudo onebox upgrade
+```
+
 ## Configuration 🔧
 
 ### System Requirements
scripts/compile-all.sh (deleted)

@@ -1,56 +0,0 @@
-#!/bin/bash
-#
-# Compile Onebox for all platforms
-#
-
-set -e
-
-VERSION=$(grep '"version"' deno.json | cut -d'"' -f4)
-echo "Compiling Onebox v${VERSION} for all platforms..."
-
-# Create dist directory
-mkdir -p dist/binaries
-
-# Compile for each platform
-echo "Compiling for Linux x64..."
-deno compile --allow-all --no-check \
-  --output "dist/binaries/onebox-linux-x64" \
-  --target x86_64-unknown-linux-gnu \
-  mod.ts
-
-echo "Compiling for Linux ARM64..."
-deno compile --allow-all --no-check \
-  --output "dist/binaries/onebox-linux-arm64" \
-  --target aarch64-unknown-linux-gnu \
-  mod.ts
-
-echo "Compiling for macOS x64..."
-deno compile --allow-all --no-check \
-  --output "dist/binaries/onebox-macos-x64" \
-  --target x86_64-apple-darwin \
-  mod.ts
-
-echo "Compiling for macOS ARM64..."
-deno compile --allow-all --no-check \
-  --output "dist/binaries/onebox-macos-arm64" \
-  --target aarch64-apple-darwin \
-  mod.ts
-
-echo "Compiling for Windows x64..."
-deno compile --allow-all --no-check \
-  --output "dist/binaries/onebox-windows-x64.exe" \
-  --target x86_64-pc-windows-msvc \
-  mod.ts
-
-echo ""
-echo "✓ Compilation complete!"
-echo ""
-echo "Binaries:"
-ls -lh dist/binaries/
-echo ""
-echo "Next steps:"
-echo "1. Test binaries on their respective platforms"
-echo "2. Create git tag: git tag v${VERSION}"
-echo "3. Push tag: git push origin v${VERSION}"
-echo "4. Upload binaries to Gitea release"
-echo "5. Publish to npm: pnpm publish"
@@ -3,6 +3,6 @@
 */
 export const commitinfo = {
   name: '@serve.zone/onebox',
-  version: '1.12.0',
+  version: '1.14.4',
   description: 'Self-hosted container platform with automatic SSL and DNS - a mini Heroku for single servers'
 }
@@ -46,6 +46,59 @@ export class OneboxDaemon
     }
   }
 
+  /**
+   * Ensure Docker is installed, installing it if necessary
+   */
+  private async ensureDocker(): Promise<void> {
+    try {
+      const cmd = new Deno.Command('docker', {
+        args: ['--version'],
+        stdout: 'piped',
+        stderr: 'piped',
+      });
+      const result = await cmd.output();
+      if (result.success) {
+        const version = new TextDecoder().decode(result.stdout).trim();
+        logger.info(`Docker found: ${version}`);
+        return;
+      }
+    } catch {
+      // docker command not found
+    }
+
+    logger.info('Docker not found. Installing Docker...');
+    const installCmd = new Deno.Command('bash', {
+      args: ['-c', 'curl -fsSL https://get.docker.com | sh'],
+      stdin: 'inherit',
+      stdout: 'inherit',
+      stderr: 'inherit',
+    });
+    const installResult = await installCmd.output();
+    if (!installResult.success) {
+      throw new Error('Failed to install Docker. Please install it manually: curl -fsSL https://get.docker.com | sh');
+    }
+    logger.success('Docker installed successfully');
+
+    // Initialize Docker Swarm
+    logger.info('Initializing Docker Swarm...');
+    const swarmCmd = new Deno.Command('docker', {
+      args: ['swarm', 'init'],
+      stdout: 'piped',
+      stderr: 'piped',
+    });
+    const swarmResult = await swarmCmd.output();
+    if (swarmResult.success) {
+      logger.success('Docker Swarm initialized');
+    } else {
+      const stderr = new TextDecoder().decode(swarmResult.stderr);
+      if (stderr.includes('already part of a swarm')) {
+        logger.info('Docker Swarm already initialized');
+      } else {
+        logger.warn(`Docker Swarm init warning: ${stderr.trim()}`);
+      }
+    }
+  }
+
   /**
    * Install systemd service
    */
@@ -53,6 +106,9 @@ export class OneboxDaemon
     try {
       logger.info('Installing Onebox daemon service...');
 
+      // Ensure Docker is installed
+      await this.ensureDocker();
+
       // Initialize smartdaemon if needed
       if (!this.smartdaemon) {
         this.smartdaemon = new plugins.smartdaemon.SmartDaemon();
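
For readers skimming the diff, a sketch of the shell-level equivalent of the sequence ensureDocker runs through Deno.Command:

```bash
# Probe for Docker; install via the official convenience script if absent.
docker --version >/dev/null 2>&1 || curl -fsSL https://get.docker.com | sh

# Initialize Swarm; failure is tolerated because the node may already be
# part of a swarm (the daemon logs a warning in that case).
docker swarm init || true
```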
@@ -25,6 +25,7 @@ import type {
 import type { TBindValue } from './types.ts';
 import { logger } from '../logging.ts';
 import { getErrorMessage } from '../utils/error.ts';
+import { MigrationRunner } from './migrations/index.ts';
 
 // Import repositories
 import {
@@ -71,7 +72,8 @@ export class OneboxDatabase
     await this.createTables();
 
     // Run migrations if needed
-    await this.runMigrations();
+    const runner = new MigrationRunner(this.query.bind(this));
+    runner.run();
 
     // Initialize repositories with bound query function
     const queryFn = this.query.bind(this);
@@ -241,724 +243,6 @@
   /**
    * Run database migrations
    */
-  private async runMigrations(): Promise<void> {
-    if (!this.db) throw new Error('Database not initialized');
-
-    try {
-      const currentVersion = this.getMigrationVersion();
-      logger.info(`Current database migration version: ${currentVersion}`);
-
-      // Migration 1: Initial schema
-      if (currentVersion === 0) {
-        logger.info('Setting initial migration version to 1');
-        this.setMigrationVersion(1);
-      }
-
-      // Migration 2: Convert timestamp columns from INTEGER to REAL
-      const updatedVersion = this.getMigrationVersion();
-      if (updatedVersion < 2) {
-        logger.info('Running migration 2: Converting timestamps to REAL...');
-
-        // SSL certificates
-        this.query(`
-          CREATE TABLE ssl_certificates_new (
-            id INTEGER PRIMARY KEY AUTOINCREMENT,
-            domain TEXT NOT NULL UNIQUE,
-            cert_path TEXT NOT NULL,
-            key_path TEXT NOT NULL,
-            full_chain_path TEXT NOT NULL,
-            expiry_date REAL NOT NULL,
-            issuer TEXT NOT NULL,
-            created_at REAL NOT NULL,
-            updated_at REAL NOT NULL
-          )
-        `);
-        this.query(`INSERT INTO ssl_certificates_new SELECT * FROM ssl_certificates`);
-        this.query(`DROP TABLE ssl_certificates`);
-        this.query(`ALTER TABLE ssl_certificates_new RENAME TO ssl_certificates`);
-
-        // Services
-        this.query(`
-          CREATE TABLE services_new (
-            id INTEGER PRIMARY KEY AUTOINCREMENT,
-            name TEXT NOT NULL UNIQUE,
-            image TEXT NOT NULL,
-            registry TEXT,
-            env_vars TEXT NOT NULL,
-            port INTEGER NOT NULL,
-            domain TEXT,
-            container_id TEXT,
-            status TEXT NOT NULL DEFAULT 'stopped',
-            created_at REAL NOT NULL,
-            updated_at REAL NOT NULL
-          )
-        `);
-        this.query(`INSERT INTO services_new SELECT * FROM services`);
-        this.query(`DROP TABLE services`);
-        this.query(`ALTER TABLE services_new RENAME TO services`);
-
-        // Registries
-        this.query(`
-          CREATE TABLE registries_new (
-            id INTEGER PRIMARY KEY AUTOINCREMENT,
-            url TEXT NOT NULL UNIQUE,
-            username TEXT NOT NULL,
-            password_encrypted TEXT NOT NULL,
-            created_at REAL NOT NULL
-          )
-        `);
-        this.query(`INSERT INTO registries_new SELECT * FROM registries`);
-        this.query(`DROP TABLE registries`);
-        this.query(`ALTER TABLE registries_new RENAME TO registries`);
-
-        // Nginx configs
-        this.query(`
-          CREATE TABLE nginx_configs_new (
-            id INTEGER PRIMARY KEY AUTOINCREMENT,
-            service_id INTEGER NOT NULL,
-            domain TEXT NOT NULL,
-            port INTEGER NOT NULL,
-            ssl_enabled INTEGER NOT NULL DEFAULT 0,
-            config_template TEXT NOT NULL,
-            created_at REAL NOT NULL,
-            updated_at REAL NOT NULL,
-            FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE
-          )
-        `);
-        this.query(`INSERT INTO nginx_configs_new SELECT * FROM nginx_configs`);
-        this.query(`DROP TABLE nginx_configs`);
-        this.query(`ALTER TABLE nginx_configs_new RENAME TO nginx_configs`);
-
-        // DNS records
-        this.query(`
-          CREATE TABLE dns_records_new (
-            id INTEGER PRIMARY KEY AUTOINCREMENT,
-            domain TEXT NOT NULL UNIQUE,
-            type TEXT NOT NULL,
-            value TEXT NOT NULL,
-            cloudflare_id TEXT,
-            zone_id TEXT,
-            created_at REAL NOT NULL,
-            updated_at REAL NOT NULL
-          )
-        `);
-        this.query(`INSERT INTO dns_records_new SELECT * FROM dns_records`);
-        this.query(`DROP TABLE dns_records`);
-        this.query(`ALTER TABLE dns_records_new RENAME TO dns_records`);
-
-        // Metrics
-        this.query(`
-          CREATE TABLE metrics_new (
-            id INTEGER PRIMARY KEY AUTOINCREMENT,
-            service_id INTEGER NOT NULL,
-            timestamp REAL NOT NULL,
-            cpu_percent REAL NOT NULL,
-            memory_used INTEGER NOT NULL,
-            memory_limit INTEGER NOT NULL,
-            network_rx_bytes INTEGER NOT NULL,
-            network_tx_bytes INTEGER NOT NULL,
-            FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE
-          )
-        `);
-        this.query(`INSERT INTO metrics_new SELECT * FROM metrics`);
-        this.query(`DROP TABLE metrics`);
-        this.query(`ALTER TABLE metrics_new RENAME TO metrics`);
-        this.query(`CREATE INDEX IF NOT EXISTS idx_metrics_service_timestamp ON metrics(service_id, timestamp DESC)`);
-
-        // Logs
-        this.query(`
-          CREATE TABLE logs_new (
-            id INTEGER PRIMARY KEY AUTOINCREMENT,
-            service_id INTEGER NOT NULL,
-            timestamp REAL NOT NULL,
-            message TEXT NOT NULL,
-            level TEXT NOT NULL,
-            source TEXT NOT NULL,
-            FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE
-          )
-        `);
-        this.query(`INSERT INTO logs_new SELECT * FROM logs`);
-        this.query(`DROP TABLE logs`);
-        this.query(`ALTER TABLE logs_new RENAME TO logs`);
-        this.query(`CREATE INDEX IF NOT EXISTS idx_logs_service_timestamp ON logs(service_id, timestamp DESC)`);
-
-        // Users
-        this.query(`
-          CREATE TABLE users_new (
-            id INTEGER PRIMARY KEY AUTOINCREMENT,
-            username TEXT NOT NULL UNIQUE,
-            password_hash TEXT NOT NULL,
-            role TEXT NOT NULL DEFAULT 'user',
-            created_at REAL NOT NULL,
-            updated_at REAL NOT NULL
-          )
-        `);
-        this.query(`INSERT INTO users_new SELECT * FROM users`);
-        this.query(`DROP TABLE users`);
-        this.query(`ALTER TABLE users_new RENAME TO users`);
-
-        // Settings
-        this.query(`
-          CREATE TABLE settings_new (
-            key TEXT PRIMARY KEY,
-            value TEXT NOT NULL,
-            updated_at REAL NOT NULL
-          )
-        `);
-        this.query(`INSERT INTO settings_new SELECT * FROM settings`);
-        this.query(`DROP TABLE settings`);
-        this.query(`ALTER TABLE settings_new RENAME TO settings`);
-
-        // Migrations table itself
-        this.query(`
-          CREATE TABLE migrations_new (
-            version INTEGER PRIMARY KEY,
-            applied_at REAL NOT NULL
-          )
-        `);
-        this.query(`INSERT INTO migrations_new SELECT * FROM migrations`);
-        this.query(`DROP TABLE migrations`);
-        this.query(`ALTER TABLE migrations_new RENAME TO migrations`);
-
-        this.setMigrationVersion(2);
-        logger.success('Migration 2 completed: All timestamps converted to REAL');
-      }
-
-      // Migration 3: Domain management tables
-      const version3 = this.getMigrationVersion();
-      if (version3 < 3) {
-        logger.info('Running migration 3: Creating domain management tables...');
-
-        this.query(`
-          CREATE TABLE domains (
-            id INTEGER PRIMARY KEY AUTOINCREMENT,
-            domain TEXT NOT NULL UNIQUE,
-            dns_provider TEXT,
-            cloudflare_zone_id TEXT,
-            is_obsolete INTEGER NOT NULL DEFAULT 0,
-            default_wildcard INTEGER NOT NULL DEFAULT 1,
-            created_at REAL NOT NULL,
-            updated_at REAL NOT NULL
-          )
-        `);
-
-        this.query(`
-          CREATE TABLE certificates (
-            id INTEGER PRIMARY KEY AUTOINCREMENT,
-            domain_id INTEGER NOT NULL,
-            cert_domain TEXT NOT NULL,
-            is_wildcard INTEGER NOT NULL DEFAULT 0,
-            cert_path TEXT NOT NULL,
-            key_path TEXT NOT NULL,
-            full_chain_path TEXT NOT NULL,
-            expiry_date REAL NOT NULL,
-            issuer TEXT NOT NULL,
-            is_valid INTEGER NOT NULL DEFAULT 1,
-            created_at REAL NOT NULL,
-            updated_at REAL NOT NULL,
-            FOREIGN KEY (domain_id) REFERENCES domains(id) ON DELETE CASCADE
-          )
-        `);
-
-        this.query(`
-          CREATE TABLE cert_requirements (
-            id INTEGER PRIMARY KEY AUTOINCREMENT,
-            service_id INTEGER NOT NULL,
-            domain_id INTEGER NOT NULL,
-            subdomain TEXT NOT NULL,
-            certificate_id INTEGER,
-            status TEXT NOT NULL DEFAULT 'pending',
-            created_at REAL NOT NULL,
-            updated_at REAL NOT NULL,
-            FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE,
-            FOREIGN KEY (domain_id) REFERENCES domains(id) ON DELETE CASCADE,
-            FOREIGN KEY (certificate_id) REFERENCES certificates(id) ON DELETE SET NULL
-          )
-        `);
-
-        interface OldSslCert {
-          id?: number;
-          domain?: string;
-          cert_path?: string;
-          key_path?: string;
-          full_chain_path?: string;
-          expiry_date?: number;
-          issuer?: string;
-          created_at?: number;
-          updated_at?: number;
-          [key: number]: unknown;
-        }
-        const existingCerts = this.query<OldSslCert>('SELECT * FROM ssl_certificates');
-
-        const now = Date.now();
-        const domainMap = new Map<string, number>();
-
-        for (const cert of existingCerts) {
-          const domain = String(cert.domain ?? (cert as Record<number, unknown>)[1]);
-          if (!domainMap.has(domain)) {
-            this.query(
-              'INSERT INTO domains (domain, dns_provider, is_obsolete, default_wildcard, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?)',
-              [domain, null, 0, 1, now, now]
-            );
-            const result = this.query<{ id?: number; [key: number]: unknown }>('SELECT last_insert_rowid() as id');
-            const domainId = result[0].id ?? (result[0] as Record<number, unknown>)[0];
-            domainMap.set(domain, Number(domainId));
-          }
-        }
-
-        for (const cert of existingCerts) {
-          const domain = String(cert.domain ?? (cert as Record<number, unknown>)[1]);
-          const domainId = domainMap.get(domain);
-
-          this.query(
-            `INSERT INTO certificates (
-              domain_id, cert_domain, is_wildcard, cert_path, key_path, full_chain_path,
-              expiry_date, issuer, is_valid, created_at, updated_at
-            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
-            [
-              domainId,
-              domain,
-              0,
-              String(cert.cert_path ?? (cert as Record<number, unknown>)[2]),
-              String(cert.key_path ?? (cert as Record<number, unknown>)[3]),
-              String(cert.full_chain_path ?? (cert as Record<number, unknown>)[4]),
-              Number(cert.expiry_date ?? (cert as Record<number, unknown>)[5]),
-              String(cert.issuer ?? (cert as Record<number, unknown>)[6]),
-              1,
-              Number(cert.created_at ?? (cert as Record<number, unknown>)[7]),
-              Number(cert.updated_at ?? (cert as Record<number, unknown>)[8])
-            ]
-          );
-        }
-
-        this.query('DROP TABLE ssl_certificates');
-        this.query('CREATE INDEX IF NOT EXISTS idx_domains_cloudflare_zone ON domains(cloudflare_zone_id)');
-        this.query('CREATE INDEX IF NOT EXISTS idx_certificates_domain ON certificates(domain_id)');
-        this.query('CREATE INDEX IF NOT EXISTS idx_certificates_expiry ON certificates(expiry_date)');
-        this.query('CREATE INDEX IF NOT EXISTS idx_cert_requirements_service ON cert_requirements(service_id)');
-        this.query('CREATE INDEX IF NOT EXISTS idx_cert_requirements_domain ON cert_requirements(domain_id)');
-
-        this.setMigrationVersion(3);
-        logger.success('Migration 3 completed: Domain management tables created');
-      }
-
-      // Migration 4: Add Onebox Registry support columns
-      const version4 = this.getMigrationVersion();
-      if (version4 < 4) {
-        logger.info('Running migration 4: Adding Onebox Registry columns to services table...');
-
-        this.query(`ALTER TABLE services ADD COLUMN use_onebox_registry INTEGER DEFAULT 0`);
-        this.query(`ALTER TABLE services ADD COLUMN registry_repository TEXT`);
-        this.query(`ALTER TABLE services ADD COLUMN registry_token TEXT`);
-        this.query(`ALTER TABLE services ADD COLUMN registry_image_tag TEXT DEFAULT 'latest'`);
-        this.query(`ALTER TABLE services ADD COLUMN auto_update_on_push INTEGER DEFAULT 0`);
-        this.query(`ALTER TABLE services ADD COLUMN image_digest TEXT`);
-
-        this.setMigrationVersion(4);
-        logger.success('Migration 4 completed: Onebox Registry columns added to services table');
-      }
-
-      // Migration 5: Registry tokens table
-      const version5 = this.getMigrationVersion();
-      if (version5 < 5) {
-        logger.info('Running migration 5: Creating registry_tokens table...');
-
-        this.query(`
-          CREATE TABLE registry_tokens (
-            id INTEGER PRIMARY KEY AUTOINCREMENT,
-            name TEXT NOT NULL,
-            token_hash TEXT NOT NULL UNIQUE,
-            token_type TEXT NOT NULL,
-            scope TEXT NOT NULL,
-            expires_at REAL,
-            created_at REAL NOT NULL,
-            last_used_at REAL,
-            created_by TEXT NOT NULL
-          )
-        `);
-
-        this.query('CREATE INDEX IF NOT EXISTS idx_registry_tokens_type ON registry_tokens(token_type)');
-        this.query('CREATE INDEX IF NOT EXISTS idx_registry_tokens_hash ON registry_tokens(token_hash)');
-
-        this.setMigrationVersion(5);
-        logger.success('Migration 5 completed: Registry tokens table created');
-      }
-
-      // Migration 6: Drop registry_token column from services table
-      const version6 = this.getMigrationVersion();
-      if (version6 < 6) {
-        logger.info('Running migration 6: Dropping registry_token column from services table...');
-
-        this.query(`
-          CREATE TABLE services_new (
-            id INTEGER PRIMARY KEY AUTOINCREMENT,
-            name TEXT NOT NULL UNIQUE,
-            image TEXT NOT NULL,
-            registry TEXT,
-            env_vars TEXT,
-            port INTEGER NOT NULL,
-            domain TEXT,
-            container_id TEXT,
-            status TEXT NOT NULL,
-            created_at REAL NOT NULL,
-            updated_at REAL NOT NULL,
-            use_onebox_registry INTEGER DEFAULT 0,
-            registry_repository TEXT,
-            registry_image_tag TEXT DEFAULT 'latest',
-            auto_update_on_push INTEGER DEFAULT 0,
-            image_digest TEXT
-          )
-        `);
-
-        this.query(`
-          INSERT INTO services_new (
-            id, name, image, registry, env_vars, port, domain, container_id, status,
-            created_at, updated_at, use_onebox_registry, registry_repository,
-            registry_image_tag, auto_update_on_push, image_digest
-          )
-          SELECT
-            id, name, image, registry, env_vars, port, domain, container_id, status,
-            created_at, updated_at, use_onebox_registry, registry_repository,
-            registry_image_tag, auto_update_on_push, image_digest
-          FROM services
-        `);
-
-        this.query('DROP TABLE services');
-        this.query('ALTER TABLE services_new RENAME TO services');
-        this.query('CREATE INDEX IF NOT EXISTS idx_services_name ON services(name)');
-        this.query('CREATE INDEX IF NOT EXISTS idx_services_status ON services(status)');
-
-        this.setMigrationVersion(6);
-        logger.success('Migration 6 completed: registry_token column dropped from services table');
-      }
-
-      // Migration 7: Platform services tables
-      const version7 = this.getMigrationVersion();
-      if (version7 < 7) {
-        logger.info('Running migration 7: Creating platform services tables...');
-
-        this.query(`
-          CREATE TABLE platform_services (
-            id INTEGER PRIMARY KEY AUTOINCREMENT,
-            name TEXT NOT NULL UNIQUE,
-            type TEXT NOT NULL,
-            status TEXT NOT NULL DEFAULT 'stopped',
-            container_id TEXT,
-            config TEXT NOT NULL DEFAULT '{}',
-            admin_credentials_encrypted TEXT,
-            created_at REAL NOT NULL,
-            updated_at REAL NOT NULL
-          )
-        `);
-
-        this.query(`
-          CREATE TABLE platform_resources (
-            id INTEGER PRIMARY KEY AUTOINCREMENT,
-            platform_service_id INTEGER NOT NULL,
-            service_id INTEGER NOT NULL,
-            resource_type TEXT NOT NULL,
-            resource_name TEXT NOT NULL,
-            credentials_encrypted TEXT NOT NULL,
-            created_at REAL NOT NULL,
-            FOREIGN KEY (platform_service_id) REFERENCES platform_services(id) ON DELETE CASCADE,
-            FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE
-          )
-        `);
-
-        this.query(`ALTER TABLE services ADD COLUMN platform_requirements TEXT DEFAULT '{}'`);
-
-        this.query('CREATE INDEX IF NOT EXISTS idx_platform_services_type ON platform_services(type)');
-        this.query('CREATE INDEX IF NOT EXISTS idx_platform_resources_service ON platform_resources(service_id)');
-        this.query('CREATE INDEX IF NOT EXISTS idx_platform_resources_platform ON platform_resources(platform_service_id)');
-
-        this.setMigrationVersion(7);
-        logger.success('Migration 7 completed: Platform services tables created');
-      }
-
-      // Migration 8: Convert certificates table to store PEM content
-      const version8 = this.getMigrationVersion();
-      if (version8 < 8) {
-        logger.info('Running migration 8: Converting certificates table to store PEM content...');
-
-        this.query(`
-          CREATE TABLE certificates_new (
-            id INTEGER PRIMARY KEY AUTOINCREMENT,
-            domain_id INTEGER NOT NULL,
-            cert_domain TEXT NOT NULL,
-            is_wildcard INTEGER NOT NULL DEFAULT 0,
-            cert_pem TEXT NOT NULL DEFAULT '',
-            key_pem TEXT NOT NULL DEFAULT '',
-            fullchain_pem TEXT NOT NULL DEFAULT '',
-            expiry_date REAL NOT NULL,
-            issuer TEXT NOT NULL,
-            is_valid INTEGER NOT NULL DEFAULT 1,
-            created_at REAL NOT NULL,
-            updated_at REAL NOT NULL,
-            FOREIGN KEY (domain_id) REFERENCES domains(id) ON DELETE CASCADE
-          )
-        `);
-
-        this.query(`
-          INSERT INTO certificates_new (id, domain_id, cert_domain, is_wildcard, cert_pem, key_pem, fullchain_pem, expiry_date, issuer, is_valid, created_at, updated_at)
-          SELECT id, domain_id, cert_domain, is_wildcard, '', '', '', expiry_date, issuer, 0, created_at, updated_at FROM certificates
-        `);
-
-        this.query('DROP TABLE certificates');
-        this.query('ALTER TABLE certificates_new RENAME TO certificates');
-        this.query('CREATE INDEX IF NOT EXISTS idx_certificates_domain ON certificates(domain_id)');
|
|
||||||
this.query('CREATE INDEX IF NOT EXISTS idx_certificates_expiry ON certificates(expiry_date)');
|
|
||||||
|
|
||||||
this.setMigrationVersion(8);
|
|
||||||
logger.success('Migration 8 completed: Certificates table now stores PEM content');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Migration 9: Backup system tables
|
|
||||||
const version9 = this.getMigrationVersion();
|
|
||||||
if (version9 < 9) {
|
|
||||||
logger.info('Running migration 9: Creating backup system tables...');
|
|
||||||
|
|
||||||
// Add include_image_in_backup column to services table
|
|
||||||
this.query(`ALTER TABLE services ADD COLUMN include_image_in_backup INTEGER DEFAULT 1`);
|
|
||||||
|
|
||||||
// Create backups table
|
|
||||||
this.query(`
|
|
||||||
CREATE TABLE backups (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
service_id INTEGER NOT NULL,
|
|
||||||
service_name TEXT NOT NULL,
|
|
||||||
filename TEXT NOT NULL,
|
|
||||||
size_bytes INTEGER NOT NULL,
|
|
||||||
created_at REAL NOT NULL,
|
|
||||||
includes_image INTEGER NOT NULL,
|
|
||||||
platform_resources TEXT NOT NULL DEFAULT '[]',
|
|
||||||
checksum TEXT NOT NULL,
|
|
||||||
FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE
|
|
||||||
)
|
|
||||||
`);
|
|
||||||
|
|
||||||
this.query('CREATE INDEX IF NOT EXISTS idx_backups_service ON backups(service_id)');
|
|
||||||
this.query('CREATE INDEX IF NOT EXISTS idx_backups_created ON backups(created_at DESC)');
|
|
||||||
|
|
||||||
this.setMigrationVersion(9);
|
|
||||||
logger.success('Migration 9 completed: Backup system tables created');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Migration 10: Backup schedules table and extend backups table
|
|
||||||
const version10 = this.getMigrationVersion();
|
|
||||||
if (version10 < 10) {
|
|
||||||
logger.info('Running migration 10: Creating backup schedules table...');
|
|
||||||
|
|
||||||
// Create backup_schedules table
|
|
||||||
this.query(`
|
|
||||||
CREATE TABLE backup_schedules (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
service_id INTEGER NOT NULL,
|
|
||||||
service_name TEXT NOT NULL,
|
|
||||||
cron_expression TEXT NOT NULL,
|
|
||||||
retention_tier TEXT NOT NULL,
|
|
||||||
enabled INTEGER NOT NULL DEFAULT 1,
|
|
||||||
last_run_at REAL,
|
|
||||||
next_run_at REAL,
|
|
||||||
last_status TEXT,
|
|
||||||
last_error TEXT,
|
|
||||||
created_at REAL NOT NULL,
|
|
||||||
updated_at REAL NOT NULL,
|
|
||||||
FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE
|
|
||||||
)
|
|
||||||
`);
|
|
||||||
|
|
||||||
this.query('CREATE INDEX IF NOT EXISTS idx_backup_schedules_service ON backup_schedules(service_id)');
|
|
||||||
this.query('CREATE INDEX IF NOT EXISTS idx_backup_schedules_enabled ON backup_schedules(enabled)');
|
|
||||||
|
|
||||||
// Extend backups table with retention_tier and schedule_id columns
|
|
||||||
this.query('ALTER TABLE backups ADD COLUMN retention_tier TEXT');
|
|
||||||
this.query('ALTER TABLE backups ADD COLUMN schedule_id INTEGER REFERENCES backup_schedules(id) ON DELETE SET NULL');
|
|
||||||
|
|
||||||
this.setMigrationVersion(10);
|
|
||||||
logger.success('Migration 10 completed: Backup schedules table created');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Migration 11: Add scope columns for global/pattern backup schedules
|
|
||||||
const version11 = this.getMigrationVersion();
|
|
||||||
if (version11 < 11) {
|
|
||||||
logger.info('Running migration 11: Adding scope columns to backup_schedules...');
|
|
||||||
|
|
||||||
// Recreate backup_schedules table with nullable service_id/service_name and new scope columns
|
|
||||||
this.query(`
|
|
||||||
CREATE TABLE backup_schedules_new (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
scope_type TEXT NOT NULL DEFAULT 'service',
|
|
||||||
scope_pattern TEXT,
|
|
||||||
service_id INTEGER,
|
|
||||||
service_name TEXT,
|
|
||||||
cron_expression TEXT NOT NULL,
|
|
||||||
retention_tier TEXT NOT NULL,
|
|
||||||
enabled INTEGER NOT NULL DEFAULT 1,
|
|
||||||
last_run_at REAL,
|
|
||||||
next_run_at REAL,
|
|
||||||
last_status TEXT,
|
|
||||||
last_error TEXT,
|
|
||||||
created_at REAL NOT NULL,
|
|
||||||
updated_at REAL NOT NULL,
|
|
||||||
FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE
|
|
||||||
)
|
|
||||||
`);
|
|
||||||
|
|
||||||
// Copy existing schedules (all are service-specific)
|
|
||||||
this.query(`
|
|
||||||
INSERT INTO backup_schedules_new (
|
|
||||||
id, scope_type, scope_pattern, service_id, service_name, cron_expression,
|
|
||||||
retention_tier, enabled, last_run_at, next_run_at, last_status, last_error,
|
|
||||||
created_at, updated_at
|
|
||||||
)
|
|
||||||
SELECT
|
|
||||||
id, 'service', NULL, service_id, service_name, cron_expression,
|
|
||||||
retention_tier, enabled, last_run_at, next_run_at, last_status, last_error,
|
|
||||||
created_at, updated_at
|
|
||||||
FROM backup_schedules
|
|
||||||
`);
|
|
||||||
|
|
||||||
this.query('DROP TABLE backup_schedules');
|
|
||||||
this.query('ALTER TABLE backup_schedules_new RENAME TO backup_schedules');
|
|
||||||
this.query('CREATE INDEX IF NOT EXISTS idx_backup_schedules_service ON backup_schedules(service_id)');
|
|
||||||
this.query('CREATE INDEX IF NOT EXISTS idx_backup_schedules_enabled ON backup_schedules(enabled)');
|
|
||||||
this.query('CREATE INDEX IF NOT EXISTS idx_backup_schedules_scope ON backup_schedules(scope_type)');
|
|
||||||
|
|
||||||
this.setMigrationVersion(11);
|
|
||||||
logger.success('Migration 11 completed: Scope columns added to backup_schedules');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Migration 12: GFS retention policy - replace retention_tier with per-tier retention counts
|
|
||||||
const version12 = this.getMigrationVersion();
|
|
||||||
if (version12 < 12) {
|
|
||||||
logger.info('Running migration 12: Updating backup system for GFS retention policy...');
|
|
||||||
|
|
||||||
// Recreate backup_schedules table with new retention columns
|
|
||||||
this.query(`
|
|
||||||
CREATE TABLE backup_schedules_new (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
scope_type TEXT NOT NULL DEFAULT 'service',
|
|
||||||
scope_pattern TEXT,
|
|
||||||
service_id INTEGER,
|
|
||||||
service_name TEXT,
|
|
||||||
cron_expression TEXT NOT NULL,
|
|
||||||
retention_hourly INTEGER NOT NULL DEFAULT 0,
|
|
||||||
retention_daily INTEGER NOT NULL DEFAULT 7,
|
|
||||||
retention_weekly INTEGER NOT NULL DEFAULT 4,
|
|
||||||
retention_monthly INTEGER NOT NULL DEFAULT 12,
|
|
||||||
enabled INTEGER NOT NULL DEFAULT 1,
|
|
||||||
last_run_at REAL,
|
|
||||||
next_run_at REAL,
|
|
||||||
last_status TEXT,
|
|
||||||
last_error TEXT,
|
|
||||||
created_at REAL NOT NULL,
|
|
||||||
updated_at REAL NOT NULL,
|
|
||||||
FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE
|
|
||||||
)
|
|
||||||
`);
|
|
||||||
|
|
||||||
      // Migrate existing data - convert the old retention_tier to the new per-tier counts:
      // daily -> D:7, weekly -> W:4, monthly -> M:12, yearly -> M:24 (yearly becomes long monthly retention)
      this.query(`
        INSERT INTO backup_schedules_new (
          id, scope_type, scope_pattern, service_id, service_name, cron_expression,
          retention_hourly, retention_daily, retention_weekly, retention_monthly,
          enabled, last_run_at, next_run_at, last_status, last_error, created_at, updated_at
        )
        SELECT
          id, scope_type, scope_pattern, service_id, service_name, cron_expression,
          0, -- retention_hourly
          CASE WHEN retention_tier = 'daily' THEN 7 ELSE 0 END,
          CASE WHEN retention_tier IN ('daily', 'weekly') THEN 4 ELSE 0 END,
          CASE WHEN retention_tier IN ('daily', 'weekly', 'monthly') THEN 12
               WHEN retention_tier = 'yearly' THEN 24 ELSE 12 END,
          enabled, last_run_at, next_run_at, last_status, last_error, created_at, updated_at
        FROM backup_schedules
      `);

      this.query('DROP TABLE backup_schedules');
      this.query('ALTER TABLE backup_schedules_new RENAME TO backup_schedules');
      this.query('CREATE INDEX IF NOT EXISTS idx_backup_schedules_service ON backup_schedules(service_id)');
      this.query('CREATE INDEX IF NOT EXISTS idx_backup_schedules_enabled ON backup_schedules(enabled)');
      this.query('CREATE INDEX IF NOT EXISTS idx_backup_schedules_scope ON backup_schedules(scope_type)');

      // Recreate backups table without retention_tier column
      this.query(`
        CREATE TABLE backups_new (
          id INTEGER PRIMARY KEY AUTOINCREMENT,
          service_id INTEGER NOT NULL,
          service_name TEXT NOT NULL,
          filename TEXT NOT NULL,
          size_bytes INTEGER NOT NULL,
          created_at REAL NOT NULL,
          includes_image INTEGER NOT NULL,
          platform_resources TEXT NOT NULL DEFAULT '[]',
          checksum TEXT NOT NULL,
          schedule_id INTEGER REFERENCES backup_schedules(id) ON DELETE SET NULL,
          FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE
        )
      `);

      this.query(`
        INSERT INTO backups_new (
          id, service_id, service_name, filename, size_bytes, created_at,
          includes_image, platform_resources, checksum, schedule_id
        )
        SELECT
          id, service_id, service_name, filename, size_bytes, created_at,
          includes_image, platform_resources, checksum, schedule_id
        FROM backups
      `);

      this.query('DROP TABLE backups');
      this.query('ALTER TABLE backups_new RENAME TO backups');
      this.query('CREATE INDEX IF NOT EXISTS idx_backups_service ON backups(service_id)');
      this.query('CREATE INDEX IF NOT EXISTS idx_backups_created ON backups(created_at DESC)');
      this.query('CREATE INDEX IF NOT EXISTS idx_backups_schedule ON backups(schedule_id)');

      this.setMigrationVersion(12);
      logger.success('Migration 12 completed: GFS retention policy schema updated');
    }
  } catch (error) {
    logger.error(`Migration failed: ${getErrorMessage(error)}`);
    if (error instanceof Error && error.stack) {
      logger.error(`Stack: ${error.stack}`);
    }
    throw error;
  }
}

/**
 * Get current migration version
 */
private getMigrationVersion(): number {
  if (!this.db) throw new Error('Database not initialized');

  try {
    const result = this.query<{ version?: number | null; [key: number]: unknown }>('SELECT MAX(version) as version FROM migrations');
    if (result.length === 0) return 0;

    const versionValue = result[0].version ?? (result[0] as Record<number, unknown>)[0];
    return versionValue !== null && versionValue !== undefined ? Number(versionValue) : 0;
  } catch (error) {
    logger.warn(`Error getting migration version: ${getErrorMessage(error)}, defaulting to 0`);
    return 0;
  }
}

/**
 * Set migration version
 */
private setMigrationVersion(version: number): void {
  if (!this.db) throw new Error('Database not initialized');

  this.query('INSERT INTO migrations (version, applied_at) VALUES (?, ?)', [
    version,
    Date.now(),
  ]);
  logger.debug(`Migration version set to ${version}`);
}

/**
 * Close database connection
 */
22  ts/database/migrations/base-migration.ts  Normal file
@@ -0,0 +1,22 @@
/**
 * Abstract base class for database migrations.
 * All migrations must extend this class and implement the abstract members.
 */

import type { TQueryFunction } from '../types.ts';

export abstract class BaseMigration {
  /** The migration version number (must be unique and sequential) */
  abstract readonly version: number;

  /** A short description of what this migration does */
  abstract readonly description: string;

  /** Execute the migration's SQL statements */
  abstract up(query: TQueryFunction): void;

  /** Returns a human-readable name for logging */
  getName(): string {
    return `Migration ${this.version}: ${this.description}`;
  }
}
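The TQueryFunction type referenced above comes from ts/database/types.ts, which is not part of this diff. Judging by the call sites (a generic row type, a SQL string, and optional bound parameters), its shape is presumably close to the following sketch; treat the exact signature as an assumption:

// Assumed shape of TQueryFunction, inferred from call sites in this diff.
// The real definition lives in ts/database/types.ts (not shown here).
type TQueryFunction = <T = Record<string, unknown>>(sql: string, params?: unknown[]) => T[];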
2  ts/database/migrations/index.ts  Normal file
@@ -0,0 +1,2 @@
export { BaseMigration } from './base-migration.ts';
export { MigrationRunner } from './migration-runner.ts';
12  ts/database/migrations/migration-001-initial.ts  Normal file
@@ -0,0 +1,12 @@
import { BaseMigration } from './base-migration.ts';
import type { TQueryFunction } from '../types.ts';

export class Migration001Initial extends BaseMigration {
  readonly version = 1;
  readonly description = 'Initial schema';

  up(_query: TQueryFunction): void {
    // Initial schema is created by createTables() in the database class.
    // This migration just marks the initial version.
  }
}
170  ts/database/migrations/migration-002-timestamps-to-real.ts  Normal file
@@ -0,0 +1,170 @@
import { BaseMigration } from './base-migration.ts';
import type { TQueryFunction } from '../types.ts';

export class Migration002TimestampsToReal extends BaseMigration {
  readonly version = 2;
  readonly description = 'Convert timestamp columns from INTEGER to REAL';

  up(query: TQueryFunction): void {
    // SSL certificates
    query(`
      CREATE TABLE ssl_certificates_new (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        domain TEXT NOT NULL UNIQUE,
        cert_path TEXT NOT NULL,
        key_path TEXT NOT NULL,
        full_chain_path TEXT NOT NULL,
        expiry_date REAL NOT NULL,
        issuer TEXT NOT NULL,
        created_at REAL NOT NULL,
        updated_at REAL NOT NULL
      )
    `);
    query(`INSERT INTO ssl_certificates_new SELECT * FROM ssl_certificates`);
    query(`DROP TABLE ssl_certificates`);
    query(`ALTER TABLE ssl_certificates_new RENAME TO ssl_certificates`);

    // Services
    query(`
      CREATE TABLE services_new (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        name TEXT NOT NULL UNIQUE,
        image TEXT NOT NULL,
        registry TEXT,
        env_vars TEXT NOT NULL,
        port INTEGER NOT NULL,
        domain TEXT,
        container_id TEXT,
        status TEXT NOT NULL DEFAULT 'stopped',
        created_at REAL NOT NULL,
        updated_at REAL NOT NULL
      )
    `);
    query(`INSERT INTO services_new SELECT * FROM services`);
    query(`DROP TABLE services`);
    query(`ALTER TABLE services_new RENAME TO services`);

    // Registries
    query(`
      CREATE TABLE registries_new (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        url TEXT NOT NULL UNIQUE,
        username TEXT NOT NULL,
        password_encrypted TEXT NOT NULL,
        created_at REAL NOT NULL
      )
    `);
    query(`INSERT INTO registries_new SELECT * FROM registries`);
    query(`DROP TABLE registries`);
    query(`ALTER TABLE registries_new RENAME TO registries`);

    // Nginx configs
    query(`
      CREATE TABLE nginx_configs_new (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        service_id INTEGER NOT NULL,
        domain TEXT NOT NULL,
        port INTEGER NOT NULL,
        ssl_enabled INTEGER NOT NULL DEFAULT 0,
        config_template TEXT NOT NULL,
        created_at REAL NOT NULL,
        updated_at REAL NOT NULL,
        FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE
      )
    `);
    query(`INSERT INTO nginx_configs_new SELECT * FROM nginx_configs`);
    query(`DROP TABLE nginx_configs`);
    query(`ALTER TABLE nginx_configs_new RENAME TO nginx_configs`);

    // DNS records
    query(`
      CREATE TABLE dns_records_new (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        domain TEXT NOT NULL UNIQUE,
        type TEXT NOT NULL,
        value TEXT NOT NULL,
        cloudflare_id TEXT,
        zone_id TEXT,
        created_at REAL NOT NULL,
        updated_at REAL NOT NULL
      )
    `);
    query(`INSERT INTO dns_records_new SELECT * FROM dns_records`);
    query(`DROP TABLE dns_records`);
    query(`ALTER TABLE dns_records_new RENAME TO dns_records`);

    // Metrics
    query(`
      CREATE TABLE metrics_new (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        service_id INTEGER NOT NULL,
        timestamp REAL NOT NULL,
        cpu_percent REAL NOT NULL,
        memory_used INTEGER NOT NULL,
        memory_limit INTEGER NOT NULL,
        network_rx_bytes INTEGER NOT NULL,
        network_tx_bytes INTEGER NOT NULL,
        FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE
      )
    `);
    query(`INSERT INTO metrics_new SELECT * FROM metrics`);
    query(`DROP TABLE metrics`);
    query(`ALTER TABLE metrics_new RENAME TO metrics`);
    query(`CREATE INDEX IF NOT EXISTS idx_metrics_service_timestamp ON metrics(service_id, timestamp DESC)`);

    // Logs
    query(`
      CREATE TABLE logs_new (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        service_id INTEGER NOT NULL,
        timestamp REAL NOT NULL,
        message TEXT NOT NULL,
        level TEXT NOT NULL,
        source TEXT NOT NULL,
        FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE
      )
    `);
    query(`INSERT INTO logs_new SELECT * FROM logs`);
    query(`DROP TABLE logs`);
    query(`ALTER TABLE logs_new RENAME TO logs`);
    query(`CREATE INDEX IF NOT EXISTS idx_logs_service_timestamp ON logs(service_id, timestamp DESC)`);

    // Users
    query(`
      CREATE TABLE users_new (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        username TEXT NOT NULL UNIQUE,
        password_hash TEXT NOT NULL,
        role TEXT NOT NULL DEFAULT 'user',
        created_at REAL NOT NULL,
        updated_at REAL NOT NULL
      )
    `);
    query(`INSERT INTO users_new SELECT * FROM users`);
    query(`DROP TABLE users`);
    query(`ALTER TABLE users_new RENAME TO users`);

    // Settings
    query(`
      CREATE TABLE settings_new (
        key TEXT PRIMARY KEY,
        value TEXT NOT NULL,
        updated_at REAL NOT NULL
      )
    `);
    query(`INSERT INTO settings_new SELECT * FROM settings`);
    query(`DROP TABLE settings`);
    query(`ALTER TABLE settings_new RENAME TO settings`);

    // Migrations table itself
    query(`
      CREATE TABLE migrations_new (
        version INTEGER PRIMARY KEY,
        applied_at REAL NOT NULL
      )
    `);
    query(`INSERT INTO migrations_new SELECT * FROM migrations`);
    query(`DROP TABLE migrations`);
    query(`ALTER TABLE migrations_new RENAME TO migrations`);
  }
}
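Every conversion above follows the same four-step dance because SQLite's ALTER TABLE cannot change a column's declared type: create a *_new table with the corrected schema, copy all rows, drop the old table, and rename the new one into place. A hedged helper that factors out the repetition might look like this (the rebuildTable name and signature are illustrative, not part of the diff):

// Hypothetical refactor of the repeated rebuild pattern used in this migration.
// `ddl` must create `<table>_new` with a column list identical to `<table>`,
// otherwise the positional `SELECT *` copy would mis-assign columns.
import type { TQueryFunction } from '../types.ts';

function rebuildTable(query: TQueryFunction, table: string, ddl: string): void {
  query(ddl); // CREATE TABLE <table>_new (...)
  query(`INSERT INTO ${table}_new SELECT * FROM ${table}`); // copy rows positionally
  query(`DROP TABLE ${table}`);
  query(`ALTER TABLE ${table}_new RENAME TO ${table}`);
}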
125  ts/database/migrations/migration-003-domain-management.ts  Normal file
@@ -0,0 +1,125 @@
import { BaseMigration } from './base-migration.ts';
import type { TQueryFunction } from '../types.ts';

export class Migration003DomainManagement extends BaseMigration {
  readonly version = 3;
  readonly description = 'Domain management tables';

  up(query: TQueryFunction): void {
    query(`
      CREATE TABLE domains (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        domain TEXT NOT NULL UNIQUE,
        dns_provider TEXT,
        cloudflare_zone_id TEXT,
        is_obsolete INTEGER NOT NULL DEFAULT 0,
        default_wildcard INTEGER NOT NULL DEFAULT 1,
        created_at REAL NOT NULL,
        updated_at REAL NOT NULL
      )
    `);

    query(`
      CREATE TABLE certificates (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        domain_id INTEGER NOT NULL,
        cert_domain TEXT NOT NULL,
        is_wildcard INTEGER NOT NULL DEFAULT 0,
        cert_path TEXT NOT NULL,
        key_path TEXT NOT NULL,
        full_chain_path TEXT NOT NULL,
        expiry_date REAL NOT NULL,
        issuer TEXT NOT NULL,
        is_valid INTEGER NOT NULL DEFAULT 1,
        created_at REAL NOT NULL,
        updated_at REAL NOT NULL,
        FOREIGN KEY (domain_id) REFERENCES domains(id) ON DELETE CASCADE
      )
    `);

    query(`
      CREATE TABLE cert_requirements (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        service_id INTEGER NOT NULL,
        domain_id INTEGER NOT NULL,
        subdomain TEXT NOT NULL,
        certificate_id INTEGER,
        status TEXT NOT NULL DEFAULT 'pending',
        created_at REAL NOT NULL,
        updated_at REAL NOT NULL,
        FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE,
        FOREIGN KEY (domain_id) REFERENCES domains(id) ON DELETE CASCADE,
        FOREIGN KEY (certificate_id) REFERENCES certificates(id) ON DELETE SET NULL
      )
    `);

    // Migrate data from old ssl_certificates table
    interface OldSslCert {
      id?: number;
      domain?: string;
      cert_path?: string;
      key_path?: string;
      full_chain_path?: string;
      expiry_date?: number;
      issuer?: string;
      created_at?: number;
      updated_at?: number;
      [key: number]: unknown;
    }
    const existingCerts = query<OldSslCert>('SELECT * FROM ssl_certificates');

    const now = Date.now();
    const domainMap = new Map<string, number>();

    for (const cert of existingCerts) {
      const domain = String(cert.domain ?? (cert as Record<number, unknown>)[1]);
      if (!domainMap.has(domain)) {
        query(
          'INSERT INTO domains (domain, dns_provider, is_obsolete, default_wildcard, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?)',
          [domain, null, 0, 1, now, now],
        );
        const result = query<{ id?: number; [key: number]: unknown }>(
          'SELECT last_insert_rowid() as id',
        );
        const domainId = result[0].id ?? (result[0] as Record<number, unknown>)[0];
        domainMap.set(domain, Number(domainId));
      }
    }

    for (const cert of existingCerts) {
      const domain = String(cert.domain ?? (cert as Record<number, unknown>)[1]);
      const domainId = domainMap.get(domain);

      query(
        `INSERT INTO certificates (
          domain_id, cert_domain, is_wildcard, cert_path, key_path, full_chain_path,
          expiry_date, issuer, is_valid, created_at, updated_at
        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
        [
          domainId,
          domain,
          0,
          String(cert.cert_path ?? (cert as Record<number, unknown>)[2]),
          String(cert.key_path ?? (cert as Record<number, unknown>)[3]),
          String(cert.full_chain_path ?? (cert as Record<number, unknown>)[4]),
          Number(cert.expiry_date ?? (cert as Record<number, unknown>)[5]),
          String(cert.issuer ?? (cert as Record<number, unknown>)[6]),
          1,
          Number(cert.created_at ?? (cert as Record<number, unknown>)[7]),
          Number(cert.updated_at ?? (cert as Record<number, unknown>)[8]),
        ],
      );
    }

    query('DROP TABLE ssl_certificates');
    query('CREATE INDEX IF NOT EXISTS idx_domains_cloudflare_zone ON domains(cloudflare_zone_id)');
    query('CREATE INDEX IF NOT EXISTS idx_certificates_domain ON certificates(domain_id)');
    query('CREATE INDEX IF NOT EXISTS idx_certificates_expiry ON certificates(expiry_date)');
    query(
      'CREATE INDEX IF NOT EXISTS idx_cert_requirements_service ON cert_requirements(service_id)',
    );
    query(
      'CREATE INDEX IF NOT EXISTS idx_cert_requirements_domain ON cert_requirements(domain_id)',
    );
  }
}
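The repeated `cert.domain ?? (cert as Record<number, unknown>)[1]` fallbacks above suggest the query layer can hand rows back either as named-column objects or as positional arrays, depending on the underlying driver. A small hedged accessor would make that duality explicit (the helper is illustrative only, not part of the diff):

// Hypothetical accessor: read a column by name, falling back to its ordinal position.
function col<T>(row: Record<string | number, unknown>, name: string, ordinal: number): T {
  return (row[name] ?? row[ordinal]) as T;
}

// e.g. the domain lookup above could then read: const domain = String(col(cert, 'domain', 1));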
16  ts/database/migrations/migration-004-registry-columns.ts  Normal file
@@ -0,0 +1,16 @@
import { BaseMigration } from './base-migration.ts';
import type { TQueryFunction } from '../types.ts';

export class Migration004RegistryColumns extends BaseMigration {
  readonly version = 4;
  readonly description = 'Add Onebox Registry columns to services table';

  up(query: TQueryFunction): void {
    query(`ALTER TABLE services ADD COLUMN use_onebox_registry INTEGER DEFAULT 0`);
    query(`ALTER TABLE services ADD COLUMN registry_repository TEXT`);
    query(`ALTER TABLE services ADD COLUMN registry_token TEXT`);
    query(`ALTER TABLE services ADD COLUMN registry_image_tag TEXT DEFAULT 'latest'`);
    query(`ALTER TABLE services ADD COLUMN auto_update_on_push INTEGER DEFAULT 0`);
    query(`ALTER TABLE services ADD COLUMN image_digest TEXT`);
  }
}
30  ts/database/migrations/migration-005-registry-tokens.ts  Normal file
@@ -0,0 +1,30 @@
import { BaseMigration } from './base-migration.ts';
import type { TQueryFunction } from '../types.ts';

export class Migration005RegistryTokens extends BaseMigration {
  readonly version = 5;
  readonly description = 'Registry tokens table';

  up(query: TQueryFunction): void {
    query(`
      CREATE TABLE registry_tokens (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        name TEXT NOT NULL,
        token_hash TEXT NOT NULL UNIQUE,
        token_type TEXT NOT NULL,
        scope TEXT NOT NULL,
        expires_at REAL,
        created_at REAL NOT NULL,
        last_used_at REAL,
        created_by TEXT NOT NULL
      )
    `);

    query(
      'CREATE INDEX IF NOT EXISTS idx_registry_tokens_type ON registry_tokens(token_type)',
    );
    query(
      'CREATE INDEX IF NOT EXISTS idx_registry_tokens_hash ON registry_tokens(token_hash)',
    );
  }
}
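Note that the table stores only a unique token_hash, never the raw token, so lookups presumably hash the presented credential first. The actual hashing scheme is not shown in this diff; a plausible sketch using the Web Crypto API available in Deno would be:

// Hypothetical lookup-key derivation; the project's real scheme is not part of this diff.
async function hashToken(token: string): Promise<string> {
  const digest = await crypto.subtle.digest('SHA-256', new TextEncoder().encode(token));
  return Array.from(new Uint8Array(digest))
    .map((b) => b.toString(16).padStart(2, '0'))
    .join(''); // hex string stored in / compared against token_hash
}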
48  ts/database/migrations/migration-006-drop-registry-token.ts  Normal file
@@ -0,0 +1,48 @@
import { BaseMigration } from './base-migration.ts';
import type { TQueryFunction } from '../types.ts';

export class Migration006DropRegistryToken extends BaseMigration {
  readonly version = 6;
  readonly description = 'Drop registry_token column from services table';

  up(query: TQueryFunction): void {
    query(`
      CREATE TABLE services_new (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        name TEXT NOT NULL UNIQUE,
        image TEXT NOT NULL,
        registry TEXT,
        env_vars TEXT,
        port INTEGER NOT NULL,
        domain TEXT,
        container_id TEXT,
        status TEXT NOT NULL,
        created_at REAL NOT NULL,
        updated_at REAL NOT NULL,
        use_onebox_registry INTEGER DEFAULT 0,
        registry_repository TEXT,
        registry_image_tag TEXT DEFAULT 'latest',
        auto_update_on_push INTEGER DEFAULT 0,
        image_digest TEXT
      )
    `);

    query(`
      INSERT INTO services_new (
        id, name, image, registry, env_vars, port, domain, container_id, status,
        created_at, updated_at, use_onebox_registry, registry_repository,
        registry_image_tag, auto_update_on_push, image_digest
      )
      SELECT
        id, name, image, registry, env_vars, port, domain, container_id, status,
        created_at, updated_at, use_onebox_registry, registry_repository,
        registry_image_tag, auto_update_on_push, image_digest
      FROM services
    `);

    query('DROP TABLE services');
    query('ALTER TABLE services_new RENAME TO services');
    query('CREATE INDEX IF NOT EXISTS idx_services_name ON services(name)');
    query('CREATE INDEX IF NOT EXISTS idx_services_status ON services(status)');
  }
}
49  ts/database/migrations/migration-007-platform-services.ts  Normal file
@@ -0,0 +1,49 @@
import { BaseMigration } from './base-migration.ts';
import type { TQueryFunction } from '../types.ts';

export class Migration007PlatformServices extends BaseMigration {
  readonly version = 7;
  readonly description = 'Platform services tables';

  up(query: TQueryFunction): void {
    query(`
      CREATE TABLE platform_services (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        name TEXT NOT NULL UNIQUE,
        type TEXT NOT NULL,
        status TEXT NOT NULL DEFAULT 'stopped',
        container_id TEXT,
        config TEXT NOT NULL DEFAULT '{}',
        admin_credentials_encrypted TEXT,
        created_at REAL NOT NULL,
        updated_at REAL NOT NULL
      )
    `);

    query(`
      CREATE TABLE platform_resources (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        platform_service_id INTEGER NOT NULL,
        service_id INTEGER NOT NULL,
        resource_type TEXT NOT NULL,
        resource_name TEXT NOT NULL,
        credentials_encrypted TEXT NOT NULL,
        created_at REAL NOT NULL,
        FOREIGN KEY (platform_service_id) REFERENCES platform_services(id) ON DELETE CASCADE,
        FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE
      )
    `);

    query(`ALTER TABLE services ADD COLUMN platform_requirements TEXT DEFAULT '{}'`);

    query(
      'CREATE INDEX IF NOT EXISTS idx_platform_services_type ON platform_services(type)',
    );
    query(
      'CREATE INDEX IF NOT EXISTS idx_platform_resources_service ON platform_resources(service_id)',
    );
    query(
      'CREATE INDEX IF NOT EXISTS idx_platform_resources_platform ON platform_resources(platform_service_id)',
    );
  }
}
41  ts/database/migrations/migration-008-cert-pem-content.ts  Normal file
@@ -0,0 +1,41 @@
import { BaseMigration } from './base-migration.ts';
import type { TQueryFunction } from '../types.ts';

export class Migration008CertPemContent extends BaseMigration {
  readonly version = 8;
  readonly description = 'Convert certificates table to store PEM content';

  up(query: TQueryFunction): void {
    query(`
      CREATE TABLE certificates_new (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        domain_id INTEGER NOT NULL,
        cert_domain TEXT NOT NULL,
        is_wildcard INTEGER NOT NULL DEFAULT 0,
        cert_pem TEXT NOT NULL DEFAULT '',
        key_pem TEXT NOT NULL DEFAULT '',
        fullchain_pem TEXT NOT NULL DEFAULT '',
        expiry_date REAL NOT NULL,
        issuer TEXT NOT NULL,
        is_valid INTEGER NOT NULL DEFAULT 1,
        created_at REAL NOT NULL,
        updated_at REAL NOT NULL,
        FOREIGN KEY (domain_id) REFERENCES domains(id) ON DELETE CASCADE
      )
    `);

    query(`
      INSERT INTO certificates_new (id, domain_id, cert_domain, is_wildcard, cert_pem, key_pem, fullchain_pem, expiry_date, issuer, is_valid, created_at, updated_at)
      SELECT id, domain_id, cert_domain, is_wildcard, '', '', '', expiry_date, issuer, 0, created_at, updated_at FROM certificates
    `);

    query('DROP TABLE certificates');
    query('ALTER TABLE certificates_new RENAME TO certificates');
    query(
      'CREATE INDEX IF NOT EXISTS idx_certificates_domain ON certificates(domain_id)',
    );
    query(
      'CREATE INDEX IF NOT EXISTS idx_certificates_expiry ON certificates(expiry_date)',
    );
  }
}
29  ts/database/migrations/migration-009-backup-system.ts  Normal file
@@ -0,0 +1,29 @@
import { BaseMigration } from './base-migration.ts';
import type { TQueryFunction } from '../types.ts';

export class Migration009BackupSystem extends BaseMigration {
  readonly version = 9;
  readonly description = 'Backup system tables';

  up(query: TQueryFunction): void {
    query(`ALTER TABLE services ADD COLUMN include_image_in_backup INTEGER DEFAULT 1`);

    query(`
      CREATE TABLE backups (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        service_id INTEGER NOT NULL,
        service_name TEXT NOT NULL,
        filename TEXT NOT NULL,
        size_bytes INTEGER NOT NULL,
        created_at REAL NOT NULL,
        includes_image INTEGER NOT NULL,
        platform_resources TEXT NOT NULL DEFAULT '[]',
        checksum TEXT NOT NULL,
        FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE
      )
    `);

    query('CREATE INDEX IF NOT EXISTS idx_backups_service ON backups(service_id)');
    query('CREATE INDEX IF NOT EXISTS idx_backups_created ON backups(created_at DESC)');
  }
}
39  ts/database/migrations/migration-010-backup-schedules.ts  Normal file
@@ -0,0 +1,39 @@
import { BaseMigration } from './base-migration.ts';
import type { TQueryFunction } from '../types.ts';

export class Migration010BackupSchedules extends BaseMigration {
  readonly version = 10;
  readonly description = 'Backup schedules table';

  up(query: TQueryFunction): void {
    query(`
      CREATE TABLE backup_schedules (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        service_id INTEGER NOT NULL,
        service_name TEXT NOT NULL,
        cron_expression TEXT NOT NULL,
        retention_tier TEXT NOT NULL,
        enabled INTEGER NOT NULL DEFAULT 1,
        last_run_at REAL,
        next_run_at REAL,
        last_status TEXT,
        last_error TEXT,
        created_at REAL NOT NULL,
        updated_at REAL NOT NULL,
        FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE
      )
    `);

    query(
      'CREATE INDEX IF NOT EXISTS idx_backup_schedules_service ON backup_schedules(service_id)',
    );
    query(
      'CREATE INDEX IF NOT EXISTS idx_backup_schedules_enabled ON backup_schedules(enabled)',
    );

    query('ALTER TABLE backups ADD COLUMN retention_tier TEXT');
    query(
      'ALTER TABLE backups ADD COLUMN schedule_id INTEGER REFERENCES backup_schedules(id) ON DELETE SET NULL',
    );
  }
}
54  ts/database/migrations/migration-011-scope-columns.ts  Normal file
@@ -0,0 +1,54 @@
import { BaseMigration } from './base-migration.ts';
import type { TQueryFunction } from '../types.ts';

export class Migration011ScopeColumns extends BaseMigration {
  readonly version = 11;
  readonly description = 'Add scope columns to backup_schedules';

  up(query: TQueryFunction): void {
    query(`
      CREATE TABLE backup_schedules_new (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        scope_type TEXT NOT NULL DEFAULT 'service',
        scope_pattern TEXT,
        service_id INTEGER,
        service_name TEXT,
        cron_expression TEXT NOT NULL,
        retention_tier TEXT NOT NULL,
        enabled INTEGER NOT NULL DEFAULT 1,
        last_run_at REAL,
        next_run_at REAL,
        last_status TEXT,
        last_error TEXT,
        created_at REAL NOT NULL,
        updated_at REAL NOT NULL,
        FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE
      )
    `);

    query(`
      INSERT INTO backup_schedules_new (
        id, scope_type, scope_pattern, service_id, service_name, cron_expression,
        retention_tier, enabled, last_run_at, next_run_at, last_status, last_error,
        created_at, updated_at
      )
      SELECT
        id, 'service', NULL, service_id, service_name, cron_expression,
        retention_tier, enabled, last_run_at, next_run_at, last_status, last_error,
        created_at, updated_at
      FROM backup_schedules
    `);

    query('DROP TABLE backup_schedules');
    query('ALTER TABLE backup_schedules_new RENAME TO backup_schedules');
    query(
      'CREATE INDEX IF NOT EXISTS idx_backup_schedules_service ON backup_schedules(service_id)',
    );
    query(
      'CREATE INDEX IF NOT EXISTS idx_backup_schedules_enabled ON backup_schedules(enabled)',
    );
    query(
      'CREATE INDEX IF NOT EXISTS idx_backup_schedules_scope ON backup_schedules(scope_type)',
    );
  }
}
97  ts/database/migrations/migration-012-gfs-retention.ts  Normal file
@@ -0,0 +1,97 @@
import { BaseMigration } from './base-migration.ts';
import type { TQueryFunction } from '../types.ts';

export class Migration012GfsRetention extends BaseMigration {
  readonly version = 12;
  readonly description = 'GFS retention policy schema';

  up(query: TQueryFunction): void {
    // Recreate backup_schedules with GFS retention columns
    query(`
      CREATE TABLE backup_schedules_new (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        scope_type TEXT NOT NULL DEFAULT 'service',
        scope_pattern TEXT,
        service_id INTEGER,
        service_name TEXT,
        cron_expression TEXT NOT NULL,
        retention_hourly INTEGER NOT NULL DEFAULT 0,
        retention_daily INTEGER NOT NULL DEFAULT 7,
        retention_weekly INTEGER NOT NULL DEFAULT 4,
        retention_monthly INTEGER NOT NULL DEFAULT 12,
        enabled INTEGER NOT NULL DEFAULT 1,
        last_run_at REAL,
        next_run_at REAL,
        last_status TEXT,
        last_error TEXT,
        created_at REAL NOT NULL,
        updated_at REAL NOT NULL,
        FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE
      )
    `);

    // Migrate existing data - convert old retention_tier to new format
    query(`
      INSERT INTO backup_schedules_new (
        id, scope_type, scope_pattern, service_id, service_name, cron_expression,
        retention_hourly, retention_daily, retention_weekly, retention_monthly,
        enabled, last_run_at, next_run_at, last_status, last_error, created_at, updated_at
      )
      SELECT
        id, scope_type, scope_pattern, service_id, service_name, cron_expression,
        0,
        CASE WHEN retention_tier = 'daily' THEN 7 ELSE 0 END,
        CASE WHEN retention_tier IN ('daily', 'weekly') THEN 4 ELSE 0 END,
        CASE WHEN retention_tier IN ('daily', 'weekly', 'monthly') THEN 12
             WHEN retention_tier = 'yearly' THEN 24 ELSE 12 END,
        enabled, last_run_at, next_run_at, last_status, last_error, created_at, updated_at
      FROM backup_schedules
    `);

    query('DROP TABLE backup_schedules');
    query('ALTER TABLE backup_schedules_new RENAME TO backup_schedules');
    query(
      'CREATE INDEX IF NOT EXISTS idx_backup_schedules_service ON backup_schedules(service_id)',
    );
    query(
      'CREATE INDEX IF NOT EXISTS idx_backup_schedules_enabled ON backup_schedules(enabled)',
    );
    query(
      'CREATE INDEX IF NOT EXISTS idx_backup_schedules_scope ON backup_schedules(scope_type)',
    );

    // Recreate backups table without retention_tier column
    query(`
      CREATE TABLE backups_new (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        service_id INTEGER NOT NULL,
        service_name TEXT NOT NULL,
        filename TEXT NOT NULL,
        size_bytes INTEGER NOT NULL,
        created_at REAL NOT NULL,
        includes_image INTEGER NOT NULL,
        platform_resources TEXT NOT NULL DEFAULT '[]',
        checksum TEXT NOT NULL,
        schedule_id INTEGER REFERENCES backup_schedules(id) ON DELETE SET NULL,
        FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE
      )
    `);

    query(`
      INSERT INTO backups_new (
        id, service_id, service_name, filename, size_bytes, created_at,
        includes_image, platform_resources, checksum, schedule_id
      )
      SELECT
        id, service_id, service_name, filename, size_bytes, created_at,
        includes_image, platform_resources, checksum, schedule_id
      FROM backups
    `);

    query('DROP TABLE backups');
    query('ALTER TABLE backups_new RENAME TO backups');
    query('CREATE INDEX IF NOT EXISTS idx_backups_service ON backups(service_id)');
    query('CREATE INDEX IF NOT EXISTS idx_backups_created ON backups(created_at DESC)');
    query('CREATE INDEX IF NOT EXISTS idx_backups_schedule ON backups(schedule_id)');
  }
}
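The CASE expressions in the data copy encode the tier upgrade: a legacy 'daily' schedule becomes 7 daily + 4 weekly + 12 monthly slots, 'weekly' becomes 4 weekly + 12 monthly, 'monthly' keeps 12 monthly, and 'yearly' is approximated as 24 monthly. A TypeScript mirror of that mapping, for reasoning only (not part of the diff):

// Mirrors the SQL CASE logic above; hourly is always reset to 0.
function convertRetentionTier(tier: string) {
  return {
    hourly: 0,
    daily: tier === 'daily' ? 7 : 0,
    weekly: tier === 'daily' || tier === 'weekly' ? 4 : 0,
    monthly: tier === 'yearly' ? 24 : 12,
  };
}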
100  ts/database/migrations/migration-runner.ts  Normal file
@@ -0,0 +1,100 @@
/**
 * Migration runner - discovers, orders, and executes database migrations.
 * Mirrors the pattern from @serve.zone/nupst.
 */

import type { TQueryFunction } from '../types.ts';
import { logger } from '../../logging.ts';
import { getErrorMessage } from '../../utils/error.ts';

import { Migration001Initial } from './migration-001-initial.ts';
import { Migration002TimestampsToReal } from './migration-002-timestamps-to-real.ts';
import { Migration003DomainManagement } from './migration-003-domain-management.ts';
import { Migration004RegistryColumns } from './migration-004-registry-columns.ts';
import { Migration005RegistryTokens } from './migration-005-registry-tokens.ts';
import { Migration006DropRegistryToken } from './migration-006-drop-registry-token.ts';
import { Migration007PlatformServices } from './migration-007-platform-services.ts';
import { Migration008CertPemContent } from './migration-008-cert-pem-content.ts';
import { Migration009BackupSystem } from './migration-009-backup-system.ts';
import { Migration010BackupSchedules } from './migration-010-backup-schedules.ts';
import { Migration011ScopeColumns } from './migration-011-scope-columns.ts';
import { Migration012GfsRetention } from './migration-012-gfs-retention.ts';
import type { BaseMigration } from './base-migration.ts';

export class MigrationRunner {
  private query: TQueryFunction;
  private migrations: BaseMigration[];

  constructor(query: TQueryFunction) {
    this.query = query;

    // Register all migrations in order
    this.migrations = [
      new Migration001Initial(),
      new Migration002TimestampsToReal(),
      new Migration003DomainManagement(),
      new Migration004RegistryColumns(),
      new Migration005RegistryTokens(),
      new Migration006DropRegistryToken(),
      new Migration007PlatformServices(),
      new Migration008CertPemContent(),
      new Migration009BackupSystem(),
      new Migration010BackupSchedules(),
      new Migration011ScopeColumns(),
      new Migration012GfsRetention(),
    ].sort((a, b) => a.version - b.version);
  }

  /** Run all pending migrations */
  run(): void {
    try {
      const currentVersion = this.getMigrationVersion();
      logger.info(`Current database migration version: ${currentVersion}`);

      let applied = 0;
      for (const migration of this.migrations) {
        if (migration.version <= currentVersion) continue;

        logger.info(`Running ${migration.getName()}...`);
        migration.up(this.query);
        this.setMigrationVersion(migration.version);
        logger.success(`${migration.getName()} completed`);
        applied++;
      }

      if (applied > 0) {
        logger.success(`Applied ${applied} migration(s)`);
      }
    } catch (error) {
      logger.error(`Migration failed: ${getErrorMessage(error)}`);
      if (error instanceof Error && error.stack) {
        logger.error(`Stack: ${error.stack}`);
      }
      throw error;
    }
  }

  /** Get current migration version from the migrations table */
  private getMigrationVersion(): number {
    try {
      const result = this.query<{ version?: number | null; [key: number]: unknown }>(
        'SELECT MAX(version) as version FROM migrations',
      );
      if (result.length === 0) return 0;

      const versionValue = result[0].version ?? (result[0] as Record<number, unknown>)[0];
      return versionValue !== null && versionValue !== undefined ? Number(versionValue) : 0;
    } catch {
      // Table might not exist yet on fresh databases
      return 0;
    }
  }

  /** Record a migration version as applied */
  private setMigrationVersion(version: number): void {
    this.query('INSERT INTO migrations (version, applied_at) VALUES (?, ?)', [
      version,
      Date.now(),
    ]);
  }
}
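How the database class invokes the runner is outside this excerpt; presumably the inline migration blocks shown earlier are replaced by a call along these lines (the method name and the bind are assumptions, not confirmed by the diff):

// Hypothetical call site inside the database class, after the base tables exist.
private runMigrations(): void {
  new MigrationRunner(this.query.bind(this)).run();
}

// The runner applies only migrations whose version exceeds MAX(version)
// in the migrations table, so repeated calls across restarts are harmless.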
File diff suppressed because one or more lines are too long

@@ -3,6 +3,6 @@
  */
 export const commitinfo = {
   name: '@serve.zone/onebox',
-  version: '1.12.0',
+  version: '1.14.4',
   description: 'Self-hosted container platform with automatic SSL and DNS - a mini Heroku for single servers'
 }