Compare commits
65 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 10598520d8 | |||
| 075b7946b1 | |||
| f47fca3304 | |||
| 575e010a6b | |||
| 60a5dc4663 | |||
| 36d80b1e27 | |||
| 465cf0ee72 | |||
| bd5cd5c0cb | |||
| b622565e34 | |||
| 56376121ab | |||
| e3359d1235 | |||
| f1eeec6922 | |||
| 69362bb529 | |||
| 857fcc50ba | |||
| 5d0df006eb | |||
| e6256502ce | |||
| d5dc141171 | |||
| 2538f5ae2c | |||
| 4613193dcc | |||
| 848b3afe54 | |||
| dd86bae942 | |||
| 4691c61544 | |||
| dfb2d3b340 | |||
| 6a19ab05e3 | |||
| 7b718da7a2 | |||
| ebaf545418 | |||
| 2cdfdaed55 | |||
| 2216804652 | |||
| 1b177037f5 | |||
| 9d6590927c | |||
| eaf401200c | |||
| e97a4d53ae | |||
| ca2b3b25a5 | |||
| 19703de50d | |||
| bcab4f274e | |||
| 64e947735f | |||
| 1e05c08002 | |||
| 167df321f9 | |||
| 49998c4c32 | |||
| 8045ec38df | |||
| 793fb18b43 | |||
| 09534fd899 | |||
| 5f3783a5e9 | |||
| 92555c5a5e | |||
| ddc7fa4bee | |||
| eceb5d99c8 | |||
| 0631b7731f | |||
| 4c485cdc0a | |||
| 0f0da0f2ef | |||
| 88367f70eb | |||
| bfcfef79da | |||
| d95270613b | |||
| 14f6746833 | |||
| fe8ca00337 | |||
| ba05cc84fe | |||
| 84c47cd7f5 | |||
| 9365f20f6d | |||
| bc2ed4b03a | |||
| e4dd4cce0a | |||
| 34c90e21db | |||
| ea7bb1395f | |||
| c529dfe34d | |||
| 6ba7e655e3 | |||
| c5d239ab28 | |||
| 5cd7e7c252 |
@@ -1,140 +0,0 @@
|
|||||||
# Onebox Development Notes
|
|
||||||
|
|
||||||
## ⚠️ CRITICAL DEVELOPMENT RULES ⚠️
|
|
||||||
|
|
||||||
### NEVER GUESS - ALWAYS READ THE ACTUAL CODE
|
|
||||||
**FUCKING ALWAYS look at the dependency actual code. Don't start fucking guessing stuff.**
|
|
||||||
|
|
||||||
run "pnpm run watch" when starting to do stuff, so the UI gets recompiled and the server automatically restarts on file changes.
|
|
||||||
|
|
||||||
When working with any dependency:
|
|
||||||
1. **READ the actual source code** in `node_modules/` or check the package documentation
|
|
||||||
2. **CHECK the exact API** - don't assume based on similar libraries
|
|
||||||
3. **VERIFY method names, return types, and property structures** before using them
|
|
||||||
4. **TEST with the actual implementation** - APIs change between versions
|
|
||||||
|
|
||||||
Common mistakes to avoid:
|
|
||||||
- ❌ Assuming API structure based on similar libraries
|
|
||||||
- ❌ Guessing method names or property paths
|
|
||||||
- ❌ Using outdated documentation without checking current version
|
|
||||||
- ✅ Read the actual TypeScript definitions in node_modules
|
|
||||||
- ✅ Check the package's README and changelog
|
|
||||||
- ✅ Test the actual behavior before implementing
|
|
||||||
|
|
||||||
## Architecture Changes
|
|
||||||
|
|
||||||
### Reverse Proxy Implementation
|
|
||||||
- **Replaced Nginx** with native Deno reverse proxy (`ts/classes/reverseproxy.ts`)
|
|
||||||
- Features:
|
|
||||||
- HTTP/HTTPS dual servers (ports 80/443)
|
|
||||||
- TLS/SSL certificate management with hot-reload
|
|
||||||
- WebSocket bidirectional proxying
|
|
||||||
- Dynamic routing from database
|
|
||||||
- SNI (Server Name Indication) support
|
|
||||||
|
|
||||||
### Code Organization
|
|
||||||
- Removed "onebox." prefix from all TypeScript files
|
|
||||||
- Organized into subfolders:
|
|
||||||
- `ts/classes/` - All class implementations
|
|
||||||
- `ts/` - Root level utilities (logging, types, plugins, cli, info)
|
|
||||||
|
|
||||||
### WebSocket Real-time Communication
|
|
||||||
- **Backend**: WebSocket endpoint at `/api/ws` (`ts/classes/httpserver.ts:96-174`)
|
|
||||||
- Connection management with client Set tracking
|
|
||||||
- Broadcast methods: `broadcast()`, `broadcastServiceUpdate()`, `broadcastServiceStatus()`
|
|
||||||
- Integrated with service lifecycle (start/stop/restart actions)
|
|
||||||
- Status monitoring loop broadcasts changes automatically
|
|
||||||
- **Frontend**: Angular WebSocket service (`ui/src/app/core/services/websocket.service.ts`)
|
|
||||||
- Auto-connects on app initialization
|
|
||||||
- Exponential backoff reconnection (max 5 attempts)
|
|
||||||
- RxJS Observable-based message streaming
|
|
||||||
- Components subscribe to real-time updates
|
|
||||||
- **Message Types**:
|
|
||||||
- `connected` - Initial connection confirmation
|
|
||||||
- `service_update` - Service lifecycle changes (action: created/updated/deleted/started/stopped)
|
|
||||||
- `service_status` - Real-time status changes from monitoring loop
|
|
||||||
- `system_status` - System-wide updates
|
|
||||||
- **Testing**: Use `.nogit/test-ws-updates.ts` to monitor WebSocket messages
|
|
||||||
|
|
||||||
### Docker Configuration
|
|
||||||
- **System Docker**: Uses root Docker at `/var/run/docker.sock` (NOT rootless)
|
|
||||||
- **Swarm Mode**: Enabled for service orchestration
|
|
||||||
- **API Access**: Interact with Docker via direct API calls to the socket
|
|
||||||
- ❌ DO NOT switch Docker CLI contexts
|
|
||||||
- ✅ Use curl/HTTP requests to `/var/run/docker.sock`
|
|
||||||
- **Network**: Overlay network `onebox-network` with `Attachable: true`
|
|
||||||
- **Services vs Containers**: All workloads run as Swarm services (not standalone containers)
|
|
||||||
|
|
||||||
## Debugging Tips
|
|
||||||
|
|
||||||
### Backend Logs
|
|
||||||
Use the background bash task to check server logs:
|
|
||||||
```bash
|
|
||||||
# Check for specific patterns (e.g., Login attempts)
|
|
||||||
BashOutput tool with filter: "Login|error|Error"
|
|
||||||
|
|
||||||
# Check all recent output
|
|
||||||
BashOutput tool without filter
|
|
||||||
```
|
|
||||||
|
|
||||||
The dev server runs with `--watch` so it auto-restarts on file changes.
|
|
||||||
|
|
||||||
### Frontend Testing
|
|
||||||
Use Playwright for UI testing:
|
|
||||||
```typescript
|
|
||||||
// Navigate to app
|
|
||||||
mcp__playwright__browser_navigate({ url: "http://localhost:3000" })
|
|
||||||
|
|
||||||
// Fill login form
|
|
||||||
mcp__playwright__browser_fill_form({
|
|
||||||
fields: [
|
|
||||||
{ name: "Username", type: "textbox", ref: "...", value: "admin" },
|
|
||||||
{ name: "Password", type: "textbox", ref: "...", value: "admin" }
|
|
||||||
]
|
|
||||||
})
|
|
||||||
|
|
||||||
// Click button
|
|
||||||
mcp__playwright__browser_click({ element: "Sign in button", ref: "..." })
|
|
||||||
|
|
||||||
// Check console errors
|
|
||||||
// Playwright automatically shows console messages in results
|
|
||||||
```
|
|
||||||
|
|
||||||
### Common Issues
|
|
||||||
|
|
||||||
#### Login Issue (Fixed)
|
|
||||||
**Problem**: `admin/admin` credentials returned "Invalid credentials"
|
|
||||||
|
|
||||||
**Root Cause**: `rowToUser()` function in database.ts was accessing rows as arrays `row[2]` instead of objects `row.password_hash`. The @db/sqlite library returns rows as objects with snake_case column names.
|
|
||||||
|
|
||||||
**Fix**: Updated `rowToUser()` to support both access patterns:
|
|
||||||
```typescript
|
|
||||||
private rowToUser(row: any): IUser {
|
|
||||||
return {
|
|
||||||
passwordHash: String(row.password_hash || row[2]),
|
|
||||||
// ... other fields
|
|
||||||
};
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
**Location**: `ts/classes/database.ts:506-515`
|
|
||||||
|
|
||||||
## Default Credentials
|
|
||||||
- Username: `admin`
|
|
||||||
- Password: `admin`
|
|
||||||
- ⚠️ Change immediately after first login!
|
|
||||||
|
|
||||||
## Development Server
|
|
||||||
```bash
|
|
||||||
# Main server (port 3000)
|
|
||||||
deno task dev
|
|
||||||
|
|
||||||
# Check server status
|
|
||||||
curl http://localhost:3000/api/status
|
|
||||||
```
|
|
||||||
|
|
||||||
## API Endpoints
|
|
||||||
- `POST /api/auth/login` - Login (returns JWT-like token)
|
|
||||||
- `GET /api/status` - System status (requires auth)
|
|
||||||
- `GET /api/services` - List services (requires auth)
|
|
||||||
- See `ts/classes/httpserver.ts` for full API
|
|
||||||
37
.gitea/release-template.md
Normal file
37
.gitea/release-template.md
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
## Onebox {{VERSION}}
|
||||||
|
|
||||||
|
Pre-compiled binaries for multiple platforms.
|
||||||
|
|
||||||
|
### Installation
|
||||||
|
|
||||||
|
#### Option 1: Via npm (recommended)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm install -g @serve.zone/onebox
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Option 2: Via installer script
|
||||||
|
|
||||||
|
```bash
|
||||||
|
curl -sSL https://code.foss.global/serve.zone/onebox/raw/branch/main/install.sh | sudo bash
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Option 3: Direct binary download
|
||||||
|
|
||||||
|
Download the appropriate binary for your platform from the assets below and make it executable.
|
||||||
|
|
||||||
|
### Supported Platforms
|
||||||
|
|
||||||
|
- Linux x86_64 (x64)
|
||||||
|
- Linux ARM64 (aarch64)
|
||||||
|
- macOS x86_64 (Intel)
|
||||||
|
- macOS ARM64 (Apple Silicon)
|
||||||
|
- Windows x86_64
|
||||||
|
|
||||||
|
### Checksums
|
||||||
|
|
||||||
|
SHA256 checksums are provided in `SHA256SUMS.txt` for binary verification.
|
||||||
|
|
||||||
|
### npm Package
|
||||||
|
|
||||||
|
The npm package includes automatic binary detection and installation for your platform.
|
||||||
114
.gitea/workflows/ci.yml
Normal file
114
.gitea/workflows/ci.yml
Normal file
@@ -0,0 +1,114 @@
|
|||||||
|
name: CI
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
check:
|
||||||
|
name: Type Check & Lint
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
container:
|
||||||
|
image: code.foss.global/host.today/ht-docker-node:latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout code
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Set up Deno
|
||||||
|
uses: denoland/setup-deno@v1
|
||||||
|
with:
|
||||||
|
deno-version: v2.x
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: deno install --entrypoint mod.ts
|
||||||
|
|
||||||
|
- name: Check TypeScript types
|
||||||
|
run: deno check mod.ts
|
||||||
|
|
||||||
|
- name: Lint code
|
||||||
|
run: deno lint
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
|
- name: Format check
|
||||||
|
run: deno fmt --check
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
|
build:
|
||||||
|
name: Build Test (Current Platform)
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
container:
|
||||||
|
image: code.foss.global/host.today/ht-docker-node:latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout code
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Set up Deno
|
||||||
|
uses: denoland/setup-deno@v1
|
||||||
|
with:
|
||||||
|
deno-version: v2.x
|
||||||
|
|
||||||
|
- name: Set up Node.js
|
||||||
|
uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: '22'
|
||||||
|
|
||||||
|
- name: Enable corepack
|
||||||
|
run: corepack enable
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: pnpm install --ignore-scripts
|
||||||
|
|
||||||
|
- name: Compile for current platform
|
||||||
|
run: |
|
||||||
|
echo "Testing compilation for Linux x86_64..."
|
||||||
|
npx tsdeno compile --allow-all --no-check \
|
||||||
|
--output onebox-test \
|
||||||
|
--target x86_64-unknown-linux-gnu mod.ts
|
||||||
|
|
||||||
|
- name: Test binary execution
|
||||||
|
run: |
|
||||||
|
chmod +x onebox-test
|
||||||
|
./onebox-test --version
|
||||||
|
./onebox-test --help
|
||||||
|
|
||||||
|
build-all:
|
||||||
|
name: Build All Platforms
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
container:
|
||||||
|
image: code.foss.global/host.today/ht-docker-node:latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout code
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Set up Deno
|
||||||
|
uses: denoland/setup-deno@v1
|
||||||
|
with:
|
||||||
|
deno-version: v2.x
|
||||||
|
|
||||||
|
- name: Set up Node.js
|
||||||
|
uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: '22'
|
||||||
|
|
||||||
|
- name: Enable corepack
|
||||||
|
run: corepack enable
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: pnpm install --ignore-scripts
|
||||||
|
|
||||||
|
- name: Compile all platform binaries
|
||||||
|
run: mkdir -p dist/binaries && npx tsdeno compile
|
||||||
|
|
||||||
|
- name: Upload all binaries as artifact
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: onebox-binaries.zip
|
||||||
|
path: dist/binaries/*
|
||||||
|
retention-days: 30
|
||||||
131
.gitea/workflows/npm-publish.yml
Normal file
131
.gitea/workflows/npm-publish.yml
Normal file
@@ -0,0 +1,131 @@
|
|||||||
|
name: Publish to npm
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
tags:
|
||||||
|
- 'v*'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
npm-publish:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
container:
|
||||||
|
image: code.foss.global/host.today/ht-docker-node:latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout code
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Set up Deno
|
||||||
|
uses: denoland/setup-deno@v1
|
||||||
|
with:
|
||||||
|
deno-version: v2.x
|
||||||
|
|
||||||
|
- name: Setup Node.js for npm publishing
|
||||||
|
uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: '18.x'
|
||||||
|
registry-url: 'https://registry.npmjs.org/'
|
||||||
|
|
||||||
|
- name: Get version from tag
|
||||||
|
id: version
|
||||||
|
run: |
|
||||||
|
VERSION=${GITHUB_REF#refs/tags/}
|
||||||
|
echo "version=$VERSION" >> $GITHUB_OUTPUT
|
||||||
|
echo "version_number=${VERSION#v}" >> $GITHUB_OUTPUT
|
||||||
|
echo "Publishing version: $VERSION"
|
||||||
|
|
||||||
|
- name: Verify deno.json version matches tag
|
||||||
|
run: |
|
||||||
|
DENO_VERSION=$(grep -o '"version": "[^"]*"' deno.json | cut -d'"' -f4)
|
||||||
|
TAG_VERSION="${{ steps.version.outputs.version_number }}"
|
||||||
|
echo "deno.json version: $DENO_VERSION"
|
||||||
|
echo "Tag version: $TAG_VERSION"
|
||||||
|
if [ "$DENO_VERSION" != "$TAG_VERSION" ]; then
|
||||||
|
echo "ERROR: Version mismatch!"
|
||||||
|
echo "deno.json has version $DENO_VERSION but tag is $TAG_VERSION"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Compile binaries for npm package
|
||||||
|
run: |
|
||||||
|
echo "Compiling binaries for npm package..."
|
||||||
|
deno task compile
|
||||||
|
echo ""
|
||||||
|
echo "Binary sizes:"
|
||||||
|
ls -lh dist/binaries/
|
||||||
|
|
||||||
|
- name: Generate SHA256 checksums
|
||||||
|
run: |
|
||||||
|
cd dist/binaries
|
||||||
|
sha256sum * > SHA256SUMS
|
||||||
|
cat SHA256SUMS
|
||||||
|
cd ../..
|
||||||
|
|
||||||
|
- name: Sync package.json version
|
||||||
|
run: |
|
||||||
|
VERSION="${{ steps.version.outputs.version_number }}"
|
||||||
|
echo "Syncing package.json to version ${VERSION}..."
|
||||||
|
npm version ${VERSION} --no-git-tag-version --allow-same-version
|
||||||
|
echo "package.json version: $(grep '"version"' package.json | head -1)"
|
||||||
|
|
||||||
|
- name: Create npm package
|
||||||
|
run: |
|
||||||
|
echo "Creating npm package..."
|
||||||
|
npm pack
|
||||||
|
echo ""
|
||||||
|
echo "Package created:"
|
||||||
|
ls -lh *.tgz
|
||||||
|
|
||||||
|
- name: Test local installation
|
||||||
|
run: |
|
||||||
|
echo "Testing local package installation..."
|
||||||
|
PACKAGE_FILE=$(ls *.tgz)
|
||||||
|
npm install -g ${PACKAGE_FILE}
|
||||||
|
echo ""
|
||||||
|
echo "Testing onebox command:"
|
||||||
|
onebox --version || echo "Note: Binary execution may fail in CI environment"
|
||||||
|
echo ""
|
||||||
|
echo "Checking installed files:"
|
||||||
|
npm ls -g @serve.zone/onebox || true
|
||||||
|
|
||||||
|
- name: Publish to npm
|
||||||
|
env:
|
||||||
|
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||||
|
run: |
|
||||||
|
echo "Publishing to npm registry..."
|
||||||
|
npm publish --access public
|
||||||
|
echo ""
|
||||||
|
echo "Successfully published @serve.zone/onebox to npm!"
|
||||||
|
echo ""
|
||||||
|
echo "Package info:"
|
||||||
|
npm view @serve.zone/onebox
|
||||||
|
|
||||||
|
- name: Verify npm package
|
||||||
|
run: |
|
||||||
|
echo "Waiting for npm propagation..."
|
||||||
|
sleep 30
|
||||||
|
echo ""
|
||||||
|
echo "Verifying published package..."
|
||||||
|
npm view @serve.zone/onebox
|
||||||
|
echo ""
|
||||||
|
echo "Testing installation from npm:"
|
||||||
|
npm install -g @serve.zone/onebox
|
||||||
|
echo ""
|
||||||
|
echo "Package installed successfully!"
|
||||||
|
which onebox || echo "Binary location check skipped"
|
||||||
|
|
||||||
|
- name: Publish Summary
|
||||||
|
run: |
|
||||||
|
echo "================================================"
|
||||||
|
echo " npm Publish Complete!"
|
||||||
|
echo "================================================"
|
||||||
|
echo ""
|
||||||
|
echo "Package: @serve.zone/onebox"
|
||||||
|
echo "Version: ${{ steps.version.outputs.version }}"
|
||||||
|
echo ""
|
||||||
|
echo "Installation:"
|
||||||
|
echo " npm install -g @serve.zone/onebox"
|
||||||
|
echo ""
|
||||||
|
echo "Registry:"
|
||||||
|
echo " https://www.npmjs.com/package/@serve.zone/onebox"
|
||||||
|
echo ""
|
||||||
211
.gitea/workflows/release.yml
Normal file
211
.gitea/workflows/release.yml
Normal file
@@ -0,0 +1,211 @@
|
|||||||
|
name: Release
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
tags:
|
||||||
|
- 'v*'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-and-release:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
container:
|
||||||
|
image: code.foss.global/host.today/ht-docker-node:latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout code
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Set up Deno
|
||||||
|
uses: denoland/setup-deno@v1
|
||||||
|
with:
|
||||||
|
deno-version: v2.x
|
||||||
|
|
||||||
|
- name: Set up Node.js
|
||||||
|
uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: '22'
|
||||||
|
|
||||||
|
- name: Enable corepack
|
||||||
|
run: corepack enable
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: pnpm install --ignore-scripts
|
||||||
|
|
||||||
|
- name: Get version from tag
|
||||||
|
id: version
|
||||||
|
run: |
|
||||||
|
VERSION=${GITHUB_REF#refs/tags/}
|
||||||
|
echo "version=$VERSION" >> $GITHUB_OUTPUT
|
||||||
|
echo "version_number=${VERSION#v}" >> $GITHUB_OUTPUT
|
||||||
|
echo "Building version: $VERSION"
|
||||||
|
|
||||||
|
- name: Verify deno.json version matches tag
|
||||||
|
run: |
|
||||||
|
DENO_VERSION=$(grep -o '"version": "[^"]*"' deno.json | cut -d'"' -f4)
|
||||||
|
TAG_VERSION="${{ steps.version.outputs.version_number }}"
|
||||||
|
echo "deno.json version: $DENO_VERSION"
|
||||||
|
echo "Tag version: $TAG_VERSION"
|
||||||
|
if [ "$DENO_VERSION" != "$TAG_VERSION" ]; then
|
||||||
|
echo "ERROR: Version mismatch!"
|
||||||
|
echo "deno.json has version $DENO_VERSION but tag is $TAG_VERSION"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Compile binaries for all platforms
|
||||||
|
run: mkdir -p dist/binaries && npx tsdeno compile
|
||||||
|
|
||||||
|
- name: Generate SHA256 checksums
|
||||||
|
run: |
|
||||||
|
cd dist/binaries
|
||||||
|
sha256sum * > SHA256SUMS.txt
|
||||||
|
cat SHA256SUMS.txt
|
||||||
|
cd ../..
|
||||||
|
|
||||||
|
- name: Extract changelog for this version
|
||||||
|
id: changelog
|
||||||
|
run: |
|
||||||
|
VERSION="${{ steps.version.outputs.version }}"
|
||||||
|
|
||||||
|
# Check if CHANGELOG.md exists
|
||||||
|
if [ ! -f CHANGELOG.md ] && [ ! -f changelog.md ]; then
|
||||||
|
echo "No changelog found, using default release notes"
|
||||||
|
cat > /tmp/release_notes.md << EOF
|
||||||
|
## Onebox $VERSION
|
||||||
|
|
||||||
|
Pre-compiled binaries for multiple platforms.
|
||||||
|
|
||||||
|
### Installation
|
||||||
|
|
||||||
|
Use the installation script:
|
||||||
|
\`\`\`bash
|
||||||
|
curl -sSL https://code.foss.global/serve.zone/onebox/raw/branch/main/install.sh | sudo bash
|
||||||
|
\`\`\`
|
||||||
|
|
||||||
|
Or download the binary for your platform and make it executable.
|
||||||
|
|
||||||
|
### Supported Platforms
|
||||||
|
- Linux x86_64 (x64)
|
||||||
|
- Linux ARM64 (aarch64)
|
||||||
|
- macOS x86_64 (Intel)
|
||||||
|
- macOS ARM64 (Apple Silicon)
|
||||||
|
- Windows x86_64
|
||||||
|
|
||||||
|
### Checksums
|
||||||
|
SHA256 checksums are provided in SHA256SUMS.txt
|
||||||
|
EOF
|
||||||
|
else
|
||||||
|
CHANGELOG_FILE=$([ -f CHANGELOG.md ] && echo "CHANGELOG.md" || echo "changelog.md")
|
||||||
|
awk "/## \[$VERSION\]/,/## \[/" "$CHANGELOG_FILE" | sed '$d' > /tmp/release_notes.md || cat > /tmp/release_notes.md << EOF
|
||||||
|
## Onebox $VERSION
|
||||||
|
|
||||||
|
See changelog.md for full details.
|
||||||
|
|
||||||
|
### Installation
|
||||||
|
|
||||||
|
Use the installation script:
|
||||||
|
\`\`\`bash
|
||||||
|
curl -sSL https://code.foss.global/serve.zone/onebox/raw/branch/main/install.sh | sudo bash
|
||||||
|
\`\`\`
|
||||||
|
EOF
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Release notes:"
|
||||||
|
cat /tmp/release_notes.md
|
||||||
|
|
||||||
|
- name: Delete existing release if it exists
|
||||||
|
run: |
|
||||||
|
VERSION="${{ steps.version.outputs.version }}"
|
||||||
|
|
||||||
|
echo "Checking for existing release $VERSION..."
|
||||||
|
|
||||||
|
# Try to get existing release by tag
|
||||||
|
EXISTING_RELEASE_ID=$(curl -s \
|
||||||
|
-H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
|
||||||
|
"https://code.foss.global/api/v1/repos/serve.zone/onebox/releases/tags/$VERSION" \
|
||||||
|
| jq -r '.id // empty')
|
||||||
|
|
||||||
|
if [ -n "$EXISTING_RELEASE_ID" ]; then
|
||||||
|
echo "Found existing release (ID: $EXISTING_RELEASE_ID), deleting..."
|
||||||
|
curl -X DELETE -s \
|
||||||
|
-H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
|
||||||
|
"https://code.foss.global/api/v1/repos/serve.zone/onebox/releases/$EXISTING_RELEASE_ID"
|
||||||
|
echo "Existing release deleted"
|
||||||
|
sleep 2
|
||||||
|
else
|
||||||
|
echo "No existing release found, proceeding with creation"
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Create Gitea Release
|
||||||
|
run: |
|
||||||
|
VERSION="${{ steps.version.outputs.version }}"
|
||||||
|
RELEASE_NOTES=$(cat /tmp/release_notes.md)
|
||||||
|
|
||||||
|
# Create the release
|
||||||
|
echo "Creating release for $VERSION..."
|
||||||
|
RELEASE_ID=$(curl -X POST -s \
|
||||||
|
-H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
"https://code.foss.global/api/v1/repos/serve.zone/onebox/releases" \
|
||||||
|
-d "{
|
||||||
|
\"tag_name\": \"$VERSION\",
|
||||||
|
\"name\": \"Onebox $VERSION\",
|
||||||
|
\"body\": $(jq -Rs . /tmp/release_notes.md),
|
||||||
|
\"draft\": false,
|
||||||
|
\"prerelease\": false
|
||||||
|
}" | jq -r '.id')
|
||||||
|
|
||||||
|
echo "Release created with ID: $RELEASE_ID"
|
||||||
|
|
||||||
|
# Upload binaries as release assets
|
||||||
|
for binary in dist/binaries/*; do
|
||||||
|
filename=$(basename "$binary")
|
||||||
|
echo "Uploading $filename..."
|
||||||
|
curl -X POST -s \
|
||||||
|
-H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
|
||||||
|
-H "Content-Type: application/octet-stream" \
|
||||||
|
--data-binary "@$binary" \
|
||||||
|
"https://code.foss.global/api/v1/repos/serve.zone/onebox/releases/$RELEASE_ID/assets?name=$filename"
|
||||||
|
done
|
||||||
|
|
||||||
|
echo "All assets uploaded successfully"
|
||||||
|
|
||||||
|
- name: Clean up old releases
|
||||||
|
run: |
|
||||||
|
echo "Cleaning up old releases (keeping only last 3)..."
|
||||||
|
|
||||||
|
# Fetch all releases sorted by creation date
|
||||||
|
RELEASES=$(curl -s -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
|
||||||
|
"https://code.foss.global/api/v1/repos/serve.zone/onebox/releases" | \
|
||||||
|
jq -r 'sort_by(.created_at) | reverse | .[3:] | .[].id')
|
||||||
|
|
||||||
|
# Delete old releases
|
||||||
|
if [ -n "$RELEASES" ]; then
|
||||||
|
echo "Found releases to delete:"
|
||||||
|
for release_id in $RELEASES; do
|
||||||
|
echo " Deleting release ID: $release_id"
|
||||||
|
curl -X DELETE -s -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
|
||||||
|
"https://code.foss.global/api/v1/repos/serve.zone/onebox/releases/$release_id"
|
||||||
|
done
|
||||||
|
echo "Old releases deleted successfully"
|
||||||
|
else
|
||||||
|
echo "No old releases to delete (less than 4 releases total)"
|
||||||
|
fi
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
- name: Release Summary
|
||||||
|
run: |
|
||||||
|
echo "================================================"
|
||||||
|
echo " Release ${{ steps.version.outputs.version }} Complete!"
|
||||||
|
echo "================================================"
|
||||||
|
echo ""
|
||||||
|
echo "Binaries published:"
|
||||||
|
ls -lh dist/binaries/
|
||||||
|
echo ""
|
||||||
|
echo "Release URL:"
|
||||||
|
echo "https://code.foss.global/serve.zone/onebox/releases/tag/${{ steps.version.outputs.version }}"
|
||||||
|
echo ""
|
||||||
|
echo "Installation command:"
|
||||||
|
echo "curl -sSL https://code.foss.global/serve.zone/onebox/raw/branch/main/install.sh | sudo bash"
|
||||||
|
echo ""
|
||||||
210
changelog.md
210
changelog.md
@@ -1,5 +1,215 @@
|
|||||||
# Changelog
|
# Changelog
|
||||||
|
|
||||||
|
## 2026-03-16 - 1.14.1 - fix(repo)
|
||||||
|
no changes to commit
|
||||||
|
|
||||||
|
|
||||||
|
## 2026-03-16 - 1.14.0 - feat(daemon)
|
||||||
|
auto-install Docker and initialize Swarm during daemon service setup
|
||||||
|
|
||||||
|
- Adds a Docker availability check before installing the Onebox daemon service
|
||||||
|
- Installs Docker automatically when it is missing using the standard installation script
|
||||||
|
- Attempts to initialize Docker Swarm after installation and handles already-initialized environments gracefully
|
||||||
|
|
||||||
|
## 2026-03-16 - 1.13.17 - fix(ci)
|
||||||
|
remove forced container image pulling from Gitea workflow jobs
|
||||||
|
|
||||||
|
- Drops the `--pull always` container option from CI, npm publish, and release workflows.
|
||||||
|
- Keeps workflow container images unchanged while avoiding forced pulls on every job run.
|
||||||
|
|
||||||
|
## 2026-03-16 - 1.13.16 - fix(ci)
|
||||||
|
refresh workflow container images on every run and bump @apiclient.xyz/docker to ^5.1.1
|
||||||
|
|
||||||
|
- add --pull always to CI, release, and npm publish workflow containers to avoid stale images
|
||||||
|
- update @apiclient.xyz/docker from ^5.1.0 to ^5.1.1 in deno.json
|
||||||
|
|
||||||
|
## 2026-03-15 - 1.13.15 - fix(repo)
|
||||||
|
no changes to commit
|
||||||
|
|
||||||
|
|
||||||
|
## 2026-03-15 - 1.13.14 - fix(repo)
|
||||||
|
no changes to commit
|
||||||
|
|
||||||
|
|
||||||
|
## 2026-03-15 - 1.13.13 - fix(repo)
|
||||||
|
no changes to commit
|
||||||
|
|
||||||
|
|
||||||
|
## 2026-03-15 - 1.13.12 - fix(ci)
|
||||||
|
run pnpm install with --ignore-scripts in CI and release workflows
|
||||||
|
|
||||||
|
- Update CI workflow dependency installation steps to skip lifecycle scripts during builds.
|
||||||
|
- Apply the same install change to the release workflow for consistent automation behavior.
|
||||||
|
|
||||||
|
## 2026-03-15 - 1.13.11 - fix(project)
|
||||||
|
no changes to commit
|
||||||
|
|
||||||
|
|
||||||
|
## 2026-03-15 - 1.13.10 - fix(deps)
|
||||||
|
bump @git.zone/tsdeno to ^1.2.0
|
||||||
|
|
||||||
|
- Updates the tsdeno development dependency from ^1.1.1 to ^1.2.0.
|
||||||
|
|
||||||
|
## 2026-03-15 - 1.13.9 - fix(repo)
|
||||||
|
no changes to commit
|
||||||
|
|
||||||
|
|
||||||
|
## 2026-03-15 - 1.13.8 - fix(repo)
|
||||||
|
no changes to commit
|
||||||
|
|
||||||
|
|
||||||
|
## 2026-03-15 - 1.13.7 - fix(repo)
|
||||||
|
no changes to commit
|
||||||
|
|
||||||
|
|
||||||
|
## 2026-03-15 - 1.13.6 - fix(ci)
|
||||||
|
correct workflow container image registry path
|
||||||
|
|
||||||
|
- Update Gitea CI, release, and npm publish workflows to use the corrected ht-docker-node image path
|
||||||
|
- Align all workflow container references from hosttoday to host.today to prevent pipeline image resolution issues
|
||||||
|
|
||||||
|
## 2026-03-15 - 1.13.5 - fix(workflows)
|
||||||
|
switch Gitea workflow containers from ht-docker-dbase to ht-docker-node
|
||||||
|
|
||||||
|
- Updates the CI, release, and npm publish workflows to use the Node-focused container image consistently.
|
||||||
|
- Aligns workflow runtime images with the project's Node and Deno build and publish steps.
|
||||||
|
|
||||||
|
## 2026-03-15 - 1.13.4 - fix(ci)
|
||||||
|
run workflows in the shared build container and enable corepack for pnpm installs
|
||||||
|
|
||||||
|
- adds the ht-docker-dbase container image to CI, release, and npm publish workflows
|
||||||
|
- enables corepack before pnpm install in build and release jobs to ensure package manager availability
|
||||||
|
|
||||||
|
## 2026-03-15 - 1.13.3 - fix(build)
|
||||||
|
replace custom Deno compile scripts with tsdeno-based binary builds in CI and release workflows
|
||||||
|
|
||||||
|
- adds @git.zone/tsdeno as a dev dependency and configures compile targets in npmextra.json
|
||||||
|
- updates CI and release workflows to install Node.js dependencies before running tsdeno compile
|
||||||
|
- removes the legacy scripts/compile-all.sh script and points the compile task to tsdeno compile
|
||||||
|
|
||||||
|
## 2026-03-15 - 1.13.2 - fix(scripts)
|
||||||
|
install production dependencies before compiling binaries and exclude local node_modules from builds
|
||||||
|
|
||||||
|
- Adds a dependency installation step using the application entrypoint before cross-platform compilation
|
||||||
|
- Updates all deno compile targets to use --node-modules-dir=none to avoid bundling local node_modules
|
||||||
|
|
||||||
|
## 2026-03-15 - 1.13.1 - fix(deno)
|
||||||
|
remove nodeModulesDir from Deno configuration
|
||||||
|
|
||||||
|
- Drops the explicit nodeModulesDir setting from deno.json.
|
||||||
|
- Keeps the package version unchanged at 1.13.0 while simplifying runtime configuration.
|
||||||
|
|
||||||
|
## 2026-03-15 - 1.13.0 - feat(install)
|
||||||
|
improve installer with version selection, service restart handling, and upgrade documentation
|
||||||
|
|
||||||
|
- Adds installer command-line options for help, specific version selection, and custom install directory.
|
||||||
|
- Fetches the latest release from the Gitea API when no version is provided and installs the matching platform binary.
|
||||||
|
- Preserves Onebox data directories, stops and restarts the systemd service during updates, and refreshes installation instructions in the README including upgrade usage.
|
||||||
|
|
||||||
|
## 2026-03-15 - 1.12.1 - fix(package.json)
|
||||||
|
update package metadata
|
||||||
|
|
||||||
|
- Single metadata-only file changed (+1, -1)
|
||||||
|
- No source code or runtime behavior modified; safe patch release
|
||||||
|
|
||||||
|
## 2026-03-15 - 1.12.0 - feat(cli,release)
|
||||||
|
add self-upgrade command and automate CI, release, and npm publishing workflows
|
||||||
|
|
||||||
|
- adds a new `onebox upgrade` CLI command that checks the latest release and reinstalls the current binary via the installer script
|
||||||
|
- introduces Gitea CI workflows for type checks, build verification, multi-platform binary compilation, release creation, and npm publishing
|
||||||
|
- adds a reusable release template describing installation options, supported platforms, and checksum availability
|
||||||
|
|
||||||
|
## 2026-03-03 - 1.11.0 - feat(services)
|
||||||
|
map backend service data to UI components, add stats & logs parsing, fetch service stats, and fix logs request param
|
||||||
|
|
||||||
|
- Fix: rename service logs request property from 'lines' to 'tail' when calling typedRequest
|
||||||
|
- Add data transformation helpers: formatBytes, parseImageString, mapStatus, toServiceDetail, toServiceStats, parseLogs
|
||||||
|
- Transform service list and detail props to match @serve.zone/catalog component interfaces (map status, image, repo/tag, timestamps, registry)
|
||||||
|
- Dispatch fetchServiceStatsAction on service click and surface transformed stats with default values to avoid nulls
|
||||||
|
- Parse and normalize logs into timestamp/message pairs for the detail view
|
||||||
|
|
||||||
|
## 2026-03-02 - 1.10.3 - fix(bin)
|
||||||
|
make bin/onebox-wrapper.js executable
|
||||||
|
|
||||||
|
- Metadata-only change: file mode updated for bin/onebox-wrapper.js to include the executable bit
|
||||||
|
- No source or behavior changes to the code
|
||||||
|
|
||||||
|
## 2026-03-02 - 1.10.2 - fix(build)
|
||||||
|
update build/watch configuration, switch to esbuild bundler and tswatch, and bump catalog and tooling dependencies
|
||||||
|
|
||||||
|
- Switch watch script to 'tswatch' (replaced previous concurrently command invoking deno + tswatch).
|
||||||
|
- npmextra.json: set bundler to 'esbuild', enable production mode, include html/index.html in the bundle, and extend watchPatterns to include ./html/**/*.
|
||||||
|
- Backend watcher: expanded watch globs and changed command to include --unstable-ffi and runtime flags (--ephemeral --monitor); restart and debounce kept.
|
||||||
|
- Bump runtime deps: @design.estate/dees-catalog -> ^3.43.3, @serve.zone/catalog -> ^2.5.0.
|
||||||
|
- Bump devDependencies: @git.zone/tsbundle -> ^2.9.0, @git.zone/tswatch -> ^3.2.0.
|
||||||
|
|
||||||
|
## 2026-02-24 - 1.10.1 - fix(package.json)
|
||||||
|
update package metadata
|
||||||
|
|
||||||
|
- Single metadata-only file changed (+1 -1)
|
||||||
|
- No source code or runtime behavior modified; safe patch release
|
||||||
|
- Current package version is 1.10.0; recommend patch bump to 1.10.1
|
||||||
|
|
||||||
|
## 2026-02-24 - 1.10.0 - feat(opsserver)
|
||||||
|
introduce OpsServer (TypedRequest API) and new lightweight web UI; replace legacy Angular UI and add typed interfaces
|
||||||
|
|
||||||
|
- Add OpsServer (ts/opsserver) with TypedRequest handlers for admin, services, platform, dns, domains, registry, network, backups, schedules, settings and logs.
|
||||||
|
- Integrate typedrequest/typedserver and smartjwt/smartguard plugins (ts/plugins.ts) and add comprehensive ts_interfaces for requests and data shapes.
|
||||||
|
- Replace legacy HTTP server usage with OpsServer throughout daemon, Onebox class and CLI (ts/classes/daemon.ts, ts/classes/onebox.ts, ts/cli.ts).
|
||||||
|
- Implement log streaming via VirtualStream and support for downloading/restoring backups and registry token management within handlers.
|
||||||
|
- Introduce new web UI built with dees-element web components under ts_web (ob-app-shell and views) and bundle/watch tooling (npmextra.json, tsbundle/tswatch integration).
|
||||||
|
- Update package.json: add build/watch scripts, tsbundle/tswatch dev deps and new runtime dependencies for typedrequest and catalog components.
|
||||||
|
- Remove large Angular-based ui application and related services/components in ui/ (major cleanup of Angular code and assets).
|
||||||
|
- Note: This adds many new endpoints and internal API changes (TypedRequest-based); consumers of the old UI/HTTP endpoints should migrate to the new OpsServer TypedRequest API and web components.
|
||||||
|
|
||||||
|
## 2025-12-03 - 1.9.2 - fix(ui)
|
||||||
|
Add VS Code configs for the UI workspace and normalize dark theme CSS variables
|
||||||
|
|
||||||
|
- Add VS Code workspace files under ui/.vscode:
|
||||||
|
- - extensions.json: recommend the Angular language support extension
|
||||||
|
- - launch.json: Chrome launch configurations for 'ng serve' and 'ng test' (preLaunchTask hooks)
|
||||||
|
- - tasks.json: npm 'start' and 'test' tasks with a background TypeScript problem matcher to improve dev workflow
|
||||||
|
- Update ui/src/styles.css dark theme variables to use neutral black/gray HSL values for background, foreground, cards, popovers, accents, borders, inputs and ring to improve contrast and consistency
|
||||||
|
|
||||||
|
## 2025-11-27 - 1.9.1 - fix(ui)
|
||||||
|
Correct import success toast and add VS Code launch/tasks recommendations for the UI
|
||||||
|
|
||||||
|
- Fix backup import success toast in backups-tab.component to reference response.data.service.name (previously response.data.serviceName), preventing incorrect service name display.
|
||||||
|
- Add VS Code workspace settings for the UI: extensions recommendation, launch configurations for 'ng serve' and 'ng test', and npm tasks for start/test to simplify local development and debugging.
|
||||||
|
|
||||||
|
## 2025-11-27 - 1.9.0 - feat(backups)
|
||||||
|
Add backup import API and improve backup download/import flow in UI
|
||||||
|
|
||||||
|
- Backend: add /api/backups/import endpoint to accept multipart file uploads or JSON with a URL and import backups (saves temp file, validates .tar.enc, calls backupManager.restoreBackup in import mode).
|
||||||
|
- Backend: server-side import handler downloads remote backup URLs, stores temporary file, invokes restore/import logic and cleans up temp files.
|
||||||
|
- Frontend: add downloadBackup, importBackupFromFile and importBackupFromUrl methods to ApiService; trigger browser download using Blob and object URL with Authorization header.
|
||||||
|
- Frontend: replace raw download link in service detail UI with a Download button that calls downloadBackup and shows success/error toasts.
|
||||||
|
- Dev: add VS Code launch, tasks and recommended extensions for the ui workspace to simplify local development.
|
||||||
|
|
||||||
|
## 2025-11-27 - 1.8.0 - feat(backup)
|
||||||
|
Add backup scheduling system with GFS retention, API and UI integration
|
||||||
|
|
||||||
|
- Introduce backup scheduling subsystem (BackupScheduler) and integrate it into Onebox lifecycle (init & shutdown)
|
||||||
|
- Extend BackupManager.createBackup to accept schedule metadata (scheduleId) so scheduled runs are tracked
|
||||||
|
- Add GFS-style retention policy support (IRetentionPolicy + RETENTION_PRESETS) and expose per-tier retention in types
|
||||||
|
- Database migrations and repository changes: create backups and backup_schedules tables, add schedule_id, per-tier retention columns, and scope (all/pattern/service) support (migrations up to version 12)
|
||||||
|
- HTTP API: add backup schedule endpoints (GET/POST/PUT/DELETE /api/backup-schedules), trigger endpoint (/api/backup-schedules/:id/trigger), and service-scoped schedule endpoints
|
||||||
|
- UI: add API client methods for backup schedules and register a Backups tab in Services UI to surface schedules/backups
|
||||||
|
- Add task scheduling dependency (@push.rocks/taskbuffer) and export it via plugins.ts; update deno.json accordingly
|
||||||
|
- Type and repository updates across codebase to support schedule-aware backups, schedule CRUD, and retention enforcement
|
||||||
|
|
||||||
|
## 2025-11-27 - 1.7.0 - feat(backup)
|
||||||
|
Add backup system: BackupManager, DB schema, API endpoints and UI support
|
||||||
|
|
||||||
|
Introduce a complete service backup/restore subsystem with encrypted archives, database records and REST endpoints. Implements BackupManager with export/import for service config, platform resources (MongoDB, MinIO, ClickHouse), and Docker images; adds BackupRepository and migrations for backups table and include_image_in_backup; integrates backup flows into the HTTP API and the UI client; exposes backup password management and restore modes (restore/import/clone). Wire BackupManager into Onebox initialization.
|
||||||
|
|
||||||
|
- Add BackupManager implementing create/restore/export/import/encrypt/decrypt workflows (service config, platform resource dumps, Docker image export/import) and support for restore modes: restore, import, clone.
|
||||||
|
- Add BackupRepository and database migrations: create backups table and add include_image_in_backup column to services; database API methods for create/get/list/delete backups.
|
||||||
|
- Add HTTP API endpoints for backup management: list/create/get/download/delete backups, restore backups (/api/backups/restore) and backup password endpoints (/api/settings/backup-password).
|
||||||
|
- Update UI ApiService and types: add IBackup, IRestoreOptions, IRestoreResult, IBackupPasswordStatus and corresponding ApiService methods (getBackups, createBackup, getBackup, deleteBackup, getBackupDownloadUrl, restoreBackup, setBackupPassword, checkBackupPassword).
|
||||||
|
- Expose includeImageInBackup flag on service model and persist it in ServiceRepository (defaults to true for existing rows); service update flow supports toggling this option.
|
||||||
|
- Integrate BackupManager into Onebox core (initialized in Onebox constructor) and wire HTTP handlers to use the new manager; add DB repository export/import glue so backups are stored and referenced by ID.
|
||||||
|
|
||||||
## 2025-11-27 - 1.6.0 - feat(ui.dashboard)
|
## 2025-11-27 - 1.6.0 - feat(ui.dashboard)
|
||||||
Add Resource Usage card to dashboard and make dashboard cards full-height; add VSCode launch/tasks/config
|
Add Resource Usage card to dashboard and make dashboard cards full-height; add VSCode launch/tasks/config
|
||||||
|
|
||||||
|
|||||||
15
deno.json
15
deno.json
@@ -1,12 +1,11 @@
|
|||||||
{
|
{
|
||||||
"name": "@serve.zone/onebox",
|
"name": "@serve.zone/onebox",
|
||||||
"version": "1.6.0",
|
"version": "1.14.1",
|
||||||
"exports": "./mod.ts",
|
"exports": "./mod.ts",
|
||||||
"nodeModulesDir": "auto",
|
|
||||||
"tasks": {
|
"tasks": {
|
||||||
"test": "deno test --allow-all test/",
|
"test": "deno test --allow-all test/",
|
||||||
"test:watch": "deno test --allow-all --watch test/",
|
"test:watch": "deno test --allow-all --watch test/",
|
||||||
"compile": "bash scripts/compile-all.sh",
|
"compile": "tsdeno compile",
|
||||||
"dev": "pnpm run watch"
|
"dev": "pnpm run watch"
|
||||||
},
|
},
|
||||||
"imports": {
|
"imports": {
|
||||||
@@ -17,11 +16,17 @@
|
|||||||
"@std/encoding": "jsr:@std/encoding@^1.0.10",
|
"@std/encoding": "jsr:@std/encoding@^1.0.10",
|
||||||
"@db/sqlite": "jsr:@db/sqlite@0.12.0",
|
"@db/sqlite": "jsr:@db/sqlite@0.12.0",
|
||||||
"@push.rocks/smartdaemon": "npm:@push.rocks/smartdaemon@^2.1.0",
|
"@push.rocks/smartdaemon": "npm:@push.rocks/smartdaemon@^2.1.0",
|
||||||
"@apiclient.xyz/docker": "npm:@apiclient.xyz/docker@^5.1.0",
|
"@apiclient.xyz/docker": "npm:@apiclient.xyz/docker@^5.1.1",
|
||||||
"@apiclient.xyz/cloudflare": "npm:@apiclient.xyz/cloudflare@6.4.3",
|
"@apiclient.xyz/cloudflare": "npm:@apiclient.xyz/cloudflare@6.4.3",
|
||||||
"@push.rocks/smartacme": "npm:@push.rocks/smartacme@^8.0.0",
|
"@push.rocks/smartacme": "npm:@push.rocks/smartacme@^8.0.0",
|
||||||
"@push.rocks/smartregistry": "npm:@push.rocks/smartregistry@^2.2.0",
|
"@push.rocks/smartregistry": "npm:@push.rocks/smartregistry@^2.2.0",
|
||||||
"@push.rocks/smarts3": "npm:@push.rocks/smarts3@^5.1.0"
|
"@push.rocks/smarts3": "npm:@push.rocks/smarts3@^5.1.0",
|
||||||
|
"@push.rocks/taskbuffer": "npm:@push.rocks/taskbuffer@^3.1.0",
|
||||||
|
"@api.global/typedrequest-interfaces": "npm:@api.global/typedrequest-interfaces@^3.0.19",
|
||||||
|
"@api.global/typedrequest": "npm:@api.global/typedrequest@^3.2.6",
|
||||||
|
"@api.global/typedserver": "npm:@api.global/typedserver@^8.3.1",
|
||||||
|
"@push.rocks/smartguard": "npm:@push.rocks/smartguard@^3.1.0",
|
||||||
|
"@push.rocks/smartjwt": "npm:@push.rocks/smartjwt@^2.2.1"
|
||||||
},
|
},
|
||||||
"compilerOptions": {
|
"compilerOptions": {
|
||||||
"lib": [
|
"lib": [
|
||||||
|
|||||||
36196
dist_serve/bundle.js
Normal file
36196
dist_serve/bundle.js
Normal file
File diff suppressed because one or more lines are too long
33
dist_serve/index.html
Normal file
33
dist_serve/index.html
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
<!DOCTYPE html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta
|
||||||
|
name="viewport"
|
||||||
|
content="user-scalable=0, initial-scale=1, maximum-scale=1, minimum-scale=1, width=device-width, height=device-height"
|
||||||
|
/>
|
||||||
|
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
|
||||||
|
<meta name="theme-color" content="#000000" />
|
||||||
|
<title>Onebox</title>
|
||||||
|
<link rel="preconnect" href="https://assetbroker.lossless.one/" crossorigin>
|
||||||
|
<link rel="stylesheet" href="https://assetbroker.lossless.one/fonts/fonts.css">
|
||||||
|
<style>
|
||||||
|
html {
|
||||||
|
-ms-text-size-adjust: 100%;
|
||||||
|
-webkit-text-size-adjust: 100%;
|
||||||
|
}
|
||||||
|
body {
|
||||||
|
position: relative;
|
||||||
|
background: #000;
|
||||||
|
margin: 0px;
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<noscript>
|
||||||
|
<p style="color: #fff; text-align: center; margin-top: 100px;">
|
||||||
|
JavaScript is required to run the Onebox dashboard.
|
||||||
|
</p>
|
||||||
|
</noscript>
|
||||||
|
</body>
|
||||||
|
<script defer type="module" src="/bundle.js"></script>
|
||||||
|
</html>
|
||||||
33
html/index.html
Normal file
33
html/index.html
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
<!DOCTYPE html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta
|
||||||
|
name="viewport"
|
||||||
|
content="user-scalable=0, initial-scale=1, maximum-scale=1, minimum-scale=1, width=device-width, height=device-height"
|
||||||
|
/>
|
||||||
|
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
|
||||||
|
<meta name="theme-color" content="#000000" />
|
||||||
|
<title>Onebox</title>
|
||||||
|
<link rel="preconnect" href="https://assetbroker.lossless.one/" crossorigin>
|
||||||
|
<link rel="stylesheet" href="https://assetbroker.lossless.one/fonts/fonts.css">
|
||||||
|
<style>
|
||||||
|
html {
|
||||||
|
-ms-text-size-adjust: 100%;
|
||||||
|
-webkit-text-size-adjust: 100%;
|
||||||
|
}
|
||||||
|
body {
|
||||||
|
position: relative;
|
||||||
|
background: #000;
|
||||||
|
margin: 0px;
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<noscript>
|
||||||
|
<p style="color: #fff; text-align: center; margin-top: 100px;">
|
||||||
|
JavaScript is required to run the Onebox dashboard.
|
||||||
|
</p>
|
||||||
|
</noscript>
|
||||||
|
</body>
|
||||||
|
<script defer type="module" src="/bundle.js"></script>
|
||||||
|
</html>
|
||||||
446
install.sh
446
install.sh
@@ -1,192 +1,308 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
|
|
||||||
|
# Onebox Installer Script
|
||||||
|
# Downloads and installs pre-compiled Onebox binary from Gitea releases
|
||||||
#
|
#
|
||||||
# Onebox installer script
|
# Usage:
|
||||||
|
# Direct piped installation (recommended):
|
||||||
|
# curl -sSL https://code.foss.global/serve.zone/onebox/raw/branch/main/install.sh | sudo bash
|
||||||
#
|
#
|
||||||
|
# With version specification:
|
||||||
|
# curl -sSL https://code.foss.global/serve.zone/onebox/raw/branch/main/install.sh | sudo bash -s -- --version v1.11.0
|
||||||
|
#
|
||||||
|
# Options:
|
||||||
|
# -h, --help Show this help message
|
||||||
|
# --version VERSION Install specific version (e.g., v1.11.0)
|
||||||
|
# --install-dir DIR Installation directory (default: /opt/onebox)
|
||||||
|
|
||||||
set -e
|
set -e
|
||||||
|
|
||||||
# Configuration
|
# Default values
|
||||||
REPO_URL="https://code.foss.global/serve.zone/onebox"
|
SHOW_HELP=0
|
||||||
|
SPECIFIED_VERSION=""
|
||||||
INSTALL_DIR="/opt/onebox"
|
INSTALL_DIR="/opt/onebox"
|
||||||
BIN_LINK="/usr/local/bin/onebox"
|
GITEA_BASE_URL="https://code.foss.global"
|
||||||
|
GITEA_REPO="serve.zone/onebox"
|
||||||
|
SERVICE_NAME="smartdaemon_onebox"
|
||||||
|
|
||||||
# Colors
|
# Parse command line arguments
|
||||||
RED='\033[0;31m'
|
while [[ $# -gt 0 ]]; do
|
||||||
GREEN='\033[0;32m'
|
case $1 in
|
||||||
YELLOW='\033[1;33m'
|
-h|--help)
|
||||||
NC='\033[0m' # No Color
|
SHOW_HELP=1
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
--version)
|
||||||
|
SPECIFIED_VERSION="$2"
|
||||||
|
shift 2
|
||||||
|
;;
|
||||||
|
--install-dir)
|
||||||
|
INSTALL_DIR="$2"
|
||||||
|
shift 2
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
echo "Unknown option: $1"
|
||||||
|
echo "Use -h or --help for usage information"
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
|
||||||
# Functions
|
if [ $SHOW_HELP -eq 1 ]; then
|
||||||
error() {
|
echo "Onebox Installer Script"
|
||||||
echo -e "${RED}Error: $1${NC}" >&2
|
echo "Downloads and installs pre-compiled Onebox binary"
|
||||||
exit 1
|
echo ""
|
||||||
}
|
echo "Usage: $0 [options]"
|
||||||
|
echo ""
|
||||||
info() {
|
echo "Options:"
|
||||||
echo -e "${GREEN}$1${NC}"
|
echo " -h, --help Show this help message"
|
||||||
}
|
echo " --version VERSION Install specific version (e.g., v1.11.0)"
|
||||||
|
echo " --install-dir DIR Installation directory (default: /opt/onebox)"
|
||||||
warn() {
|
echo ""
|
||||||
echo -e "${YELLOW}$1${NC}"
|
echo "Examples:"
|
||||||
}
|
echo " # Install latest version"
|
||||||
|
echo " curl -sSL https://code.foss.global/serve.zone/onebox/raw/branch/main/install.sh | sudo bash"
|
||||||
# Detect platform and architecture
|
echo ""
|
||||||
detect_platform() {
|
echo " # Install specific version"
|
||||||
OS=$(uname -s | tr '[:upper:]' '[:lower:]')
|
echo " curl -sSL https://code.foss.global/serve.zone/onebox/raw/branch/main/install.sh | sudo bash -s -- --version v1.11.0"
|
||||||
ARCH=$(uname -m)
|
exit 0
|
||||||
|
fi
|
||||||
case "$OS" in
|
|
||||||
linux)
|
|
||||||
PLATFORM="linux"
|
|
||||||
;;
|
|
||||||
darwin)
|
|
||||||
PLATFORM="macos"
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
error "Unsupported operating system: $OS"
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
|
|
||||||
case "$ARCH" in
|
|
||||||
x86_64|amd64)
|
|
||||||
ARCH="x64"
|
|
||||||
;;
|
|
||||||
aarch64|arm64)
|
|
||||||
ARCH="arm64"
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
error "Unsupported architecture: $ARCH"
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
|
|
||||||
BINARY_NAME="onebox-${PLATFORM}-${ARCH}"
|
|
||||||
}
|
|
||||||
|
|
||||||
# Get latest version from Gitea API
|
|
||||||
get_latest_version() {
|
|
||||||
info "Fetching latest version..."
|
|
||||||
VERSION=$(curl -s "${REPO_URL}/releases" | grep -o '"tag_name":"v[^"]*' | head -1 | cut -d'"' -f4 | cut -c2-)
|
|
||||||
|
|
||||||
if [ -z "$VERSION" ]; then
|
|
||||||
warn "Could not fetch latest version, using 'main' branch"
|
|
||||||
VERSION="main"
|
|
||||||
else
|
|
||||||
info "Latest version: v${VERSION}"
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
# Check if running as root
|
# Check if running as root
|
||||||
check_root() {
|
if [ "$EUID" -ne 0 ]; then
|
||||||
if [ "$EUID" -ne 0 ]; then
|
echo "Please run as root (sudo bash install.sh or pipe to sudo bash)"
|
||||||
error "This script must be run as root (use sudo)"
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
# Helper function to detect OS and architecture
|
||||||
|
detect_platform() {
|
||||||
|
local os=$(uname -s)
|
||||||
|
local arch=$(uname -m)
|
||||||
|
|
||||||
|
# Map OS
|
||||||
|
case "$os" in
|
||||||
|
Linux)
|
||||||
|
os_name="linux"
|
||||||
|
;;
|
||||||
|
Darwin)
|
||||||
|
os_name="macos"
|
||||||
|
;;
|
||||||
|
MINGW*|MSYS*|CYGWIN*)
|
||||||
|
os_name="windows"
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
echo "Error: Unsupported operating system: $os"
|
||||||
|
echo "Supported: Linux, macOS, Windows"
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
# Map architecture
|
||||||
|
case "$arch" in
|
||||||
|
x86_64|amd64)
|
||||||
|
arch_name="x64"
|
||||||
|
;;
|
||||||
|
aarch64|arm64)
|
||||||
|
arch_name="arm64"
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
echo "Error: Unsupported architecture: $arch"
|
||||||
|
echo "Supported: x86_64/amd64 (x64), aarch64/arm64 (arm64)"
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
# Construct binary name
|
||||||
|
if [ "$os_name" = "windows" ]; then
|
||||||
|
echo "onebox-${os_name}-${arch_name}.exe"
|
||||||
|
else
|
||||||
|
echo "onebox-${os_name}-${arch_name}"
|
||||||
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# Get latest release version from Gitea API
|
||||||
|
get_latest_version() {
|
||||||
|
echo "Fetching latest release version from Gitea..." >&2
|
||||||
|
|
||||||
|
local api_url="${GITEA_BASE_URL}/api/v1/repos/${GITEA_REPO}/releases/latest"
|
||||||
|
local response=$(curl -sSL "$api_url" 2>/dev/null)
|
||||||
|
|
||||||
|
if [ $? -ne 0 ] || [ -z "$response" ]; then
|
||||||
|
echo "Error: Failed to fetch latest release information from Gitea API" >&2
|
||||||
|
echo "URL: $api_url" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Extract tag_name from JSON response
|
||||||
|
local version=$(echo "$response" | grep -o '"tag_name":"[^"]*"' | cut -d'"' -f4)
|
||||||
|
|
||||||
|
if [ -z "$version" ]; then
|
||||||
|
echo "Error: Could not determine latest version from API response" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "$version"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Main installation process
|
||||||
|
echo "================================================"
|
||||||
|
echo " Onebox Installation Script"
|
||||||
|
echo "================================================"
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
# Detect platform
|
||||||
|
BINARY_NAME=$(detect_platform)
|
||||||
|
echo "Detected platform: $BINARY_NAME"
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
# Determine version to install
|
||||||
|
if [ -n "$SPECIFIED_VERSION" ]; then
|
||||||
|
VERSION="$SPECIFIED_VERSION"
|
||||||
|
echo "Installing specified version: $VERSION"
|
||||||
|
else
|
||||||
|
VERSION=$(get_latest_version)
|
||||||
|
echo "Installing latest version: $VERSION"
|
||||||
|
fi
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
# Construct download URL
|
||||||
|
DOWNLOAD_URL="${GITEA_BASE_URL}/${GITEA_REPO}/releases/download/${VERSION}/${BINARY_NAME}"
|
||||||
|
echo "Download URL: $DOWNLOAD_URL"
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
# Check if service is running and stop it
|
||||||
|
SERVICE_WAS_RUNNING=0
|
||||||
|
if systemctl is-enabled --quiet "$SERVICE_NAME" 2>/dev/null || systemctl is-active --quiet "$SERVICE_NAME" 2>/dev/null; then
|
||||||
|
SERVICE_WAS_RUNNING=1
|
||||||
|
if systemctl is-active --quiet "$SERVICE_NAME" 2>/dev/null; then
|
||||||
|
echo "Stopping Onebox service..."
|
||||||
|
systemctl stop "$SERVICE_NAME"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Clean installation directory - ensure only binary exists
|
||||||
|
if [ -d "$INSTALL_DIR" ]; then
|
||||||
|
echo "Cleaning installation directory: $INSTALL_DIR"
|
||||||
|
rm -rf "$INSTALL_DIR"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Create fresh installation directory
|
||||||
|
echo "Creating installation directory: $INSTALL_DIR"
|
||||||
|
mkdir -p "$INSTALL_DIR"
|
||||||
|
|
||||||
# Download binary
|
# Download binary
|
||||||
download_binary() {
|
echo "Downloading Onebox binary..."
|
||||||
info "Downloading Onebox ${VERSION} for ${PLATFORM}-${ARCH}..."
|
TEMP_FILE="$INSTALL_DIR/onebox.download"
|
||||||
|
curl -sSL "$DOWNLOAD_URL" -o "$TEMP_FILE"
|
||||||
|
|
||||||
# Create temp directory
|
if [ $? -ne 0 ]; then
|
||||||
TMP_DIR=$(mktemp -d)
|
echo "Error: Failed to download binary from $DOWNLOAD_URL"
|
||||||
TMP_FILE="${TMP_DIR}/${BINARY_NAME}"
|
echo ""
|
||||||
|
echo "Please check:"
|
||||||
|
echo " 1. Your internet connection"
|
||||||
|
echo " 2. The specified version exists: ${GITEA_BASE_URL}/${GITEA_REPO}/releases"
|
||||||
|
echo " 3. The platform binary is available for this release"
|
||||||
|
rm -f "$TEMP_FILE"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
# Try release download first
|
# Check if download was successful (file exists and not empty)
|
||||||
if [ "$VERSION" != "main" ]; then
|
if [ ! -s "$TEMP_FILE" ]; then
|
||||||
DOWNLOAD_URL="${REPO_URL}/releases/download/v${VERSION}/${BINARY_NAME}"
|
echo "Error: Downloaded file is empty or does not exist"
|
||||||
else
|
rm -f "$TEMP_FILE"
|
||||||
DOWNLOAD_URL="${REPO_URL}/raw/branch/main/dist/binaries/${BINARY_NAME}"
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if ! curl -L -f -o "$TMP_FILE" "$DOWNLOAD_URL"; then
|
# Move to final location
|
||||||
error "Failed to download binary from $DOWNLOAD_URL"
|
BINARY_PATH="$INSTALL_DIR/onebox"
|
||||||
fi
|
mv "$TEMP_FILE" "$BINARY_PATH"
|
||||||
|
|
||||||
# Verify download
|
if [ $? -ne 0 ] || [ ! -f "$BINARY_PATH" ]; then
|
||||||
if [ ! -f "$TMP_FILE" ] || [ ! -s "$TMP_FILE" ]; then
|
echo "Error: Failed to move binary to $BINARY_PATH"
|
||||||
error "Downloaded file is empty or missing"
|
rm -f "$TEMP_FILE" 2>/dev/null
|
||||||
fi
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
info "✓ Download complete"
|
# Make executable
|
||||||
}
|
chmod +x "$BINARY_PATH"
|
||||||
|
|
||||||
# Install binary
|
if [ $? -ne 0 ]; then
|
||||||
install_binary() {
|
echo "Error: Failed to make binary executable"
|
||||||
info "Installing Onebox to ${INSTALL_DIR}..."
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
# Create install directory
|
echo "Binary installed successfully to: $BINARY_PATH"
|
||||||
mkdir -p "$INSTALL_DIR"
|
echo ""
|
||||||
|
|
||||||
# Copy binary
|
# Check if /usr/local/bin is in PATH
|
||||||
cp "$TMP_FILE" "${INSTALL_DIR}/onebox"
|
if [[ ":$PATH:" == *":/usr/local/bin:"* ]]; then
|
||||||
chmod +x "${INSTALL_DIR}/onebox"
|
BIN_DIR="/usr/local/bin"
|
||||||
|
else
|
||||||
|
BIN_DIR="/usr/bin"
|
||||||
|
fi
|
||||||
|
|
||||||
# Create symlink
|
# Create symlink for global access
|
||||||
ln -sf "${INSTALL_DIR}/onebox" "$BIN_LINK"
|
ln -sf "$BINARY_PATH" "$BIN_DIR/onebox"
|
||||||
|
echo "Symlink created: $BIN_DIR/onebox -> $BINARY_PATH"
|
||||||
|
echo ""
|
||||||
|
|
||||||
# Cleanup temp files
|
# Create data directories
|
||||||
rm -rf "$TMP_DIR"
|
mkdir -p /var/lib/onebox
|
||||||
|
mkdir -p /var/www/certbot
|
||||||
|
|
||||||
info "✓ Installation complete"
|
# Restart service if it was running before update
|
||||||
}
|
if [ $SERVICE_WAS_RUNNING -eq 1 ]; then
|
||||||
|
echo "Restarting Onebox service..."
|
||||||
|
systemctl restart "$SERVICE_NAME"
|
||||||
|
echo "Service restarted successfully."
|
||||||
|
echo ""
|
||||||
|
fi
|
||||||
|
|
||||||
# Initialize database and config
|
echo "================================================"
|
||||||
initialize() {
|
echo " Onebox Installation Complete!"
|
||||||
info "Initializing Onebox..."
|
echo "================================================"
|
||||||
|
echo ""
|
||||||
|
echo "Installation details:"
|
||||||
|
echo " Binary location: $BINARY_PATH"
|
||||||
|
echo " Symlink location: $BIN_DIR/onebox"
|
||||||
|
echo " Version: $VERSION"
|
||||||
|
echo ""
|
||||||
|
|
||||||
# Create data directory
|
# Check if database exists (indicates existing installation)
|
||||||
mkdir -p /var/lib/onebox
|
if [ -f "/var/lib/onebox/onebox.db" ]; then
|
||||||
|
echo "Data directory: /var/lib/onebox (preserved)"
|
||||||
# Create certbot directory for ACME challenges
|
echo ""
|
||||||
mkdir -p /var/www/certbot
|
echo "Your existing data has been preserved."
|
||||||
|
if [ $SERVICE_WAS_RUNNING -eq 1 ]; then
|
||||||
info "✓ Initialization complete"
|
echo "The service has been restarted with your current settings."
|
||||||
}
|
else
|
||||||
|
echo "Start the service with: onebox daemon start"
|
||||||
# Print success message
|
fi
|
||||||
print_success() {
|
else
|
||||||
echo ""
|
echo "Get started:"
|
||||||
info "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
echo ""
|
||||||
info " Onebox installed successfully!"
|
echo " onebox --version"
|
||||||
info "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
echo " onebox --help"
|
||||||
echo ""
|
echo ""
|
||||||
echo "Next steps:"
|
echo " 1. Configure Cloudflare (optional):"
|
||||||
echo ""
|
echo " onebox config set cloudflareAPIKey <key>"
|
||||||
echo "1. Configure Cloudflare (optional):"
|
echo " onebox config set cloudflareEmail <email>"
|
||||||
echo " onebox config set cloudflareAPIKey <key>"
|
echo " onebox config set cloudflareZoneID <zone-id>"
|
||||||
echo " onebox config set cloudflareEmail <email>"
|
echo " onebox config set serverIP <your-server-ip>"
|
||||||
echo " onebox config set cloudflareZoneID <zone-id>"
|
echo ""
|
||||||
echo " onebox config set serverIP <your-server-ip>"
|
echo " 2. Configure ACME email:"
|
||||||
echo ""
|
echo " onebox config set acmeEmail <your@email.com>"
|
||||||
echo "2. Configure ACME email:"
|
echo ""
|
||||||
echo " onebox config set acmeEmail <your@email.com>"
|
echo " 3. Install daemon:"
|
||||||
echo ""
|
echo " onebox daemon install"
|
||||||
echo "3. Install daemon:"
|
echo ""
|
||||||
echo " onebox daemon install"
|
echo " 4. Start daemon:"
|
||||||
echo ""
|
echo " onebox daemon start"
|
||||||
echo "4. Start daemon:"
|
echo ""
|
||||||
echo " onebox daemon start"
|
echo " 5. Deploy your first service:"
|
||||||
echo ""
|
echo " onebox service add myapp --image nginx:latest --domain app.example.com"
|
||||||
echo "5. Deploy your first service:"
|
echo ""
|
||||||
echo " onebox service add myapp --image nginx:latest --domain app.example.com"
|
echo " Web UI: http://localhost:3000"
|
||||||
echo ""
|
echo " Default credentials: admin / admin"
|
||||||
echo "Web UI: http://localhost:3000"
|
fi
|
||||||
echo "Default credentials: admin / admin"
|
echo ""
|
||||||
echo ""
|
|
||||||
}
|
|
||||||
|
|
||||||
# Main installation flow
|
|
||||||
main() {
|
|
||||||
info "Onebox Installer"
|
|
||||||
echo ""
|
|
||||||
|
|
||||||
check_root
|
|
||||||
detect_platform
|
|
||||||
get_latest_version
|
|
||||||
download_binary
|
|
||||||
install_binary
|
|
||||||
initialize
|
|
||||||
print_success
|
|
||||||
}
|
|
||||||
|
|
||||||
# Run main function
|
|
||||||
main
|
|
||||||
|
|||||||
57
npmextra.json
Normal file
57
npmextra.json
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
{
|
||||||
|
"@git.zone/tsbundle": {
|
||||||
|
"bundles": [
|
||||||
|
{
|
||||||
|
"from": "./ts_web/index.ts",
|
||||||
|
"to": "./ts_bundled/bundle.ts",
|
||||||
|
"outputMode": "base64ts",
|
||||||
|
"bundler": "esbuild",
|
||||||
|
"production": true,
|
||||||
|
"includeFiles": [{"from": "./html/index.html", "to": "index.html"}]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"@git.zone/tsdeno": {
|
||||||
|
"compileTargets": [
|
||||||
|
{
|
||||||
|
"name": "onebox-linux-x64",
|
||||||
|
"entryPoint": "mod.ts",
|
||||||
|
"outDir": "dist/binaries",
|
||||||
|
"target": "x86_64-unknown-linux-gnu",
|
||||||
|
"permissions": ["--allow-all"],
|
||||||
|
"noCheck": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "onebox-linux-arm64",
|
||||||
|
"entryPoint": "mod.ts",
|
||||||
|
"outDir": "dist/binaries",
|
||||||
|
"target": "aarch64-unknown-linux-gnu",
|
||||||
|
"permissions": ["--allow-all"],
|
||||||
|
"noCheck": true
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"@git.zone/tswatch": {
|
||||||
|
"bundles": [
|
||||||
|
{
|
||||||
|
"from": "./ts_web/index.ts",
|
||||||
|
"to": "./ts_bundled/bundle.ts",
|
||||||
|
"outputMode": "base64ts",
|
||||||
|
"bundler": "esbuild",
|
||||||
|
"production": true,
|
||||||
|
"watchPatterns": ["./ts_web/**/*", "./html/**/*"],
|
||||||
|
"includeFiles": [{"from": "./html/index.html", "to": "index.html"}]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"watchers": [
|
||||||
|
{
|
||||||
|
"name": "backend",
|
||||||
|
"watch": ["./ts/**/*", "./ts_interfaces/**/*", "./ts_bundled/**/*"],
|
||||||
|
"command": "deno run --allow-all --unstable-ffi mod.ts server --ephemeral --monitor",
|
||||||
|
"restart": true,
|
||||||
|
"debounce": 500,
|
||||||
|
"runOnStart": true
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
17
package.json
17
package.json
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@serve.zone/onebox",
|
"name": "@serve.zone/onebox",
|
||||||
"version": "1.6.0",
|
"version": "1.14.1",
|
||||||
"description": "Self-hosted container platform with automatic SSL and DNS - a mini Heroku for single servers",
|
"description": "Self-hosted container platform with automatic SSL and DNS - a mini Heroku for single servers",
|
||||||
"main": "mod.ts",
|
"main": "mod.ts",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
@@ -9,7 +9,9 @@
|
|||||||
},
|
},
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"postinstall": "node scripts/install-binary.js",
|
"postinstall": "node scripts/install-binary.js",
|
||||||
"watch": "concurrently --kill-others --names \"BACKEND,UI\" --prefix-colors \"cyan,magenta\" \"deno run --allow-all --unstable-ffi --watch mod.ts server --ephemeral --monitor\" \"cd ui && pnpm run watch\""
|
"watch": "tswatch",
|
||||||
|
"build": "tsbundle",
|
||||||
|
"bundle": "tsbundle"
|
||||||
},
|
},
|
||||||
"keywords": [
|
"keywords": [
|
||||||
"docker",
|
"docker",
|
||||||
@@ -51,8 +53,15 @@
|
|||||||
"arm64"
|
"arm64"
|
||||||
],
|
],
|
||||||
"packageManager": "pnpm@10.18.1+sha512.77a884a165cbba2d8d1c19e3b4880eee6d2fcabd0d879121e282196b80042351d5eb3ca0935fa599da1dc51265cc68816ad2bddd2a2de5ea9fdf92adbec7cd34",
|
"packageManager": "pnpm@10.18.1+sha512.77a884a165cbba2d8d1c19e3b4880eee6d2fcabd0d879121e282196b80042351d5eb3ca0935fa599da1dc51265cc68816ad2bddd2a2de5ea9fdf92adbec7cd34",
|
||||||
"dependencies": {},
|
"dependencies": {
|
||||||
|
"@api.global/typedrequest-interfaces": "^3.0.19",
|
||||||
|
"@design.estate/dees-catalog": "^3.43.3",
|
||||||
|
"@design.estate/dees-element": "^2.1.6",
|
||||||
|
"@serve.zone/catalog": "^2.5.0"
|
||||||
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"concurrently": "^9.1.2"
|
"@git.zone/tsbundle": "^2.9.0",
|
||||||
|
"@git.zone/tsdeno": "^1.2.0",
|
||||||
|
"@git.zone/tswatch": "^3.2.0"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
5179
pnpm-lock.yaml
generated
5179
pnpm-lock.yaml
generated
File diff suppressed because it is too large
Load Diff
16
readme.md
16
readme.md
@@ -47,10 +47,11 @@ For reporting bugs, issues, or security vulnerabilities, please visit [community
|
|||||||
### Installation
|
### Installation
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Download the latest release for your platform
|
# One-line install (recommended)
|
||||||
curl -sSL https://code.foss.global/serve.zone/onebox/releases/latest/download/onebox-linux-x64 -o onebox
|
curl -sSL https://code.foss.global/serve.zone/onebox/raw/branch/main/install.sh | sudo bash
|
||||||
chmod +x onebox
|
|
||||||
sudo mv onebox /usr/local/bin/
|
# Install a specific version
|
||||||
|
curl -sSL https://code.foss.global/serve.zone/onebox/raw/branch/main/install.sh | sudo bash -s -- --version v1.11.0
|
||||||
|
|
||||||
# Or install from npm
|
# Or install from npm
|
||||||
pnpm install -g @serve.zone/onebox
|
pnpm install -g @serve.zone/onebox
|
||||||
@@ -242,6 +243,13 @@ onebox config set cloudflareZoneID your-zone-id
|
|||||||
onebox status
|
onebox status
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### Upgrade
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Upgrade to the latest version (requires root)
|
||||||
|
sudo onebox upgrade
|
||||||
|
```
|
||||||
|
|
||||||
## Configuration 🔧
|
## Configuration 🔧
|
||||||
|
|
||||||
### System Requirements
|
### System Requirements
|
||||||
|
|||||||
@@ -1,56 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
#
|
|
||||||
# Compile Onebox for all platforms
|
|
||||||
#
|
|
||||||
|
|
||||||
set -e
|
|
||||||
|
|
||||||
VERSION=$(grep '"version"' deno.json | cut -d'"' -f4)
|
|
||||||
echo "Compiling Onebox v${VERSION} for all platforms..."
|
|
||||||
|
|
||||||
# Create dist directory
|
|
||||||
mkdir -p dist/binaries
|
|
||||||
|
|
||||||
# Compile for each platform
|
|
||||||
echo "Compiling for Linux x64..."
|
|
||||||
deno compile --allow-all --no-check \
|
|
||||||
--output "dist/binaries/onebox-linux-x64" \
|
|
||||||
--target x86_64-unknown-linux-gnu \
|
|
||||||
mod.ts
|
|
||||||
|
|
||||||
echo "Compiling for Linux ARM64..."
|
|
||||||
deno compile --allow-all --no-check \
|
|
||||||
--output "dist/binaries/onebox-linux-arm64" \
|
|
||||||
--target aarch64-unknown-linux-gnu \
|
|
||||||
mod.ts
|
|
||||||
|
|
||||||
echo "Compiling for macOS x64..."
|
|
||||||
deno compile --allow-all --no-check \
|
|
||||||
--output "dist/binaries/onebox-macos-x64" \
|
|
||||||
--target x86_64-apple-darwin \
|
|
||||||
mod.ts
|
|
||||||
|
|
||||||
echo "Compiling for macOS ARM64..."
|
|
||||||
deno compile --allow-all --no-check \
|
|
||||||
--output "dist/binaries/onebox-macos-arm64" \
|
|
||||||
--target aarch64-apple-darwin \
|
|
||||||
mod.ts
|
|
||||||
|
|
||||||
echo "Compiling for Windows x64..."
|
|
||||||
deno compile --allow-all --no-check \
|
|
||||||
--output "dist/binaries/onebox-windows-x64.exe" \
|
|
||||||
--target x86_64-pc-windows-msvc \
|
|
||||||
mod.ts
|
|
||||||
|
|
||||||
echo ""
|
|
||||||
echo "✓ Compilation complete!"
|
|
||||||
echo ""
|
|
||||||
echo "Binaries:"
|
|
||||||
ls -lh dist/binaries/
|
|
||||||
echo ""
|
|
||||||
echo "Next steps:"
|
|
||||||
echo "1. Test binaries on their respective platforms"
|
|
||||||
echo "2. Create git tag: git tag v${VERSION}"
|
|
||||||
echo "3. Push tag: git push origin v${VERSION}"
|
|
||||||
echo "4. Upload binaries to Gitea release"
|
|
||||||
echo "5. Publish to npm: pnpm publish"
|
|
||||||
@@ -3,6 +3,6 @@
|
|||||||
*/
|
*/
|
||||||
export const commitinfo = {
|
export const commitinfo = {
|
||||||
name: '@serve.zone/onebox',
|
name: '@serve.zone/onebox',
|
||||||
version: '1.6.0',
|
version: '1.14.1',
|
||||||
description: 'Self-hosted container platform with automatic SSL and DNS - a mini Heroku for single servers'
|
description: 'Self-hosted container platform with automatic SSL and DNS - a mini Heroku for single servers'
|
||||||
}
|
}
|
||||||
|
|||||||
1117
ts/classes/backup-manager.ts
Normal file
1117
ts/classes/backup-manager.ts
Normal file
File diff suppressed because it is too large
Load Diff
650
ts/classes/backup-scheduler.ts
Normal file
650
ts/classes/backup-scheduler.ts
Normal file
@@ -0,0 +1,650 @@
|
|||||||
|
/**
|
||||||
|
* Backup Scheduler for Onebox
|
||||||
|
*
|
||||||
|
* Uses @push.rocks/taskbuffer for cron-based scheduled backups
|
||||||
|
* with GFS (Grandfather-Father-Son) time-window based retention scheme.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import * as plugins from '../plugins.ts';
|
||||||
|
import type {
|
||||||
|
IBackupSchedule,
|
||||||
|
IBackupScheduleCreate,
|
||||||
|
IBackupScheduleUpdate,
|
||||||
|
IService,
|
||||||
|
IRetentionPolicy,
|
||||||
|
} from '../types.ts';
|
||||||
|
import { RETENTION_PRESETS } from '../types.ts';
|
||||||
|
import { logger } from '../logging.ts';
|
||||||
|
import { getErrorMessage } from '../utils/error.ts';
|
||||||
|
import type { Onebox } from './onebox.ts';
|
||||||
|
|
||||||
|
export class BackupScheduler {
|
||||||
|
private oneboxRef: Onebox;
|
||||||
|
private taskManager!: plugins.taskbuffer.TaskManager;
|
||||||
|
private scheduledTasks: Map<number, plugins.taskbuffer.Task> = new Map();
|
||||||
|
private initialized = false;
|
||||||
|
|
||||||
|
constructor(oneboxRef: Onebox) {
|
||||||
|
this.oneboxRef = oneboxRef;
|
||||||
|
// TaskManager is created in init() to avoid log spam before ready
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initialize the scheduler and load enabled schedules
|
||||||
|
*/
|
||||||
|
async init(): Promise<void> {
|
||||||
|
if (this.initialized) {
|
||||||
|
logger.warn('BackupScheduler already initialized');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Create TaskManager here (not in constructor) to avoid "no cronjobs" log spam
|
||||||
|
this.taskManager = new plugins.taskbuffer.TaskManager();
|
||||||
|
|
||||||
|
// Add heartbeat task immediately to prevent "no cronjobs specified" log spam
|
||||||
|
// This runs hourly and does nothing, but keeps taskbuffer happy
|
||||||
|
const heartbeatTask = new plugins.taskbuffer.Task({
|
||||||
|
name: 'backup-scheduler-heartbeat',
|
||||||
|
taskFunction: async () => {
|
||||||
|
// No-op heartbeat task
|
||||||
|
},
|
||||||
|
});
|
||||||
|
this.taskManager.addAndScheduleTask(heartbeatTask, '0 * * * *'); // Hourly
|
||||||
|
|
||||||
|
// Load all enabled schedules from database
|
||||||
|
const schedules = this.oneboxRef.database.getEnabledBackupSchedules();
|
||||||
|
|
||||||
|
for (const schedule of schedules) {
|
||||||
|
await this.registerTask(schedule);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start the task manager (activates cron scheduling)
|
||||||
|
await this.taskManager.start();
|
||||||
|
|
||||||
|
this.initialized = true;
|
||||||
|
logger.info(`Backup scheduler started with ${schedules.length} enabled schedule(s)`);
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`Failed to initialize backup scheduler: ${getErrorMessage(error)}`);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Stop the scheduler
|
||||||
|
*/
|
||||||
|
async stop(): Promise<void> {
|
||||||
|
if (!this.initialized || !this.taskManager) return;
|
||||||
|
|
||||||
|
try {
|
||||||
|
await this.taskManager.stop();
|
||||||
|
this.scheduledTasks.clear();
|
||||||
|
this.initialized = false;
|
||||||
|
logger.info('Backup scheduler stopped');
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`Failed to stop backup scheduler: ${getErrorMessage(error)}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new backup schedule
|
||||||
|
*/
|
||||||
|
async createSchedule(request: IBackupScheduleCreate): Promise<IBackupSchedule> {
|
||||||
|
// Validate based on scope type
|
||||||
|
let serviceId: number | undefined;
|
||||||
|
let serviceName: string | undefined;
|
||||||
|
|
||||||
|
switch (request.scopeType) {
|
||||||
|
case 'service':
|
||||||
|
// Validate service exists
|
||||||
|
if (!request.serviceName) {
|
||||||
|
throw new Error('serviceName is required for service-specific schedules');
|
||||||
|
}
|
||||||
|
const service = this.oneboxRef.database.getServiceByName(request.serviceName);
|
||||||
|
if (!service) {
|
||||||
|
throw new Error(`Service not found: ${request.serviceName}`);
|
||||||
|
}
|
||||||
|
serviceId = service.id!;
|
||||||
|
serviceName = service.name;
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'pattern':
|
||||||
|
// Validate pattern is provided
|
||||||
|
if (!request.scopePattern) {
|
||||||
|
throw new Error('scopePattern is required for pattern-based schedules');
|
||||||
|
}
|
||||||
|
// Validate pattern matches at least one service
|
||||||
|
const matchingServices = this.getServicesMatchingPattern(request.scopePattern);
|
||||||
|
if (matchingServices.length === 0) {
|
||||||
|
logger.warn(`Pattern "${request.scopePattern}" currently matches no services`);
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'all':
|
||||||
|
// No validation needed for global schedules
|
||||||
|
break;
|
||||||
|
|
||||||
|
default:
|
||||||
|
throw new Error(`Invalid scope type: ${request.scopeType}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use provided cron expression
|
||||||
|
const cronExpression = request.cronExpression;
|
||||||
|
|
||||||
|
// Calculate next run time
|
||||||
|
const nextRunAt = this.calculateNextRun(cronExpression);
|
||||||
|
|
||||||
|
// Create schedule in database
|
||||||
|
const schedule = this.oneboxRef.database.createBackupSchedule({
|
||||||
|
scopeType: request.scopeType,
|
||||||
|
scopePattern: request.scopePattern,
|
||||||
|
serviceId,
|
||||||
|
serviceName,
|
||||||
|
cronExpression,
|
||||||
|
retention: request.retention,
|
||||||
|
enabled: request.enabled !== false,
|
||||||
|
lastRunAt: null,
|
||||||
|
nextRunAt,
|
||||||
|
lastStatus: null,
|
||||||
|
lastError: null,
|
||||||
|
createdAt: Date.now(),
|
||||||
|
updatedAt: Date.now(),
|
||||||
|
});
|
||||||
|
|
||||||
|
// Register task if enabled
|
||||||
|
if (schedule.enabled) {
|
||||||
|
await this.registerTask(schedule);
|
||||||
|
}
|
||||||
|
|
||||||
|
const scopeDesc = this.getScopeDescription(schedule);
|
||||||
|
const retentionDesc = this.getRetentionDescription(schedule.retention);
|
||||||
|
logger.info(`Backup schedule created: ${schedule.id} for ${scopeDesc} (${retentionDesc})`);
|
||||||
|
return schedule;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update an existing backup schedule
|
||||||
|
*/
|
||||||
|
async updateSchedule(scheduleId: number, updates: IBackupScheduleUpdate): Promise<IBackupSchedule> {
|
||||||
|
const schedule = this.oneboxRef.database.getBackupScheduleById(scheduleId);
|
||||||
|
if (!schedule) {
|
||||||
|
throw new Error(`Backup schedule not found: ${scheduleId}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Deschedule existing task if present
|
||||||
|
await this.descheduleTask(scheduleId);
|
||||||
|
|
||||||
|
// Update database
|
||||||
|
this.oneboxRef.database.updateBackupSchedule(scheduleId, updates);
|
||||||
|
|
||||||
|
// Get updated schedule
|
||||||
|
const updatedSchedule = this.oneboxRef.database.getBackupScheduleById(scheduleId)!;
|
||||||
|
|
||||||
|
// Calculate new next run time if cron changed
|
||||||
|
if (updates.cronExpression) {
|
||||||
|
const nextRunAt = this.calculateNextRun(updatedSchedule.cronExpression);
|
||||||
|
this.oneboxRef.database.updateBackupSchedule(scheduleId, { nextRunAt });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Re-register task if enabled
|
||||||
|
if (updatedSchedule.enabled) {
|
||||||
|
await this.registerTask(updatedSchedule);
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`Backup schedule updated: ${scheduleId}`);
|
||||||
|
return this.oneboxRef.database.getBackupScheduleById(scheduleId)!;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Delete a backup schedule
|
||||||
|
*/
|
||||||
|
async deleteSchedule(scheduleId: number): Promise<void> {
|
||||||
|
const schedule = this.oneboxRef.database.getBackupScheduleById(scheduleId);
|
||||||
|
if (!schedule) {
|
||||||
|
throw new Error(`Backup schedule not found: ${scheduleId}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Deschedule task
|
||||||
|
await this.descheduleTask(scheduleId);
|
||||||
|
|
||||||
|
// Delete from database
|
||||||
|
this.oneboxRef.database.deleteBackupSchedule(scheduleId);
|
||||||
|
|
||||||
|
logger.info(`Backup schedule deleted: ${scheduleId}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Trigger immediate backup for a schedule
|
||||||
|
*/
|
||||||
|
async triggerBackup(scheduleId: number): Promise<void> {
|
||||||
|
const schedule = this.oneboxRef.database.getBackupScheduleById(scheduleId);
|
||||||
|
if (!schedule) {
|
||||||
|
throw new Error(`Backup schedule not found: ${scheduleId}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`Manually triggering backup for schedule ${scheduleId}`);
|
||||||
|
await this.executeBackup(schedule);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get all schedules
|
||||||
|
*/
|
||||||
|
getAllSchedules(): IBackupSchedule[] {
|
||||||
|
return this.oneboxRef.database.getAllBackupSchedules();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get schedule by ID
|
||||||
|
*/
|
||||||
|
getScheduleById(id: number): IBackupSchedule | null {
|
||||||
|
return this.oneboxRef.database.getBackupScheduleById(id);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get schedules for a service
|
||||||
|
*/
|
||||||
|
getSchedulesForService(serviceName: string): IBackupSchedule[] {
|
||||||
|
const service = this.oneboxRef.database.getServiceByName(serviceName);
|
||||||
|
if (!service) {
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
return this.oneboxRef.database.getBackupSchedulesByService(service.id!);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get retention presets
|
||||||
|
*/
|
||||||
|
getRetentionPresets(): typeof RETENTION_PRESETS {
|
||||||
|
return RETENTION_PRESETS;
|
||||||
|
}
|
||||||
|
|
||||||
|
// ========== Private Methods ==========
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Register a task for a schedule
|
||||||
|
*/
|
||||||
|
private async registerTask(schedule: IBackupSchedule): Promise<void> {
|
||||||
|
const taskName = `backup-${schedule.id}`;
|
||||||
|
|
||||||
|
const task = new plugins.taskbuffer.Task({
|
||||||
|
name: taskName,
|
||||||
|
taskFunction: async () => {
|
||||||
|
await this.executeBackup(schedule);
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Add and schedule the task
|
||||||
|
this.taskManager.addAndScheduleTask(task, schedule.cronExpression);
|
||||||
|
this.scheduledTasks.set(schedule.id!, task);
|
||||||
|
|
||||||
|
// Update next run time in database
|
||||||
|
this.updateNextRunTime(schedule.id!);
|
||||||
|
|
||||||
|
logger.debug(`Registered backup task: ${taskName} with cron: ${schedule.cronExpression}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Deschedule a task
|
||||||
|
*/
|
||||||
|
private async descheduleTask(scheduleId: number): Promise<void> {
|
||||||
|
const task = this.scheduledTasks.get(scheduleId);
|
||||||
|
if (task) {
|
||||||
|
await this.taskManager.descheduleTask(task);
|
||||||
|
this.scheduledTasks.delete(scheduleId);
|
||||||
|
logger.debug(`Descheduled backup task for schedule ${scheduleId}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute a backup for a schedule
|
||||||
|
*/
|
||||||
|
private async executeBackup(schedule: IBackupSchedule): Promise<void> {
|
||||||
|
const scopeDesc = this.getScopeDescription(schedule);
|
||||||
|
const servicesToBackup = this.getServicesForSchedule(schedule);
|
||||||
|
|
||||||
|
if (servicesToBackup.length === 0) {
|
||||||
|
logger.warn(`No services to backup for schedule ${schedule.id} (${scopeDesc})`);
|
||||||
|
this.oneboxRef.database.updateBackupSchedule(schedule.id!, {
|
||||||
|
lastRunAt: Date.now(),
|
||||||
|
lastStatus: 'success',
|
||||||
|
lastError: 'No matching services found',
|
||||||
|
});
|
||||||
|
this.updateNextRunTime(schedule.id!);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const retentionDesc = this.getRetentionDescription(schedule.retention);
|
||||||
|
logger.info(`Executing scheduled backup for ${scopeDesc}: ${servicesToBackup.length} service(s) (${retentionDesc})`);
|
||||||
|
|
||||||
|
let successCount = 0;
|
||||||
|
let failCount = 0;
|
||||||
|
const errors: string[] = [];
|
||||||
|
|
||||||
|
for (const service of servicesToBackup) {
|
||||||
|
try {
|
||||||
|
// Create backup with schedule ID
|
||||||
|
await this.oneboxRef.backupManager.createBackup(service.name, {
|
||||||
|
scheduleId: schedule.id,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Apply time-window based retention policy for this service
|
||||||
|
await this.applyRetention(schedule, service.id!);
|
||||||
|
|
||||||
|
successCount++;
|
||||||
|
logger.success(`Scheduled backup completed for ${service.name}`);
|
||||||
|
} catch (error) {
|
||||||
|
const errorMessage = getErrorMessage(error);
|
||||||
|
logger.error(`Scheduled backup failed for ${service.name}: ${errorMessage}`);
|
||||||
|
errors.push(`${service.name}: ${errorMessage}`);
|
||||||
|
failCount++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update schedule status
|
||||||
|
const lastStatus = failCount === 0 ? 'success' : 'failed';
|
||||||
|
const lastError = errors.length > 0 ? errors.join('; ') : null;
|
||||||
|
|
||||||
|
this.oneboxRef.database.updateBackupSchedule(schedule.id!, {
|
||||||
|
lastRunAt: Date.now(),
|
||||||
|
lastStatus,
|
||||||
|
lastError,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (failCount === 0) {
|
||||||
|
logger.success(`Scheduled backup completed for ${scopeDesc}: ${successCount} service(s)`);
|
||||||
|
} else {
|
||||||
|
logger.warn(`Scheduled backup partially failed for ${scopeDesc}: ${successCount} succeeded, ${failCount} failed`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update next run time
|
||||||
|
this.updateNextRunTime(schedule.id!);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Apply time-window based retention policy
|
||||||
|
* Works correctly regardless of backup frequency (cron schedule)
|
||||||
|
*/
|
||||||
|
private async applyRetention(schedule: IBackupSchedule, serviceId: number): Promise<void> {
|
||||||
|
// Get all backups for this schedule and service
|
||||||
|
const allBackups = this.oneboxRef.database.getBackupsByService(serviceId);
|
||||||
|
const backups = allBackups.filter(b => b.scheduleId === schedule.id);
|
||||||
|
|
||||||
|
if (backups.length === 0) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const { hourly, daily, weekly, monthly } = schedule.retention;
|
||||||
|
const now = Date.now();
|
||||||
|
const toKeep = new Set<number>();
|
||||||
|
|
||||||
|
// Hourly: Keep up to N most recent backups from last 24 hours
|
||||||
|
if (hourly > 0) {
|
||||||
|
const recentBackups = backups
|
||||||
|
.filter(b => now - b.createdAt < 24 * 60 * 60 * 1000)
|
||||||
|
.sort((a, b) => b.createdAt - a.createdAt)
|
||||||
|
.slice(0, hourly);
|
||||||
|
recentBackups.forEach(b => toKeep.add(b.id!));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Daily: Keep oldest backup per day for last N days
|
||||||
|
if (daily > 0) {
|
||||||
|
for (let i = 0; i < daily; i++) {
|
||||||
|
const dayStart = this.getStartOfDay(now, i);
|
||||||
|
const dayEnd = dayStart + 24 * 60 * 60 * 1000;
|
||||||
|
const dayBackups = backups.filter(b =>
|
||||||
|
b.createdAt >= dayStart && b.createdAt < dayEnd
|
||||||
|
);
|
||||||
|
if (dayBackups.length > 0) {
|
||||||
|
// Keep oldest from this day (most representative)
|
||||||
|
const oldest = dayBackups.sort((a, b) => a.createdAt - b.createdAt)[0];
|
||||||
|
toKeep.add(oldest.id!);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Weekly: Keep oldest backup per week for last N weeks
|
||||||
|
if (weekly > 0) {
|
||||||
|
for (let i = 0; i < weekly; i++) {
|
||||||
|
const weekStart = this.getStartOfWeek(now, i);
|
||||||
|
const weekEnd = weekStart + 7 * 24 * 60 * 60 * 1000;
|
||||||
|
const weekBackups = backups.filter(b =>
|
||||||
|
b.createdAt >= weekStart && b.createdAt < weekEnd
|
||||||
|
);
|
||||||
|
if (weekBackups.length > 0) {
|
||||||
|
const oldest = weekBackups.sort((a, b) => a.createdAt - b.createdAt)[0];
|
||||||
|
toKeep.add(oldest.id!);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Monthly: Keep oldest backup per month for last N months
|
||||||
|
if (monthly > 0) {
|
||||||
|
for (let i = 0; i < monthly; i++) {
|
||||||
|
const { start, end } = this.getMonthRange(now, i);
|
||||||
|
const monthBackups = backups.filter(b =>
|
||||||
|
b.createdAt >= start && b.createdAt < end
|
||||||
|
);
|
||||||
|
if (monthBackups.length > 0) {
|
||||||
|
const oldest = monthBackups.sort((a, b) => a.createdAt - b.createdAt)[0];
|
||||||
|
toKeep.add(oldest.id!);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Delete anything not in toKeep
|
||||||
|
for (const backup of backups) {
|
||||||
|
if (!toKeep.has(backup.id!)) {
|
||||||
|
try {
|
||||||
|
await this.oneboxRef.backupManager.deleteBackup(backup.id!);
|
||||||
|
logger.info(`Deleted backup ${backup.filename} (retention policy)`);
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn(`Failed to delete old backup ${backup.filename}: ${getErrorMessage(error)}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get start of day (midnight) for N days ago
|
||||||
|
*/
|
||||||
|
private getStartOfDay(now: number, daysAgo: number): number {
|
||||||
|
const date = new Date(now);
|
||||||
|
date.setDate(date.getDate() - daysAgo);
|
||||||
|
date.setHours(0, 0, 0, 0);
|
||||||
|
return date.getTime();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get start of week (Sunday midnight) for N weeks ago
|
||||||
|
*/
|
||||||
|
private getStartOfWeek(now: number, weeksAgo: number): number {
|
||||||
|
const date = new Date(now);
|
||||||
|
date.setDate(date.getDate() - (weeksAgo * 7) - date.getDay());
|
||||||
|
date.setHours(0, 0, 0, 0);
|
||||||
|
return date.getTime();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get month range for N months ago
|
||||||
|
*/
|
||||||
|
private getMonthRange(now: number, monthsAgo: number): { start: number; end: number } {
|
||||||
|
const date = new Date(now);
|
||||||
|
date.setMonth(date.getMonth() - monthsAgo);
|
||||||
|
date.setDate(1);
|
||||||
|
date.setHours(0, 0, 0, 0);
|
||||||
|
const start = date.getTime();
|
||||||
|
|
||||||
|
date.setMonth(date.getMonth() + 1);
|
||||||
|
const end = date.getTime();
|
||||||
|
|
||||||
|
return { start, end };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update next run time for a schedule
|
||||||
|
*/
|
||||||
|
private updateNextRunTime(scheduleId: number): void {
|
||||||
|
const schedule = this.oneboxRef.database.getBackupScheduleById(scheduleId);
|
||||||
|
if (!schedule) return;
|
||||||
|
|
||||||
|
const nextRunAt = this.calculateNextRun(schedule.cronExpression);
|
||||||
|
this.oneboxRef.database.updateBackupSchedule(scheduleId, { nextRunAt });
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calculate next run time from cron expression
|
||||||
|
*/
|
||||||
|
private calculateNextRun(cronExpression: string): number {
|
||||||
|
try {
|
||||||
|
// Get next scheduled runs from task manager
|
||||||
|
const scheduledTasks = this.taskManager.getScheduledTasks();
|
||||||
|
|
||||||
|
// Find our task and get its next run
|
||||||
|
for (const taskInfo of scheduledTasks) {
|
||||||
|
if (taskInfo.schedule === cronExpression && taskInfo.nextRun) {
|
||||||
|
return taskInfo.nextRun.getTime();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fallback: parse cron and calculate next occurrence
|
||||||
|
// Simple implementation for common patterns
|
||||||
|
const now = new Date();
|
||||||
|
const parts = cronExpression.split(' ');
|
||||||
|
|
||||||
|
if (parts.length === 5) {
|
||||||
|
const [minute, hour, dayOfMonth, month, dayOfWeek] = parts;
|
||||||
|
|
||||||
|
// For daily schedules (e.g., "0 2 * * *")
|
||||||
|
if (dayOfMonth === '*' && month === '*' && dayOfWeek === '*') {
|
||||||
|
const nextRun = new Date(now);
|
||||||
|
nextRun.setHours(parseInt(hour), parseInt(minute), 0, 0);
|
||||||
|
if (nextRun <= now) {
|
||||||
|
nextRun.setDate(nextRun.getDate() + 1);
|
||||||
|
}
|
||||||
|
return nextRun.getTime();
|
||||||
|
}
|
||||||
|
|
||||||
|
// For weekly schedules (e.g., "0 2 * * 0")
|
||||||
|
if (dayOfMonth === '*' && month === '*' && dayOfWeek !== '*') {
|
||||||
|
const targetDay = parseInt(dayOfWeek);
|
||||||
|
const nextRun = new Date(now);
|
||||||
|
nextRun.setHours(parseInt(hour), parseInt(minute), 0, 0);
|
||||||
|
const currentDay = now.getDay();
|
||||||
|
let daysUntilTarget = (targetDay - currentDay + 7) % 7;
|
||||||
|
if (daysUntilTarget === 0 && nextRun <= now) {
|
||||||
|
daysUntilTarget = 7;
|
||||||
|
}
|
||||||
|
nextRun.setDate(nextRun.getDate() + daysUntilTarget);
|
||||||
|
return nextRun.getTime();
|
||||||
|
}
|
||||||
|
|
||||||
|
// For monthly schedules (e.g., "0 2 1 * *")
|
||||||
|
if (dayOfMonth !== '*' && month === '*' && dayOfWeek === '*') {
|
||||||
|
const targetDay = parseInt(dayOfMonth);
|
||||||
|
const nextRun = new Date(now);
|
||||||
|
nextRun.setDate(targetDay);
|
||||||
|
nextRun.setHours(parseInt(hour), parseInt(minute), 0, 0);
|
||||||
|
if (nextRun <= now) {
|
||||||
|
nextRun.setMonth(nextRun.getMonth() + 1);
|
||||||
|
}
|
||||||
|
return nextRun.getTime();
|
||||||
|
}
|
||||||
|
|
||||||
|
// For yearly schedules (e.g., "0 2 1 1 *")
|
||||||
|
if (dayOfMonth !== '*' && month !== '*' && dayOfWeek === '*') {
|
||||||
|
const targetMonth = parseInt(month) - 1; // JavaScript months are 0-indexed
|
||||||
|
const targetDay = parseInt(dayOfMonth);
|
||||||
|
const nextRun = new Date(now);
|
||||||
|
nextRun.setMonth(targetMonth, targetDay);
|
||||||
|
nextRun.setHours(parseInt(hour), parseInt(minute), 0, 0);
|
||||||
|
if (nextRun <= now) {
|
||||||
|
nextRun.setFullYear(nextRun.getFullYear() + 1);
|
||||||
|
}
|
||||||
|
return nextRun.getTime();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default: next day at 2 AM
|
||||||
|
const fallback = new Date(now);
|
||||||
|
fallback.setDate(fallback.getDate() + 1);
|
||||||
|
fallback.setHours(2, 0, 0, 0);
|
||||||
|
return fallback.getTime();
|
||||||
|
} catch {
|
||||||
|
// On any error, return tomorrow at 2 AM
|
||||||
|
const fallback = new Date();
|
||||||
|
fallback.setDate(fallback.getDate() + 1);
|
||||||
|
fallback.setHours(2, 0, 0, 0);
|
||||||
|
return fallback.getTime();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get services that match a schedule based on its scope type
|
||||||
|
*/
|
||||||
|
private getServicesForSchedule(schedule: IBackupSchedule): IService[] {
|
||||||
|
const allServices = this.oneboxRef.database.getAllServices();
|
||||||
|
|
||||||
|
switch (schedule.scopeType) {
|
||||||
|
case 'all':
|
||||||
|
return allServices;
|
||||||
|
|
||||||
|
case 'pattern':
|
||||||
|
if (!schedule.scopePattern) return [];
|
||||||
|
return this.getServicesMatchingPattern(schedule.scopePattern);
|
||||||
|
|
||||||
|
case 'service':
|
||||||
|
if (!schedule.serviceId) return [];
|
||||||
|
const service = allServices.find(s => s.id === schedule.serviceId);
|
||||||
|
return service ? [service] : [];
|
||||||
|
|
||||||
|
default:
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get services that match a glob pattern
|
||||||
|
*/
|
||||||
|
private getServicesMatchingPattern(pattern: string): IService[] {
|
||||||
|
const allServices = this.oneboxRef.database.getAllServices();
|
||||||
|
return allServices.filter(s => this.matchesGlobPattern(s.name, pattern));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Simple glob pattern matching (supports * and ?)
|
||||||
|
*/
|
||||||
|
private matchesGlobPattern(text: string, pattern: string): boolean {
|
||||||
|
// Convert glob pattern to regex
|
||||||
|
// Escape special regex characters except * and ?
|
||||||
|
const regexPattern = pattern
|
||||||
|
.replace(/[.+^${}()|[\]\\]/g, '\\$&') // Escape special chars
|
||||||
|
.replace(/\*/g, '.*') // * matches any characters
|
||||||
|
.replace(/\?/g, '.'); // ? matches single character
|
||||||
|
|
||||||
|
const regex = new RegExp(`^${regexPattern}$`, 'i');
|
||||||
|
return regex.test(text);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get human-readable description of a schedule's scope
|
||||||
|
*/
|
||||||
|
private getScopeDescription(schedule: IBackupSchedule): string {
|
||||||
|
switch (schedule.scopeType) {
|
||||||
|
case 'all':
|
||||||
|
return 'all services';
|
||||||
|
case 'pattern':
|
||||||
|
return `pattern "${schedule.scopePattern}"`;
|
||||||
|
case 'service':
|
||||||
|
return `service "${schedule.serviceName}"`;
|
||||||
|
default:
|
||||||
|
return 'unknown scope';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get human-readable description of retention policy
|
||||||
|
*/
|
||||||
|
private getRetentionDescription(retention: IRetentionPolicy): string {
|
||||||
|
return `H:${retention.hourly} D:${retention.daily} W:${retention.weekly} M:${retention.monthly}`;
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -46,6 +46,59 @@ export class OneboxDaemon {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Ensure Docker is installed, installing it if necessary
|
||||||
|
*/
|
||||||
|
private async ensureDocker(): Promise<void> {
|
||||||
|
try {
|
||||||
|
const cmd = new Deno.Command('docker', {
|
||||||
|
args: ['--version'],
|
||||||
|
stdout: 'piped',
|
||||||
|
stderr: 'piped',
|
||||||
|
});
|
||||||
|
const result = await cmd.output();
|
||||||
|
if (result.success) {
|
||||||
|
const version = new TextDecoder().decode(result.stdout).trim();
|
||||||
|
logger.info(`Docker found: ${version}`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// docker command not found
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info('Docker not found. Installing Docker...');
|
||||||
|
const installCmd = new Deno.Command('bash', {
|
||||||
|
args: ['-c', 'curl -fsSL https://get.docker.com | sh'],
|
||||||
|
stdin: 'inherit',
|
||||||
|
stdout: 'inherit',
|
||||||
|
stderr: 'inherit',
|
||||||
|
});
|
||||||
|
const installResult = await installCmd.output();
|
||||||
|
if (!installResult.success) {
|
||||||
|
throw new Error('Failed to install Docker. Please install it manually: curl -fsSL https://get.docker.com | sh');
|
||||||
|
}
|
||||||
|
logger.success('Docker installed successfully');
|
||||||
|
|
||||||
|
// Initialize Docker Swarm
|
||||||
|
logger.info('Initializing Docker Swarm...');
|
||||||
|
const swarmCmd = new Deno.Command('docker', {
|
||||||
|
args: ['swarm', 'init'],
|
||||||
|
stdout: 'piped',
|
||||||
|
stderr: 'piped',
|
||||||
|
});
|
||||||
|
const swarmResult = await swarmCmd.output();
|
||||||
|
if (swarmResult.success) {
|
||||||
|
logger.success('Docker Swarm initialized');
|
||||||
|
} else {
|
||||||
|
const stderr = new TextDecoder().decode(swarmResult.stderr);
|
||||||
|
if (stderr.includes('already part of a swarm')) {
|
||||||
|
logger.info('Docker Swarm already initialized');
|
||||||
|
} else {
|
||||||
|
logger.warn(`Docker Swarm init warning: ${stderr.trim()}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Install systemd service
|
* Install systemd service
|
||||||
*/
|
*/
|
||||||
@@ -53,6 +106,9 @@ export class OneboxDaemon {
|
|||||||
try {
|
try {
|
||||||
logger.info('Installing Onebox daemon service...');
|
logger.info('Installing Onebox daemon service...');
|
||||||
|
|
||||||
|
// Ensure Docker is installed
|
||||||
|
await this.ensureDocker();
|
||||||
|
|
||||||
// Initialize smartdaemon if needed
|
// Initialize smartdaemon if needed
|
||||||
if (!this.smartdaemon) {
|
if (!this.smartdaemon) {
|
||||||
this.smartdaemon = new plugins.smartdaemon.SmartDaemon();
|
this.smartdaemon = new plugins.smartdaemon.SmartDaemon();
|
||||||
@@ -131,9 +187,9 @@ export class OneboxDaemon {
|
|||||||
// Start monitoring loop
|
// Start monitoring loop
|
||||||
this.startMonitoring();
|
this.startMonitoring();
|
||||||
|
|
||||||
// Start HTTP server
|
// Start OpsServer (serves new UI + TypedRequest API)
|
||||||
const httpPort = parseInt(this.oneboxRef.database.getSetting('httpPort') || '3000', 10);
|
const httpPort = parseInt(this.oneboxRef.database.getSetting('httpPort') || '3000', 10);
|
||||||
await this.oneboxRef.httpServer.start(httpPort);
|
await this.oneboxRef.opsServer.start(httpPort);
|
||||||
|
|
||||||
logger.success('Onebox daemon started');
|
logger.success('Onebox daemon started');
|
||||||
logger.info(`Web UI available at http://localhost:${httpPort}`);
|
logger.info(`Web UI available at http://localhost:${httpPort}`);
|
||||||
@@ -163,8 +219,8 @@ export class OneboxDaemon {
|
|||||||
// Stop monitoring
|
// Stop monitoring
|
||||||
this.stopMonitoring();
|
this.stopMonitoring();
|
||||||
|
|
||||||
// Stop HTTP server
|
// Stop OpsServer
|
||||||
await this.oneboxRef.httpServer.stop();
|
await this.oneboxRef.opsServer.stop();
|
||||||
|
|
||||||
// Remove PID file
|
// Remove PID file
|
||||||
await this.removePidFile();
|
await this.removePidFile();
|
||||||
@@ -280,31 +336,12 @@ export class OneboxDaemon {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Broadcast stats to WebSocket clients (real-time updates)
|
* Broadcast stats (placeholder for future WebSocket integration via OpsServer)
|
||||||
*/
|
*/
|
||||||
private async broadcastStats(): Promise<void> {
|
private async broadcastStats(): Promise<void> {
|
||||||
try {
|
// Stats broadcasting via WebSocket is not yet implemented in OpsServer.
|
||||||
const services = this.oneboxRef.services.listServices();
|
// Metrics are still collected and stored in the DB by collectMetrics().
|
||||||
const runningServices = services.filter(s => s.status === 'running' && s.containerID);
|
// The new UI fetches stats via TypedRequests on demand.
|
||||||
|
|
||||||
logger.info(`Broadcasting stats for ${runningServices.length} running services`);
|
|
||||||
|
|
||||||
for (const service of runningServices) {
|
|
||||||
try {
|
|
||||||
const stats = await this.oneboxRef.docker.getContainerStats(service.containerID!);
|
|
||||||
if (stats) {
|
|
||||||
logger.info(`Broadcasting stats for ${service.name}: CPU=${stats.cpuPercent.toFixed(1)}%, Mem=${Math.round(stats.memoryUsed / 1024 / 1024)}MB`);
|
|
||||||
this.oneboxRef.httpServer.broadcastStatsUpdate(service.name, stats);
|
|
||||||
} else {
|
|
||||||
logger.warn(`No stats returned for ${service.name} (containerID: ${service.containerID})`);
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
logger.warn(`Stats collection failed for ${service.name}: ${getErrorMessage(error)}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
logger.error(`Broadcast stats error: ${getErrorMessage(error)}`);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|||||||
@@ -8,7 +8,15 @@ import * as plugins from '../plugins.ts';
|
|||||||
import { logger } from '../logging.ts';
|
import { logger } from '../logging.ts';
|
||||||
import { getErrorMessage } from '../utils/error.ts';
|
import { getErrorMessage } from '../utils/error.ts';
|
||||||
import type { Onebox } from './onebox.ts';
|
import type { Onebox } from './onebox.ts';
|
||||||
import type { IApiResponse, ICreateRegistryTokenRequest, IRegistryTokenView, TPlatformServiceType, IContainerStats } from '../types.ts';
|
import type {
|
||||||
|
IApiResponse,
|
||||||
|
ICreateRegistryTokenRequest,
|
||||||
|
IRegistryTokenView,
|
||||||
|
TPlatformServiceType,
|
||||||
|
IContainerStats,
|
||||||
|
IBackupScheduleCreate,
|
||||||
|
IBackupScheduleUpdate,
|
||||||
|
} from '../types.ts';
|
||||||
|
|
||||||
export class OneboxHttpServer {
|
export class OneboxHttpServer {
|
||||||
private oneboxRef: Onebox;
|
private oneboxRef: Onebox;
|
||||||
@@ -319,6 +327,52 @@ export class OneboxHttpServer {
|
|||||||
return await this.handleGetNetworkStatsRequest();
|
return await this.handleGetNetworkStatsRequest();
|
||||||
} else if (path === '/api/network/traffic-stats' && method === 'GET') {
|
} else if (path === '/api/network/traffic-stats' && method === 'GET') {
|
||||||
return await this.handleGetTrafficStatsRequest(new URL(req.url));
|
return await this.handleGetTrafficStatsRequest(new URL(req.url));
|
||||||
|
// Backup endpoints
|
||||||
|
} else if (path === '/api/backups' && method === 'GET') {
|
||||||
|
return await this.handleListBackupsRequest();
|
||||||
|
} else if (path.match(/^\/api\/services\/[^/]+\/backups$/) && method === 'GET') {
|
||||||
|
const serviceName = path.split('/')[3];
|
||||||
|
return await this.handleListServiceBackupsRequest(serviceName);
|
||||||
|
} else if (path.match(/^\/api\/services\/[^/]+\/backup$/) && method === 'POST') {
|
||||||
|
const serviceName = path.split('/')[3];
|
||||||
|
return await this.handleCreateBackupRequest(serviceName);
|
||||||
|
} else if (path.match(/^\/api\/backups\/\d+$/) && method === 'GET') {
|
||||||
|
const backupId = Number(path.split('/').pop());
|
||||||
|
return await this.handleGetBackupRequest(backupId);
|
||||||
|
} else if (path.match(/^\/api\/backups\/\d+\/download$/) && method === 'GET') {
|
||||||
|
const backupId = Number(path.split('/')[3]);
|
||||||
|
return await this.handleDownloadBackupRequest(backupId);
|
||||||
|
} else if (path.match(/^\/api\/backups\/\d+$/) && method === 'DELETE') {
|
||||||
|
const backupId = Number(path.split('/').pop());
|
||||||
|
return await this.handleDeleteBackupRequest(backupId);
|
||||||
|
} else if (path === '/api/backups/restore' && method === 'POST') {
|
||||||
|
return await this.handleRestoreBackupRequest(req);
|
||||||
|
} else if (path === '/api/backups/import' && method === 'POST') {
|
||||||
|
return await this.handleImportBackupRequest(req);
|
||||||
|
} else if (path === '/api/settings/backup-password' && method === 'POST') {
|
||||||
|
return await this.handleSetBackupPasswordRequest(req);
|
||||||
|
} else if (path === '/api/settings/backup-password' && method === 'GET') {
|
||||||
|
return await this.handleCheckBackupPasswordRequest();
|
||||||
|
// Backup Schedule endpoints
|
||||||
|
} else if (path === '/api/backup-schedules' && method === 'GET') {
|
||||||
|
return await this.handleListBackupSchedulesRequest();
|
||||||
|
} else if (path === '/api/backup-schedules' && method === 'POST') {
|
||||||
|
return await this.handleCreateBackupScheduleRequest(req);
|
||||||
|
} else if (path.match(/^\/api\/backup-schedules\/\d+$/) && method === 'GET') {
|
||||||
|
const scheduleId = Number(path.split('/').pop());
|
||||||
|
return await this.handleGetBackupScheduleRequest(scheduleId);
|
||||||
|
} else if (path.match(/^\/api\/backup-schedules\/\d+$/) && method === 'PUT') {
|
||||||
|
const scheduleId = Number(path.split('/').pop());
|
||||||
|
return await this.handleUpdateBackupScheduleRequest(scheduleId, req);
|
||||||
|
} else if (path.match(/^\/api\/backup-schedules\/\d+$/) && method === 'DELETE') {
|
||||||
|
const scheduleId = Number(path.split('/').pop());
|
||||||
|
return await this.handleDeleteBackupScheduleRequest(scheduleId);
|
||||||
|
} else if (path.match(/^\/api\/backup-schedules\/\d+\/trigger$/) && method === 'POST') {
|
||||||
|
const scheduleId = Number(path.split('/')[3]);
|
||||||
|
return await this.handleTriggerBackupScheduleRequest(scheduleId);
|
||||||
|
} else if (path.match(/^\/api\/services\/[^/]+\/backup-schedules$/) && method === 'GET') {
|
||||||
|
const serviceName = path.split('/')[3];
|
||||||
|
return await this.handleListServiceBackupSchedulesRequest(serviceName);
|
||||||
} else {
|
} else {
|
||||||
return this.jsonResponse({ success: false, error: 'Not found' }, 404);
|
return this.jsonResponse({ success: false, error: 'Not found' }, 404);
|
||||||
}
|
}
|
||||||
@@ -2017,6 +2071,626 @@ export class OneboxHttpServer {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============ Backup Endpoints ============
|
||||||
|
|
||||||
|
/**
|
||||||
|
* List all backups
|
||||||
|
*/
|
||||||
|
private async handleListBackupsRequest(): Promise<Response> {
|
||||||
|
try {
|
||||||
|
const backups = this.oneboxRef.backupManager.listBackups();
|
||||||
|
return this.jsonResponse({ success: true, data: backups });
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`Failed to list backups: ${getErrorMessage(error)}`);
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: getErrorMessage(error) || 'Failed to list backups',
|
||||||
|
}, 500);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* List backups for a specific service
|
||||||
|
*/
|
||||||
|
private async handleListServiceBackupsRequest(serviceName: string): Promise<Response> {
|
||||||
|
try {
|
||||||
|
const service = this.oneboxRef.services.getService(serviceName);
|
||||||
|
if (!service) {
|
||||||
|
return this.jsonResponse({ success: false, error: 'Service not found' }, 404);
|
||||||
|
}
|
||||||
|
|
||||||
|
const backups = this.oneboxRef.backupManager.listBackups(serviceName);
|
||||||
|
return this.jsonResponse({ success: true, data: backups });
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`Failed to list backups for service ${serviceName}: ${getErrorMessage(error)}`);
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: getErrorMessage(error) || 'Failed to list backups',
|
||||||
|
}, 500);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a backup for a service
|
||||||
|
*/
|
||||||
|
private async handleCreateBackupRequest(serviceName: string): Promise<Response> {
|
||||||
|
try {
|
||||||
|
const service = this.oneboxRef.services.getService(serviceName);
|
||||||
|
if (!service) {
|
||||||
|
return this.jsonResponse({ success: false, error: 'Service not found' }, 404);
|
||||||
|
}
|
||||||
|
|
||||||
|
const result = await this.oneboxRef.backupManager.createBackup(serviceName);
|
||||||
|
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: true,
|
||||||
|
message: `Backup created for service ${serviceName}`,
|
||||||
|
data: result.backup,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`Failed to create backup for service ${serviceName}: ${getErrorMessage(error)}`);
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: getErrorMessage(error) || 'Failed to create backup',
|
||||||
|
}, 500);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get a specific backup by ID
|
||||||
|
*/
|
||||||
|
private async handleGetBackupRequest(backupId: number): Promise<Response> {
|
||||||
|
try {
|
||||||
|
const backup = this.oneboxRef.database.getBackupById(backupId);
|
||||||
|
if (!backup) {
|
||||||
|
return this.jsonResponse({ success: false, error: 'Backup not found' }, 404);
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.jsonResponse({ success: true, data: backup });
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`Failed to get backup ${backupId}: ${getErrorMessage(error)}`);
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: getErrorMessage(error) || 'Failed to get backup',
|
||||||
|
}, 500);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Download a backup file
|
||||||
|
*/
|
||||||
|
private async handleDownloadBackupRequest(backupId: number): Promise<Response> {
|
||||||
|
try {
|
||||||
|
const filePath = this.oneboxRef.backupManager.getBackupFilePath(backupId);
|
||||||
|
if (!filePath) {
|
||||||
|
return this.jsonResponse({ success: false, error: 'Backup not found' }, 404);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if file exists
|
||||||
|
try {
|
||||||
|
await Deno.stat(filePath);
|
||||||
|
} catch {
|
||||||
|
return this.jsonResponse({ success: false, error: 'Backup file not found on disk' }, 404);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read file and return as download
|
||||||
|
const backup = this.oneboxRef.database.getBackupById(backupId);
|
||||||
|
const file = await Deno.readFile(filePath);
|
||||||
|
|
||||||
|
return new Response(file, {
|
||||||
|
status: 200,
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/octet-stream',
|
||||||
|
'Content-Disposition': `attachment; filename="${backup?.filename || 'backup.tar.enc'}"`,
|
||||||
|
'Content-Length': String(file.length),
|
||||||
|
},
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`Failed to download backup ${backupId}: ${getErrorMessage(error)}`);
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: getErrorMessage(error) || 'Failed to download backup',
|
||||||
|
}, 500);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Delete a backup
|
||||||
|
*/
|
||||||
|
private async handleDeleteBackupRequest(backupId: number): Promise<Response> {
|
||||||
|
try {
|
||||||
|
const backup = this.oneboxRef.database.getBackupById(backupId);
|
||||||
|
if (!backup) {
|
||||||
|
return this.jsonResponse({ success: false, error: 'Backup not found' }, 404);
|
||||||
|
}
|
||||||
|
|
||||||
|
await this.oneboxRef.backupManager.deleteBackup(backupId);
|
||||||
|
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: true,
|
||||||
|
message: 'Backup deleted successfully',
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`Failed to delete backup ${backupId}: ${getErrorMessage(error)}`);
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: getErrorMessage(error) || 'Failed to delete backup',
|
||||||
|
}, 500);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Restore a backup
|
||||||
|
*/
|
||||||
|
private async handleRestoreBackupRequest(req: Request): Promise<Response> {
|
||||||
|
try {
|
||||||
|
const body = await req.json();
|
||||||
|
const { backupId, mode, newServiceName, overwriteExisting, skipPlatformData } = body;
|
||||||
|
|
||||||
|
if (!backupId) {
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: 'Backup ID is required',
|
||||||
|
}, 400);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!mode || !['restore', 'import', 'clone'].includes(mode)) {
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: 'Valid mode required: restore, import, or clone',
|
||||||
|
}, 400);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get backup file path
|
||||||
|
const filePath = this.oneboxRef.backupManager.getBackupFilePath(backupId);
|
||||||
|
if (!filePath) {
|
||||||
|
return this.jsonResponse({ success: false, error: 'Backup not found' }, 404);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate mode-specific requirements
|
||||||
|
if ((mode === 'import' || mode === 'clone') && !newServiceName) {
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: `New service name required for '${mode}' mode`,
|
||||||
|
}, 400);
|
||||||
|
}
|
||||||
|
|
||||||
|
const result = await this.oneboxRef.backupManager.restoreBackup(filePath, {
|
||||||
|
mode,
|
||||||
|
newServiceName,
|
||||||
|
overwriteExisting: overwriteExisting === true,
|
||||||
|
skipPlatformData: skipPlatformData === true,
|
||||||
|
});
|
||||||
|
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: true,
|
||||||
|
message: `Backup restored successfully as service '${result.service.name}'`,
|
||||||
|
data: {
|
||||||
|
service: result.service,
|
||||||
|
platformResourcesRestored: result.platformResourcesRestored,
|
||||||
|
warnings: result.warnings,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`Failed to restore backup: ${getErrorMessage(error)}`);
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: getErrorMessage(error) || 'Failed to restore backup',
|
||||||
|
}, 500);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Import a backup from file upload or URL
|
||||||
|
*/
|
||||||
|
private async handleImportBackupRequest(req: Request): Promise<Response> {
|
||||||
|
try {
|
||||||
|
const contentType = req.headers.get('content-type') || '';
|
||||||
|
let filePath: string | null = null;
|
||||||
|
let newServiceName: string | undefined;
|
||||||
|
let tempFile = false;
|
||||||
|
|
||||||
|
if (contentType.includes('multipart/form-data')) {
|
||||||
|
// Handle file upload
|
||||||
|
const formData = await req.formData();
|
||||||
|
const file = formData.get('file');
|
||||||
|
newServiceName = formData.get('newServiceName')?.toString() || undefined;
|
||||||
|
|
||||||
|
if (!file || !(file instanceof File)) {
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: 'No file provided',
|
||||||
|
}, 400);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate file extension
|
||||||
|
if (!file.name.endsWith('.tar.enc')) {
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: 'Invalid file format. Expected .tar.enc file',
|
||||||
|
}, 400);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save to temp location
|
||||||
|
const tempDir = './.nogit/temp-imports';
|
||||||
|
await Deno.mkdir(tempDir, { recursive: true });
|
||||||
|
filePath = `${tempDir}/${Date.now()}-${file.name}`;
|
||||||
|
tempFile = true;
|
||||||
|
|
||||||
|
const arrayBuffer = await file.arrayBuffer();
|
||||||
|
await Deno.writeFile(filePath, new Uint8Array(arrayBuffer));
|
||||||
|
|
||||||
|
logger.info(`Saved uploaded backup to ${filePath}`);
|
||||||
|
} else {
|
||||||
|
// Handle JSON body with URL
|
||||||
|
const body = await req.json();
|
||||||
|
const { url, newServiceName: serviceName } = body;
|
||||||
|
newServiceName = serviceName;
|
||||||
|
|
||||||
|
if (!url) {
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: 'URL is required when not uploading a file',
|
||||||
|
}, 400);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Download from URL
|
||||||
|
const tempDir = './.nogit/temp-imports';
|
||||||
|
await Deno.mkdir(tempDir, { recursive: true });
|
||||||
|
|
||||||
|
const urlFilename = url.split('/').pop() || 'backup.tar.enc';
|
||||||
|
filePath = `${tempDir}/${Date.now()}-${urlFilename}`;
|
||||||
|
tempFile = true;
|
||||||
|
|
||||||
|
logger.info(`Downloading backup from ${url}...`);
|
||||||
|
const response = await fetch(url);
|
||||||
|
if (!response.ok) {
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: `Failed to download from URL: ${response.statusText}`,
|
||||||
|
}, 400);
|
||||||
|
}
|
||||||
|
|
||||||
|
const arrayBuffer = await response.arrayBuffer();
|
||||||
|
await Deno.writeFile(filePath, new Uint8Array(arrayBuffer));
|
||||||
|
|
||||||
|
logger.info(`Downloaded backup to ${filePath}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Import using restoreBackup with mode='import'
|
||||||
|
const result = await this.oneboxRef.backupManager.restoreBackup(filePath, {
|
||||||
|
mode: 'import',
|
||||||
|
newServiceName,
|
||||||
|
overwriteExisting: false,
|
||||||
|
skipPlatformData: false,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Clean up temp file
|
||||||
|
if (tempFile && filePath) {
|
||||||
|
try {
|
||||||
|
await Deno.remove(filePath);
|
||||||
|
} catch {
|
||||||
|
// Ignore cleanup errors
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: true,
|
||||||
|
message: `Backup imported successfully as service '${result.service.name}'`,
|
||||||
|
data: {
|
||||||
|
service: result.service,
|
||||||
|
platformResourcesRestored: result.platformResourcesRestored,
|
||||||
|
warnings: result.warnings,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`Failed to import backup: ${getErrorMessage(error)}`);
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: getErrorMessage(error) || 'Failed to import backup',
|
||||||
|
}, 500);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set backup encryption password
|
||||||
|
*/
|
||||||
|
private async handleSetBackupPasswordRequest(req: Request): Promise<Response> {
|
||||||
|
try {
|
||||||
|
const body = await req.json();
|
||||||
|
const { password } = body;
|
||||||
|
|
||||||
|
if (!password || typeof password !== 'string') {
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: 'Password is required',
|
||||||
|
}, 400);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (password.length < 8) {
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: 'Password must be at least 8 characters',
|
||||||
|
}, 400);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Store password in settings
|
||||||
|
this.oneboxRef.database.setSetting('backup_encryption_password', password);
|
||||||
|
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: true,
|
||||||
|
message: 'Backup password set successfully',
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`Failed to set backup password: ${getErrorMessage(error)}`);
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: getErrorMessage(error) || 'Failed to set backup password',
|
||||||
|
}, 500);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if backup password is configured
|
||||||
|
*/
|
||||||
|
private async handleCheckBackupPasswordRequest(): Promise<Response> {
|
||||||
|
try {
|
||||||
|
const password = this.oneboxRef.database.getSetting('backup_encryption_password');
|
||||||
|
const isConfigured = password !== null && password.length > 0;
|
||||||
|
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: true,
|
||||||
|
data: {
|
||||||
|
isConfigured,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`Failed to check backup password: ${getErrorMessage(error)}`);
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: getErrorMessage(error) || 'Failed to check backup password',
|
||||||
|
}, 500);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============ Backup Schedule Endpoints ============
|
||||||
|
|
||||||
|
/**
|
||||||
|
* List all backup schedules
|
||||||
|
*/
|
||||||
|
private async handleListBackupSchedulesRequest(): Promise<Response> {
|
||||||
|
try {
|
||||||
|
const schedules = this.oneboxRef.backupScheduler.getAllSchedules();
|
||||||
|
return this.jsonResponse({ success: true, data: schedules });
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`Failed to list backup schedules: ${getErrorMessage(error)}`);
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: getErrorMessage(error) || 'Failed to list backup schedules',
|
||||||
|
}, 500);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new backup schedule
|
||||||
|
*/
|
||||||
|
private async handleCreateBackupScheduleRequest(req: Request): Promise<Response> {
|
||||||
|
try {
|
||||||
|
const body = await req.json() as IBackupScheduleCreate;
|
||||||
|
|
||||||
|
// Validate scope type
|
||||||
|
if (!body.scopeType) {
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: 'Scope type is required (all, pattern, or service)',
|
||||||
|
}, 400);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!['all', 'pattern', 'service'].includes(body.scopeType)) {
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: 'Invalid scope type. Must be: all, pattern, or service',
|
||||||
|
}, 400);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate scope-specific requirements
|
||||||
|
if (body.scopeType === 'service' && !body.serviceName) {
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: 'Service name is required for service-specific schedules',
|
||||||
|
}, 400);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (body.scopeType === 'pattern' && !body.scopePattern) {
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: 'Scope pattern is required for pattern-based schedules',
|
||||||
|
}, 400);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!body.cronExpression) {
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: 'Cron expression is required',
|
||||||
|
}, 400);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!body.retention) {
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: 'Retention policy is required',
|
||||||
|
}, 400);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate retention policy
|
||||||
|
const { hourly, daily, weekly, monthly } = body.retention;
|
||||||
|
if (typeof hourly !== 'number' || typeof daily !== 'number' ||
|
||||||
|
typeof weekly !== 'number' || typeof monthly !== 'number') {
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: 'Retention policy must have hourly, daily, weekly, and monthly as numbers',
|
||||||
|
}, 400);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (hourly < 0 || daily < 0 || weekly < 0 || monthly < 0) {
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: 'Retention values must be non-negative',
|
||||||
|
}, 400);
|
||||||
|
}
|
||||||
|
|
||||||
|
const schedule = await this.oneboxRef.backupScheduler.createSchedule(body);
|
||||||
|
|
||||||
|
// Build descriptive message based on scope type
|
||||||
|
let scopeDesc: string;
|
||||||
|
switch (body.scopeType) {
|
||||||
|
case 'all':
|
||||||
|
scopeDesc = 'all services';
|
||||||
|
break;
|
||||||
|
case 'pattern':
|
||||||
|
scopeDesc = `pattern '${body.scopePattern}'`;
|
||||||
|
break;
|
||||||
|
case 'service':
|
||||||
|
scopeDesc = `service '${body.serviceName}'`;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: true,
|
||||||
|
message: `Backup schedule created for ${scopeDesc}`,
|
||||||
|
data: schedule,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`Failed to create backup schedule: ${getErrorMessage(error)}`);
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: getErrorMessage(error) || 'Failed to create backup schedule',
|
||||||
|
}, 500);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get a specific backup schedule
|
||||||
|
*/
|
||||||
|
private async handleGetBackupScheduleRequest(scheduleId: number): Promise<Response> {
|
||||||
|
try {
|
||||||
|
const schedule = this.oneboxRef.backupScheduler.getScheduleById(scheduleId);
|
||||||
|
if (!schedule) {
|
||||||
|
return this.jsonResponse({ success: false, error: 'Backup schedule not found' }, 404);
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.jsonResponse({ success: true, data: schedule });
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`Failed to get backup schedule ${scheduleId}: ${getErrorMessage(error)}`);
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: getErrorMessage(error) || 'Failed to get backup schedule',
|
||||||
|
}, 500);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update a backup schedule
|
||||||
|
*/
|
||||||
|
private async handleUpdateBackupScheduleRequest(scheduleId: number, req: Request): Promise<Response> {
|
||||||
|
try {
|
||||||
|
const body = await req.json() as IBackupScheduleUpdate;
|
||||||
|
|
||||||
|
// Validate retention policy if provided
|
||||||
|
if (body.retention) {
|
||||||
|
const { hourly, daily, weekly, monthly } = body.retention;
|
||||||
|
if (typeof hourly !== 'number' || typeof daily !== 'number' ||
|
||||||
|
typeof weekly !== 'number' || typeof monthly !== 'number') {
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: 'Retention policy must have hourly, daily, weekly, and monthly as numbers',
|
||||||
|
}, 400);
|
||||||
|
}
|
||||||
|
if (hourly < 0 || daily < 0 || weekly < 0 || monthly < 0) {
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: 'Retention values must be non-negative',
|
||||||
|
}, 400);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const schedule = await this.oneboxRef.backupScheduler.updateSchedule(scheduleId, body);
|
||||||
|
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: true,
|
||||||
|
message: 'Backup schedule updated',
|
||||||
|
data: schedule,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`Failed to update backup schedule ${scheduleId}: ${getErrorMessage(error)}`);
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: getErrorMessage(error) || 'Failed to update backup schedule',
|
||||||
|
}, 500);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Delete a backup schedule
|
||||||
|
*/
|
||||||
|
private async handleDeleteBackupScheduleRequest(scheduleId: number): Promise<Response> {
|
||||||
|
try {
|
||||||
|
await this.oneboxRef.backupScheduler.deleteSchedule(scheduleId);
|
||||||
|
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: true,
|
||||||
|
message: 'Backup schedule deleted',
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`Failed to delete backup schedule ${scheduleId}: ${getErrorMessage(error)}`);
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: getErrorMessage(error) || 'Failed to delete backup schedule',
|
||||||
|
}, 500);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Trigger immediate backup for a schedule
|
||||||
|
*/
|
||||||
|
private async handleTriggerBackupScheduleRequest(scheduleId: number): Promise<Response> {
|
||||||
|
try {
|
||||||
|
await this.oneboxRef.backupScheduler.triggerBackup(scheduleId);
|
||||||
|
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: true,
|
||||||
|
message: 'Backup triggered successfully',
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`Failed to trigger backup for schedule ${scheduleId}: ${getErrorMessage(error)}`);
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: getErrorMessage(error) || 'Failed to trigger backup',
|
||||||
|
}, 500);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* List backup schedules for a specific service
|
||||||
|
*/
|
||||||
|
private async handleListServiceBackupSchedulesRequest(serviceName: string): Promise<Response> {
|
||||||
|
try {
|
||||||
|
const service = this.oneboxRef.services.getService(serviceName);
|
||||||
|
if (!service) {
|
||||||
|
return this.jsonResponse({ success: false, error: 'Service not found' }, 404);
|
||||||
|
}
|
||||||
|
|
||||||
|
const schedules = this.oneboxRef.backupScheduler.getSchedulesForService(serviceName);
|
||||||
|
return this.jsonResponse({ success: true, data: schedules });
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`Failed to list backup schedules for service ${serviceName}: ${getErrorMessage(error)}`);
|
||||||
|
return this.jsonResponse({
|
||||||
|
success: false,
|
||||||
|
error: getErrorMessage(error) || 'Failed to list backup schedules',
|
||||||
|
}, 500);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Helper to create JSON response
|
* Helper to create JSON response
|
||||||
*/
|
*/
|
||||||
|
|||||||
@@ -20,6 +20,9 @@ import { CertRequirementManager } from './cert-requirement-manager.ts';
|
|||||||
import { RegistryManager } from './registry.ts';
|
import { RegistryManager } from './registry.ts';
|
||||||
import { PlatformServicesManager } from './platform-services/index.ts';
|
import { PlatformServicesManager } from './platform-services/index.ts';
|
||||||
import { CaddyLogReceiver } from './caddy-log-receiver.ts';
|
import { CaddyLogReceiver } from './caddy-log-receiver.ts';
|
||||||
|
import { BackupManager } from './backup-manager.ts';
|
||||||
|
import { BackupScheduler } from './backup-scheduler.ts';
|
||||||
|
import { OpsServer } from '../opsserver/index.ts';
|
||||||
|
|
||||||
export class Onebox {
|
export class Onebox {
|
||||||
public database: OneboxDatabase;
|
public database: OneboxDatabase;
|
||||||
@@ -36,6 +39,9 @@ export class Onebox {
|
|||||||
public registry: RegistryManager;
|
public registry: RegistryManager;
|
||||||
public platformServices: PlatformServicesManager;
|
public platformServices: PlatformServicesManager;
|
||||||
public caddyLogReceiver: CaddyLogReceiver;
|
public caddyLogReceiver: CaddyLogReceiver;
|
||||||
|
public backupManager: BackupManager;
|
||||||
|
public backupScheduler: BackupScheduler;
|
||||||
|
public opsServer: OpsServer;
|
||||||
|
|
||||||
private initialized = false;
|
private initialized = false;
|
||||||
|
|
||||||
@@ -67,6 +73,15 @@ export class Onebox {
|
|||||||
|
|
||||||
// Initialize Caddy log receiver
|
// Initialize Caddy log receiver
|
||||||
this.caddyLogReceiver = new CaddyLogReceiver(9999);
|
this.caddyLogReceiver = new CaddyLogReceiver(9999);
|
||||||
|
|
||||||
|
// Initialize Backup manager
|
||||||
|
this.backupManager = new BackupManager(this);
|
||||||
|
|
||||||
|
// Initialize Backup scheduler
|
||||||
|
this.backupScheduler = new BackupScheduler(this);
|
||||||
|
|
||||||
|
// Initialize OpsServer (TypedRequest-based server)
|
||||||
|
this.opsServer = new OpsServer(this);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -161,6 +176,14 @@ export class Onebox {
|
|||||||
// Start auto-update monitoring for registry services
|
// Start auto-update monitoring for registry services
|
||||||
this.services.startAutoUpdateMonitoring();
|
this.services.startAutoUpdateMonitoring();
|
||||||
|
|
||||||
|
// Initialize Backup Scheduler (non-critical)
|
||||||
|
try {
|
||||||
|
await this.backupScheduler.init();
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Backup scheduler initialization failed - scheduled backups will be disabled');
|
||||||
|
logger.warn(`Error: ${getErrorMessage(error)}`);
|
||||||
|
}
|
||||||
|
|
||||||
this.initialized = true;
|
this.initialized = true;
|
||||||
logger.success('Onebox initialized successfully');
|
logger.success('Onebox initialized successfully');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
@@ -312,17 +335,17 @@ export class Onebox {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Start HTTP server
|
* Start OpsServer (TypedRequest-based, serves new UI)
|
||||||
*/
|
*/
|
||||||
async startHttpServer(port?: number): Promise<void> {
|
async startHttpServer(port?: number): Promise<void> {
|
||||||
await this.httpServer.start(port);
|
await this.opsServer.start(port || 3000);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Stop HTTP server
|
* Stop OpsServer
|
||||||
*/
|
*/
|
||||||
async stopHttpServer(): Promise<void> {
|
async stopHttpServer(): Promise<void> {
|
||||||
await this.httpServer.stop();
|
await this.opsServer.stop();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -332,11 +355,14 @@ export class Onebox {
|
|||||||
try {
|
try {
|
||||||
logger.info('Shutting down Onebox...');
|
logger.info('Shutting down Onebox...');
|
||||||
|
|
||||||
|
// Stop backup scheduler
|
||||||
|
await this.backupScheduler.stop();
|
||||||
|
|
||||||
// Stop daemon if running
|
// Stop daemon if running
|
||||||
await this.daemon.stop();
|
await this.daemon.stop();
|
||||||
|
|
||||||
// Stop HTTP server if running
|
// Stop OpsServer if running
|
||||||
await this.httpServer.stop();
|
await this.opsServer.stop();
|
||||||
|
|
||||||
// Stop reverse proxy if running
|
// Stop reverse proxy if running
|
||||||
await this.reverseProxy.stop();
|
await this.reverseProxy.stop();
|
||||||
|
|||||||
85
ts/cli.ts
85
ts/cli.ts
@@ -72,6 +72,10 @@ export async function runCli(): Promise<void> {
|
|||||||
await handleStatusCommand(onebox);
|
await handleStatusCommand(onebox);
|
||||||
break;
|
break;
|
||||||
|
|
||||||
|
case 'upgrade':
|
||||||
|
await handleUpgradeCommand();
|
||||||
|
break;
|
||||||
|
|
||||||
default:
|
default:
|
||||||
logger.error(`Unknown command: ${command}`);
|
logger.error(`Unknown command: ${command}`);
|
||||||
printHelp();
|
printHelp();
|
||||||
@@ -286,8 +290,8 @@ async function handleServerCommand(onebox: Onebox, args: string[]) {
|
|||||||
|
|
||||||
logger.info('Starting Onebox server...');
|
logger.info('Starting Onebox server...');
|
||||||
|
|
||||||
// Start HTTP server
|
// Start OpsServer (serves new UI + TypedRequest API)
|
||||||
await onebox.httpServer.start(port);
|
await onebox.opsServer.start(port);
|
||||||
|
|
||||||
// Start monitoring if requested
|
// Start monitoring if requested
|
||||||
if (monitor) {
|
if (monitor) {
|
||||||
@@ -308,7 +312,7 @@ async function handleServerCommand(onebox: Onebox, args: string[]) {
|
|||||||
if (monitor) {
|
if (monitor) {
|
||||||
onebox.daemon.stopMonitoring();
|
onebox.daemon.stopMonitoring();
|
||||||
}
|
}
|
||||||
await onebox.httpServer.stop();
|
await onebox.opsServer.stop();
|
||||||
await onebox.shutdown();
|
await onebox.shutdown();
|
||||||
Deno.exit(0);
|
Deno.exit(0);
|
||||||
};
|
};
|
||||||
@@ -386,6 +390,78 @@ async function handleStatusCommand(onebox: Onebox) {
|
|||||||
console.log(JSON.stringify(status, null, 2));
|
console.log(JSON.stringify(status, null, 2));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Upgrade command - self-update onebox to latest version
|
||||||
|
async function handleUpgradeCommand(): Promise<void> {
|
||||||
|
// Check if running as root
|
||||||
|
if (Deno.uid() !== 0) {
|
||||||
|
logger.error('This command must be run as root to upgrade Onebox.');
|
||||||
|
logger.info('Try: sudo onebox upgrade');
|
||||||
|
Deno.exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info('Checking for updates...');
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Get current version
|
||||||
|
const currentVersion = projectInfo.version;
|
||||||
|
|
||||||
|
// Fetch latest version from Gitea API
|
||||||
|
const apiUrl = 'https://code.foss.global/api/v1/repos/serve.zone/onebox/releases/latest';
|
||||||
|
const curlCmd = new Deno.Command('curl', {
|
||||||
|
args: ['-sSL', apiUrl],
|
||||||
|
stdout: 'piped',
|
||||||
|
stderr: 'piped',
|
||||||
|
});
|
||||||
|
const curlResult = await curlCmd.output();
|
||||||
|
const response = new TextDecoder().decode(curlResult.stdout);
|
||||||
|
const release = JSON.parse(response);
|
||||||
|
const latestVersion = release.tag_name as string; // e.g., "v1.11.0"
|
||||||
|
|
||||||
|
// Normalize versions for comparison (ensure both have "v" prefix)
|
||||||
|
const normalizedCurrent = currentVersion.startsWith('v')
|
||||||
|
? currentVersion
|
||||||
|
: `v${currentVersion}`;
|
||||||
|
const normalizedLatest = latestVersion.startsWith('v')
|
||||||
|
? latestVersion
|
||||||
|
: `v${latestVersion}`;
|
||||||
|
|
||||||
|
console.log(` Current version: ${normalizedCurrent}`);
|
||||||
|
console.log(` Latest version: ${normalizedLatest}`);
|
||||||
|
console.log('');
|
||||||
|
|
||||||
|
// Compare normalized versions
|
||||||
|
if (normalizedCurrent === normalizedLatest) {
|
||||||
|
logger.success('Already up to date!');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`New version available: ${latestVersion}`);
|
||||||
|
logger.info('Downloading and installing...');
|
||||||
|
console.log('');
|
||||||
|
|
||||||
|
// Download and run the install script
|
||||||
|
const installUrl = 'https://code.foss.global/serve.zone/onebox/raw/branch/main/install.sh';
|
||||||
|
const installCmd = new Deno.Command('bash', {
|
||||||
|
args: ['-c', `curl -sSL ${installUrl} | bash`],
|
||||||
|
stdin: 'inherit',
|
||||||
|
stdout: 'inherit',
|
||||||
|
stderr: 'inherit',
|
||||||
|
});
|
||||||
|
const installResult = await installCmd.output();
|
||||||
|
|
||||||
|
if (!installResult.success) {
|
||||||
|
logger.error('Upgrade failed');
|
||||||
|
Deno.exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('');
|
||||||
|
logger.success(`Upgraded to ${latestVersion}`);
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`Upgrade failed: ${getErrorMessage(error)}`);
|
||||||
|
Deno.exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Helpers
|
// Helpers
|
||||||
function getArg(args: string[], flag: string): string {
|
function getArg(args: string[], flag: string): string {
|
||||||
const arg = args.find((a) => a.startsWith(`${flag}=`));
|
const arg = args.find((a) => a.startsWith(`${flag}=`));
|
||||||
@@ -441,6 +517,9 @@ Commands:
|
|||||||
|
|
||||||
status
|
status
|
||||||
|
|
||||||
|
upgrade
|
||||||
|
Upgrade Onebox to the latest version (requires root)
|
||||||
|
|
||||||
Options:
|
Options:
|
||||||
--help, -h Show this help message
|
--help, -h Show this help message
|
||||||
--version, -v Show version
|
--version, -v Show version
|
||||||
|
|||||||
@@ -18,10 +18,14 @@ import type {
|
|||||||
IDomain,
|
IDomain,
|
||||||
ICertificate,
|
ICertificate,
|
||||||
ICertRequirement,
|
ICertRequirement,
|
||||||
|
IBackup,
|
||||||
|
IBackupSchedule,
|
||||||
|
IBackupScheduleUpdate,
|
||||||
} from '../types.ts';
|
} from '../types.ts';
|
||||||
import type { TBindValue } from './types.ts';
|
import type { TBindValue } from './types.ts';
|
||||||
import { logger } from '../logging.ts';
|
import { logger } from '../logging.ts';
|
||||||
import { getErrorMessage } from '../utils/error.ts';
|
import { getErrorMessage } from '../utils/error.ts';
|
||||||
|
import { MigrationRunner } from './migrations/index.ts';
|
||||||
|
|
||||||
// Import repositories
|
// Import repositories
|
||||||
import {
|
import {
|
||||||
@@ -31,6 +35,7 @@ import {
|
|||||||
AuthRepository,
|
AuthRepository,
|
||||||
MetricsRepository,
|
MetricsRepository,
|
||||||
PlatformRepository,
|
PlatformRepository,
|
||||||
|
BackupRepository,
|
||||||
} from './repositories/index.ts';
|
} from './repositories/index.ts';
|
||||||
|
|
||||||
export class OneboxDatabase {
|
export class OneboxDatabase {
|
||||||
@@ -44,6 +49,7 @@ export class OneboxDatabase {
|
|||||||
private authRepo!: AuthRepository;
|
private authRepo!: AuthRepository;
|
||||||
private metricsRepo!: MetricsRepository;
|
private metricsRepo!: MetricsRepository;
|
||||||
private platformRepo!: PlatformRepository;
|
private platformRepo!: PlatformRepository;
|
||||||
|
private backupRepo!: BackupRepository;
|
||||||
|
|
||||||
constructor(dbPath = './.nogit/onebox.db') {
|
constructor(dbPath = './.nogit/onebox.db') {
|
||||||
this.dbPath = dbPath;
|
this.dbPath = dbPath;
|
||||||
@@ -66,7 +72,8 @@ export class OneboxDatabase {
|
|||||||
await this.createTables();
|
await this.createTables();
|
||||||
|
|
||||||
// Run migrations if needed
|
// Run migrations if needed
|
||||||
await this.runMigrations();
|
const runner = new MigrationRunner(this.query.bind(this));
|
||||||
|
runner.run();
|
||||||
|
|
||||||
// Initialize repositories with bound query function
|
// Initialize repositories with bound query function
|
||||||
const queryFn = this.query.bind(this);
|
const queryFn = this.query.bind(this);
|
||||||
@@ -76,6 +83,7 @@ export class OneboxDatabase {
|
|||||||
this.authRepo = new AuthRepository(queryFn);
|
this.authRepo = new AuthRepository(queryFn);
|
||||||
this.metricsRepo = new MetricsRepository(queryFn);
|
this.metricsRepo = new MetricsRepository(queryFn);
|
||||||
this.platformRepo = new PlatformRepository(queryFn);
|
this.platformRepo = new PlatformRepository(queryFn);
|
||||||
|
this.backupRepo = new BackupRepository(queryFn);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error(`Failed to initialize database: ${getErrorMessage(error)}`);
|
logger.error(`Failed to initialize database: ${getErrorMessage(error)}`);
|
||||||
throw error;
|
throw error;
|
||||||
@@ -235,516 +243,6 @@ export class OneboxDatabase {
|
|||||||
/**
|
/**
|
||||||
* Run database migrations
|
* Run database migrations
|
||||||
*/
|
*/
|
||||||
private async runMigrations(): Promise<void> {
|
|
||||||
if (!this.db) throw new Error('Database not initialized');
|
|
||||||
|
|
||||||
try {
|
|
||||||
const currentVersion = this.getMigrationVersion();
|
|
||||||
logger.info(`Current database migration version: ${currentVersion}`);
|
|
||||||
|
|
||||||
// Migration 1: Initial schema
|
|
||||||
if (currentVersion === 0) {
|
|
||||||
logger.info('Setting initial migration version to 1');
|
|
||||||
this.setMigrationVersion(1);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Migration 2: Convert timestamp columns from INTEGER to REAL
|
|
||||||
const updatedVersion = this.getMigrationVersion();
|
|
||||||
if (updatedVersion < 2) {
|
|
||||||
logger.info('Running migration 2: Converting timestamps to REAL...');
|
|
||||||
|
|
||||||
// SSL certificates
|
|
||||||
this.query(`
|
|
||||||
CREATE TABLE ssl_certificates_new (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
domain TEXT NOT NULL UNIQUE,
|
|
||||||
cert_path TEXT NOT NULL,
|
|
||||||
key_path TEXT NOT NULL,
|
|
||||||
full_chain_path TEXT NOT NULL,
|
|
||||||
expiry_date REAL NOT NULL,
|
|
||||||
issuer TEXT NOT NULL,
|
|
||||||
created_at REAL NOT NULL,
|
|
||||||
updated_at REAL NOT NULL
|
|
||||||
)
|
|
||||||
`);
|
|
||||||
this.query(`INSERT INTO ssl_certificates_new SELECT * FROM ssl_certificates`);
|
|
||||||
this.query(`DROP TABLE ssl_certificates`);
|
|
||||||
this.query(`ALTER TABLE ssl_certificates_new RENAME TO ssl_certificates`);
|
|
||||||
|
|
||||||
// Services
|
|
||||||
this.query(`
|
|
||||||
CREATE TABLE services_new (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
name TEXT NOT NULL UNIQUE,
|
|
||||||
image TEXT NOT NULL,
|
|
||||||
registry TEXT,
|
|
||||||
env_vars TEXT NOT NULL,
|
|
||||||
port INTEGER NOT NULL,
|
|
||||||
domain TEXT,
|
|
||||||
container_id TEXT,
|
|
||||||
status TEXT NOT NULL DEFAULT 'stopped',
|
|
||||||
created_at REAL NOT NULL,
|
|
||||||
updated_at REAL NOT NULL
|
|
||||||
)
|
|
||||||
`);
|
|
||||||
this.query(`INSERT INTO services_new SELECT * FROM services`);
|
|
||||||
this.query(`DROP TABLE services`);
|
|
||||||
this.query(`ALTER TABLE services_new RENAME TO services`);
|
|
||||||
|
|
||||||
// Registries
|
|
||||||
this.query(`
|
|
||||||
CREATE TABLE registries_new (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
url TEXT NOT NULL UNIQUE,
|
|
||||||
username TEXT NOT NULL,
|
|
||||||
password_encrypted TEXT NOT NULL,
|
|
||||||
created_at REAL NOT NULL
|
|
||||||
)
|
|
||||||
`);
|
|
||||||
this.query(`INSERT INTO registries_new SELECT * FROM registries`);
|
|
||||||
this.query(`DROP TABLE registries`);
|
|
||||||
this.query(`ALTER TABLE registries_new RENAME TO registries`);
|
|
||||||
|
|
||||||
// Nginx configs
|
|
||||||
this.query(`
|
|
||||||
CREATE TABLE nginx_configs_new (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
service_id INTEGER NOT NULL,
|
|
||||||
domain TEXT NOT NULL,
|
|
||||||
port INTEGER NOT NULL,
|
|
||||||
ssl_enabled INTEGER NOT NULL DEFAULT 0,
|
|
||||||
config_template TEXT NOT NULL,
|
|
||||||
created_at REAL NOT NULL,
|
|
||||||
updated_at REAL NOT NULL,
|
|
||||||
FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE
|
|
||||||
)
|
|
||||||
`);
|
|
||||||
this.query(`INSERT INTO nginx_configs_new SELECT * FROM nginx_configs`);
|
|
||||||
this.query(`DROP TABLE nginx_configs`);
|
|
||||||
this.query(`ALTER TABLE nginx_configs_new RENAME TO nginx_configs`);
|
|
||||||
|
|
||||||
// DNS records
|
|
||||||
this.query(`
|
|
||||||
CREATE TABLE dns_records_new (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
domain TEXT NOT NULL UNIQUE,
|
|
||||||
type TEXT NOT NULL,
|
|
||||||
value TEXT NOT NULL,
|
|
||||||
cloudflare_id TEXT,
|
|
||||||
zone_id TEXT,
|
|
||||||
created_at REAL NOT NULL,
|
|
||||||
updated_at REAL NOT NULL
|
|
||||||
)
|
|
||||||
`);
|
|
||||||
this.query(`INSERT INTO dns_records_new SELECT * FROM dns_records`);
|
|
||||||
this.query(`DROP TABLE dns_records`);
|
|
||||||
this.query(`ALTER TABLE dns_records_new RENAME TO dns_records`);
|
|
||||||
|
|
||||||
// Metrics
|
|
||||||
this.query(`
|
|
||||||
CREATE TABLE metrics_new (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
service_id INTEGER NOT NULL,
|
|
||||||
timestamp REAL NOT NULL,
|
|
||||||
cpu_percent REAL NOT NULL,
|
|
||||||
memory_used INTEGER NOT NULL,
|
|
||||||
memory_limit INTEGER NOT NULL,
|
|
||||||
network_rx_bytes INTEGER NOT NULL,
|
|
||||||
network_tx_bytes INTEGER NOT NULL,
|
|
||||||
FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE
|
|
||||||
)
|
|
||||||
`);
|
|
||||||
this.query(`INSERT INTO metrics_new SELECT * FROM metrics`);
|
|
||||||
this.query(`DROP TABLE metrics`);
|
|
||||||
this.query(`ALTER TABLE metrics_new RENAME TO metrics`);
|
|
||||||
this.query(`CREATE INDEX IF NOT EXISTS idx_metrics_service_timestamp ON metrics(service_id, timestamp DESC)`);
|
|
||||||
|
|
||||||
// Logs
|
|
||||||
this.query(`
|
|
||||||
CREATE TABLE logs_new (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
service_id INTEGER NOT NULL,
|
|
||||||
timestamp REAL NOT NULL,
|
|
||||||
message TEXT NOT NULL,
|
|
||||||
level TEXT NOT NULL,
|
|
||||||
source TEXT NOT NULL,
|
|
||||||
FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE
|
|
||||||
)
|
|
||||||
`);
|
|
||||||
this.query(`INSERT INTO logs_new SELECT * FROM logs`);
|
|
||||||
this.query(`DROP TABLE logs`);
|
|
||||||
this.query(`ALTER TABLE logs_new RENAME TO logs`);
|
|
||||||
this.query(`CREATE INDEX IF NOT EXISTS idx_logs_service_timestamp ON logs(service_id, timestamp DESC)`);
|
|
||||||
|
|
||||||
// Users
|
|
||||||
this.query(`
|
|
||||||
CREATE TABLE users_new (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
username TEXT NOT NULL UNIQUE,
|
|
||||||
password_hash TEXT NOT NULL,
|
|
||||||
role TEXT NOT NULL DEFAULT 'user',
|
|
||||||
created_at REAL NOT NULL,
|
|
||||||
updated_at REAL NOT NULL
|
|
||||||
)
|
|
||||||
`);
|
|
||||||
this.query(`INSERT INTO users_new SELECT * FROM users`);
|
|
||||||
this.query(`DROP TABLE users`);
|
|
||||||
this.query(`ALTER TABLE users_new RENAME TO users`);
|
|
||||||
|
|
||||||
// Settings
|
|
||||||
this.query(`
|
|
||||||
CREATE TABLE settings_new (
|
|
||||||
key TEXT PRIMARY KEY,
|
|
||||||
value TEXT NOT NULL,
|
|
||||||
updated_at REAL NOT NULL
|
|
||||||
)
|
|
||||||
`);
|
|
||||||
this.query(`INSERT INTO settings_new SELECT * FROM settings`);
|
|
||||||
this.query(`DROP TABLE settings`);
|
|
||||||
this.query(`ALTER TABLE settings_new RENAME TO settings`);
|
|
||||||
|
|
||||||
// Migrations table itself
|
|
||||||
this.query(`
|
|
||||||
CREATE TABLE migrations_new (
|
|
||||||
version INTEGER PRIMARY KEY,
|
|
||||||
applied_at REAL NOT NULL
|
|
||||||
)
|
|
||||||
`);
|
|
||||||
this.query(`INSERT INTO migrations_new SELECT * FROM migrations`);
|
|
||||||
this.query(`DROP TABLE migrations`);
|
|
||||||
this.query(`ALTER TABLE migrations_new RENAME TO migrations`);
|
|
||||||
|
|
||||||
this.setMigrationVersion(2);
|
|
||||||
logger.success('Migration 2 completed: All timestamps converted to REAL');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Migration 3: Domain management tables
|
|
||||||
const version3 = this.getMigrationVersion();
|
|
||||||
if (version3 < 3) {
|
|
||||||
logger.info('Running migration 3: Creating domain management tables...');
|
|
||||||
|
|
||||||
this.query(`
|
|
||||||
CREATE TABLE domains (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
domain TEXT NOT NULL UNIQUE,
|
|
||||||
dns_provider TEXT,
|
|
||||||
cloudflare_zone_id TEXT,
|
|
||||||
is_obsolete INTEGER NOT NULL DEFAULT 0,
|
|
||||||
default_wildcard INTEGER NOT NULL DEFAULT 1,
|
|
||||||
created_at REAL NOT NULL,
|
|
||||||
updated_at REAL NOT NULL
|
|
||||||
)
|
|
||||||
`);
|
|
||||||
|
|
||||||
this.query(`
|
|
||||||
CREATE TABLE certificates (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
domain_id INTEGER NOT NULL,
|
|
||||||
cert_domain TEXT NOT NULL,
|
|
||||||
is_wildcard INTEGER NOT NULL DEFAULT 0,
|
|
||||||
cert_path TEXT NOT NULL,
|
|
||||||
key_path TEXT NOT NULL,
|
|
||||||
full_chain_path TEXT NOT NULL,
|
|
||||||
expiry_date REAL NOT NULL,
|
|
||||||
issuer TEXT NOT NULL,
|
|
||||||
is_valid INTEGER NOT NULL DEFAULT 1,
|
|
||||||
created_at REAL NOT NULL,
|
|
||||||
updated_at REAL NOT NULL,
|
|
||||||
FOREIGN KEY (domain_id) REFERENCES domains(id) ON DELETE CASCADE
|
|
||||||
)
|
|
||||||
`);
|
|
||||||
|
|
||||||
this.query(`
|
|
||||||
CREATE TABLE cert_requirements (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
service_id INTEGER NOT NULL,
|
|
||||||
domain_id INTEGER NOT NULL,
|
|
||||||
subdomain TEXT NOT NULL,
|
|
||||||
certificate_id INTEGER,
|
|
||||||
status TEXT NOT NULL DEFAULT 'pending',
|
|
||||||
created_at REAL NOT NULL,
|
|
||||||
updated_at REAL NOT NULL,
|
|
||||||
FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE,
|
|
||||||
FOREIGN KEY (domain_id) REFERENCES domains(id) ON DELETE CASCADE,
|
|
||||||
FOREIGN KEY (certificate_id) REFERENCES certificates(id) ON DELETE SET NULL
|
|
||||||
)
|
|
||||||
`);
|
|
||||||
|
|
||||||
interface OldSslCert {
|
|
||||||
id?: number;
|
|
||||||
domain?: string;
|
|
||||||
cert_path?: string;
|
|
||||||
key_path?: string;
|
|
||||||
full_chain_path?: string;
|
|
||||||
expiry_date?: number;
|
|
||||||
issuer?: string;
|
|
||||||
created_at?: number;
|
|
||||||
updated_at?: number;
|
|
||||||
[key: number]: unknown;
|
|
||||||
}
|
|
||||||
const existingCerts = this.query<OldSslCert>('SELECT * FROM ssl_certificates');
|
|
||||||
|
|
||||||
const now = Date.now();
|
|
||||||
const domainMap = new Map<string, number>();
|
|
||||||
|
|
||||||
for (const cert of existingCerts) {
|
|
||||||
const domain = String(cert.domain ?? (cert as Record<number, unknown>)[1]);
|
|
||||||
if (!domainMap.has(domain)) {
|
|
||||||
this.query(
|
|
||||||
'INSERT INTO domains (domain, dns_provider, is_obsolete, default_wildcard, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?)',
|
|
||||||
[domain, null, 0, 1, now, now]
|
|
||||||
);
|
|
||||||
const result = this.query<{ id?: number; [key: number]: unknown }>('SELECT last_insert_rowid() as id');
|
|
||||||
const domainId = result[0].id ?? (result[0] as Record<number, unknown>)[0];
|
|
||||||
domainMap.set(domain, Number(domainId));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for (const cert of existingCerts) {
|
|
||||||
const domain = String(cert.domain ?? (cert as Record<number, unknown>)[1]);
|
|
||||||
const domainId = domainMap.get(domain);
|
|
||||||
|
|
||||||
this.query(
|
|
||||||
`INSERT INTO certificates (
|
|
||||||
domain_id, cert_domain, is_wildcard, cert_path, key_path, full_chain_path,
|
|
||||||
expiry_date, issuer, is_valid, created_at, updated_at
|
|
||||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
|
||||||
[
|
|
||||||
domainId,
|
|
||||||
domain,
|
|
||||||
0,
|
|
||||||
String(cert.cert_path ?? (cert as Record<number, unknown>)[2]),
|
|
||||||
String(cert.key_path ?? (cert as Record<number, unknown>)[3]),
|
|
||||||
String(cert.full_chain_path ?? (cert as Record<number, unknown>)[4]),
|
|
||||||
Number(cert.expiry_date ?? (cert as Record<number, unknown>)[5]),
|
|
||||||
String(cert.issuer ?? (cert as Record<number, unknown>)[6]),
|
|
||||||
1,
|
|
||||||
Number(cert.created_at ?? (cert as Record<number, unknown>)[7]),
|
|
||||||
Number(cert.updated_at ?? (cert as Record<number, unknown>)[8])
|
|
||||||
]
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
this.query('DROP TABLE ssl_certificates');
|
|
||||||
this.query('CREATE INDEX IF NOT EXISTS idx_domains_cloudflare_zone ON domains(cloudflare_zone_id)');
|
|
||||||
this.query('CREATE INDEX IF NOT EXISTS idx_certificates_domain ON certificates(domain_id)');
|
|
||||||
this.query('CREATE INDEX IF NOT EXISTS idx_certificates_expiry ON certificates(expiry_date)');
|
|
||||||
this.query('CREATE INDEX IF NOT EXISTS idx_cert_requirements_service ON cert_requirements(service_id)');
|
|
||||||
this.query('CREATE INDEX IF NOT EXISTS idx_cert_requirements_domain ON cert_requirements(domain_id)');
|
|
||||||
|
|
||||||
this.setMigrationVersion(3);
|
|
||||||
logger.success('Migration 3 completed: Domain management tables created');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Migration 4: Add Onebox Registry support columns
|
|
||||||
const version4 = this.getMigrationVersion();
|
|
||||||
if (version4 < 4) {
|
|
||||||
logger.info('Running migration 4: Adding Onebox Registry columns to services table...');
|
|
||||||
|
|
||||||
this.query(`ALTER TABLE services ADD COLUMN use_onebox_registry INTEGER DEFAULT 0`);
|
|
||||||
this.query(`ALTER TABLE services ADD COLUMN registry_repository TEXT`);
|
|
||||||
this.query(`ALTER TABLE services ADD COLUMN registry_token TEXT`);
|
|
||||||
this.query(`ALTER TABLE services ADD COLUMN registry_image_tag TEXT DEFAULT 'latest'`);
|
|
||||||
this.query(`ALTER TABLE services ADD COLUMN auto_update_on_push INTEGER DEFAULT 0`);
|
|
||||||
this.query(`ALTER TABLE services ADD COLUMN image_digest TEXT`);
|
|
||||||
|
|
||||||
this.setMigrationVersion(4);
|
|
||||||
logger.success('Migration 4 completed: Onebox Registry columns added to services table');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Migration 5: Registry tokens table
|
|
||||||
const version5 = this.getMigrationVersion();
|
|
||||||
if (version5 < 5) {
|
|
||||||
logger.info('Running migration 5: Creating registry_tokens table...');
|
|
||||||
|
|
||||||
this.query(`
|
|
||||||
CREATE TABLE registry_tokens (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
name TEXT NOT NULL,
|
|
||||||
token_hash TEXT NOT NULL UNIQUE,
|
|
||||||
token_type TEXT NOT NULL,
|
|
||||||
scope TEXT NOT NULL,
|
|
||||||
expires_at REAL,
|
|
||||||
created_at REAL NOT NULL,
|
|
||||||
last_used_at REAL,
|
|
||||||
created_by TEXT NOT NULL
|
|
||||||
)
|
|
||||||
`);
|
|
||||||
|
|
||||||
this.query('CREATE INDEX IF NOT EXISTS idx_registry_tokens_type ON registry_tokens(token_type)');
|
|
||||||
this.query('CREATE INDEX IF NOT EXISTS idx_registry_tokens_hash ON registry_tokens(token_hash)');
|
|
||||||
|
|
||||||
this.setMigrationVersion(5);
|
|
||||||
logger.success('Migration 5 completed: Registry tokens table created');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Migration 6: Drop registry_token column from services table
|
|
||||||
const version6 = this.getMigrationVersion();
|
|
||||||
if (version6 < 6) {
|
|
||||||
logger.info('Running migration 6: Dropping registry_token column from services table...');
|
|
||||||
|
|
||||||
this.query(`
|
|
||||||
CREATE TABLE services_new (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
name TEXT NOT NULL UNIQUE,
|
|
||||||
image TEXT NOT NULL,
|
|
||||||
registry TEXT,
|
|
||||||
env_vars TEXT,
|
|
||||||
port INTEGER NOT NULL,
|
|
||||||
domain TEXT,
|
|
||||||
container_id TEXT,
|
|
||||||
status TEXT NOT NULL,
|
|
||||||
created_at REAL NOT NULL,
|
|
||||||
updated_at REAL NOT NULL,
|
|
||||||
use_onebox_registry INTEGER DEFAULT 0,
|
|
||||||
registry_repository TEXT,
|
|
||||||
registry_image_tag TEXT DEFAULT 'latest',
|
|
||||||
auto_update_on_push INTEGER DEFAULT 0,
|
|
||||||
image_digest TEXT
|
|
||||||
)
|
|
||||||
`);
|
|
||||||
|
|
||||||
this.query(`
|
|
||||||
INSERT INTO services_new (
|
|
||||||
id, name, image, registry, env_vars, port, domain, container_id, status,
|
|
||||||
created_at, updated_at, use_onebox_registry, registry_repository,
|
|
||||||
registry_image_tag, auto_update_on_push, image_digest
|
|
||||||
)
|
|
||||||
SELECT
|
|
||||||
id, name, image, registry, env_vars, port, domain, container_id, status,
|
|
||||||
created_at, updated_at, use_onebox_registry, registry_repository,
|
|
||||||
registry_image_tag, auto_update_on_push, image_digest
|
|
||||||
FROM services
|
|
||||||
`);
|
|
||||||
|
|
||||||
this.query('DROP TABLE services');
|
|
||||||
this.query('ALTER TABLE services_new RENAME TO services');
|
|
||||||
this.query('CREATE INDEX IF NOT EXISTS idx_services_name ON services(name)');
|
|
||||||
this.query('CREATE INDEX IF NOT EXISTS idx_services_status ON services(status)');
|
|
||||||
|
|
||||||
this.setMigrationVersion(6);
|
|
||||||
logger.success('Migration 6 completed: registry_token column dropped from services table');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Migration 7: Platform services tables
|
|
||||||
const version7 = this.getMigrationVersion();
|
|
||||||
if (version7 < 7) {
|
|
||||||
logger.info('Running migration 7: Creating platform services tables...');
|
|
||||||
|
|
||||||
this.query(`
|
|
||||||
CREATE TABLE platform_services (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
name TEXT NOT NULL UNIQUE,
|
|
||||||
type TEXT NOT NULL,
|
|
||||||
status TEXT NOT NULL DEFAULT 'stopped',
|
|
||||||
container_id TEXT,
|
|
||||||
config TEXT NOT NULL DEFAULT '{}',
|
|
||||||
admin_credentials_encrypted TEXT,
|
|
||||||
created_at REAL NOT NULL,
|
|
||||||
updated_at REAL NOT NULL
|
|
||||||
)
|
|
||||||
`);
|
|
||||||
|
|
||||||
this.query(`
|
|
||||||
CREATE TABLE platform_resources (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
platform_service_id INTEGER NOT NULL,
|
|
||||||
service_id INTEGER NOT NULL,
|
|
||||||
resource_type TEXT NOT NULL,
|
|
||||||
resource_name TEXT NOT NULL,
|
|
||||||
credentials_encrypted TEXT NOT NULL,
|
|
||||||
created_at REAL NOT NULL,
|
|
||||||
FOREIGN KEY (platform_service_id) REFERENCES platform_services(id) ON DELETE CASCADE,
|
|
||||||
FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE
|
|
||||||
)
|
|
||||||
`);
|
|
||||||
|
|
||||||
this.query(`ALTER TABLE services ADD COLUMN platform_requirements TEXT DEFAULT '{}'`);
|
|
||||||
|
|
||||||
this.query('CREATE INDEX IF NOT EXISTS idx_platform_services_type ON platform_services(type)');
|
|
||||||
this.query('CREATE INDEX IF NOT EXISTS idx_platform_resources_service ON platform_resources(service_id)');
|
|
||||||
this.query('CREATE INDEX IF NOT EXISTS idx_platform_resources_platform ON platform_resources(platform_service_id)');
|
|
||||||
|
|
||||||
this.setMigrationVersion(7);
|
|
||||||
logger.success('Migration 7 completed: Platform services tables created');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Migration 8: Convert certificates table to store PEM content
|
|
||||||
const version8 = this.getMigrationVersion();
|
|
||||||
if (version8 < 8) {
|
|
||||||
logger.info('Running migration 8: Converting certificates table to store PEM content...');
|
|
||||||
|
|
||||||
this.query(`
|
|
||||||
CREATE TABLE certificates_new (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
domain_id INTEGER NOT NULL,
|
|
||||||
cert_domain TEXT NOT NULL,
|
|
||||||
is_wildcard INTEGER NOT NULL DEFAULT 0,
|
|
||||||
cert_pem TEXT NOT NULL DEFAULT '',
|
|
||||||
key_pem TEXT NOT NULL DEFAULT '',
|
|
||||||
fullchain_pem TEXT NOT NULL DEFAULT '',
|
|
||||||
expiry_date REAL NOT NULL,
|
|
||||||
issuer TEXT NOT NULL,
|
|
||||||
is_valid INTEGER NOT NULL DEFAULT 1,
|
|
||||||
created_at REAL NOT NULL,
|
|
||||||
updated_at REAL NOT NULL,
|
|
||||||
FOREIGN KEY (domain_id) REFERENCES domains(id) ON DELETE CASCADE
|
|
||||||
)
|
|
||||||
`);
|
|
||||||
|
|
||||||
this.query(`
|
|
||||||
INSERT INTO certificates_new (id, domain_id, cert_domain, is_wildcard, cert_pem, key_pem, fullchain_pem, expiry_date, issuer, is_valid, created_at, updated_at)
|
|
||||||
SELECT id, domain_id, cert_domain, is_wildcard, '', '', '', expiry_date, issuer, 0, created_at, updated_at FROM certificates
|
|
||||||
`);
|
|
||||||
|
|
||||||
this.query('DROP TABLE certificates');
|
|
||||||
this.query('ALTER TABLE certificates_new RENAME TO certificates');
|
|
||||||
this.query('CREATE INDEX IF NOT EXISTS idx_certificates_domain ON certificates(domain_id)');
|
|
||||||
this.query('CREATE INDEX IF NOT EXISTS idx_certificates_expiry ON certificates(expiry_date)');
|
|
||||||
|
|
||||||
this.setMigrationVersion(8);
|
|
||||||
logger.success('Migration 8 completed: Certificates table now stores PEM content');
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
logger.error(`Migration failed: ${getErrorMessage(error)}`);
|
|
||||||
if (error instanceof Error && error.stack) {
|
|
||||||
logger.error(`Stack: ${error.stack}`);
|
|
||||||
}
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get current migration version
|
|
||||||
*/
|
|
||||||
private getMigrationVersion(): number {
|
|
||||||
if (!this.db) throw new Error('Database not initialized');
|
|
||||||
|
|
||||||
try {
|
|
||||||
const result = this.query<{ version?: number | null; [key: number]: unknown }>('SELECT MAX(version) as version FROM migrations');
|
|
||||||
if (result.length === 0) return 0;
|
|
||||||
|
|
||||||
const versionValue = result[0].version ?? (result[0] as Record<number, unknown>)[0];
|
|
||||||
return versionValue !== null && versionValue !== undefined ? Number(versionValue) : 0;
|
|
||||||
} catch (error) {
|
|
||||||
logger.warn(`Error getting migration version: ${getErrorMessage(error)}, defaulting to 0`);
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Set migration version
|
|
||||||
*/
|
|
||||||
private setMigrationVersion(version: number): void {
|
|
||||||
if (!this.db) throw new Error('Database not initialized');
|
|
||||||
|
|
||||||
this.query('INSERT INTO migrations (version, applied_at) VALUES (?, ?)', [
|
|
||||||
version,
|
|
||||||
Date.now(),
|
|
||||||
]);
|
|
||||||
logger.debug(`Migration version set to ${version}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Close database connection
|
* Close database connection
|
||||||
*/
|
*/
|
||||||
@@ -1078,4 +576,68 @@ export class OneboxDatabase {
|
|||||||
deletePlatformResourcesByService(serviceId: number): void {
|
deletePlatformResourcesByService(serviceId: number): void {
|
||||||
this.platformRepo.deletePlatformResourcesByService(serviceId);
|
this.platformRepo.deletePlatformResourcesByService(serviceId);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============ Backups (delegated to repository) ============
|
||||||
|
|
||||||
|
createBackup(backup: Omit<IBackup, 'id'>): IBackup {
|
||||||
|
return this.backupRepo.create(backup);
|
||||||
|
}
|
||||||
|
|
||||||
|
getBackupById(id: number): IBackup | null {
|
||||||
|
return this.backupRepo.getById(id);
|
||||||
|
}
|
||||||
|
|
||||||
|
getBackupsByService(serviceId: number): IBackup[] {
|
||||||
|
return this.backupRepo.getByService(serviceId);
|
||||||
|
}
|
||||||
|
|
||||||
|
getAllBackups(): IBackup[] {
|
||||||
|
return this.backupRepo.getAll();
|
||||||
|
}
|
||||||
|
|
||||||
|
deleteBackup(id: number): void {
|
||||||
|
this.backupRepo.delete(id);
|
||||||
|
}
|
||||||
|
|
||||||
|
deleteBackupsByService(serviceId: number): void {
|
||||||
|
this.backupRepo.deleteByService(serviceId);
|
||||||
|
}
|
||||||
|
|
||||||
|
getBackupsBySchedule(scheduleId: number): IBackup[] {
|
||||||
|
return this.backupRepo.getBySchedule(scheduleId);
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============ Backup Schedules (delegated to repository) ============
|
||||||
|
|
||||||
|
createBackupSchedule(schedule: Omit<IBackupSchedule, 'id'>): IBackupSchedule {
|
||||||
|
return this.backupRepo.createSchedule(schedule);
|
||||||
|
}
|
||||||
|
|
||||||
|
getBackupScheduleById(id: number): IBackupSchedule | null {
|
||||||
|
return this.backupRepo.getScheduleById(id);
|
||||||
|
}
|
||||||
|
|
||||||
|
getBackupSchedulesByService(serviceId: number): IBackupSchedule[] {
|
||||||
|
return this.backupRepo.getSchedulesByService(serviceId);
|
||||||
|
}
|
||||||
|
|
||||||
|
getEnabledBackupSchedules(): IBackupSchedule[] {
|
||||||
|
return this.backupRepo.getEnabledSchedules();
|
||||||
|
}
|
||||||
|
|
||||||
|
getAllBackupSchedules(): IBackupSchedule[] {
|
||||||
|
return this.backupRepo.getAllSchedules();
|
||||||
|
}
|
||||||
|
|
||||||
|
updateBackupSchedule(id: number, updates: IBackupScheduleUpdate & { lastRunAt?: number; nextRunAt?: number; lastStatus?: 'success' | 'failed' | null; lastError?: string | null }): void {
|
||||||
|
this.backupRepo.updateSchedule(id, updates);
|
||||||
|
}
|
||||||
|
|
||||||
|
deleteBackupSchedule(id: number): void {
|
||||||
|
this.backupRepo.deleteSchedule(id);
|
||||||
|
}
|
||||||
|
|
||||||
|
deleteBackupSchedulesByService(serviceId: number): void {
|
||||||
|
this.backupRepo.deleteSchedulesByService(serviceId);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
22
ts/database/migrations/base-migration.ts
Normal file
22
ts/database/migrations/base-migration.ts
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
/**
|
||||||
|
* Abstract base class for database migrations.
|
||||||
|
* All migrations must extend this class and implement the abstract members.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type { TQueryFunction } from '../types.ts';
|
||||||
|
|
||||||
|
export abstract class BaseMigration {
|
||||||
|
/** The migration version number (must be unique and sequential) */
|
||||||
|
abstract readonly version: number;
|
||||||
|
|
||||||
|
/** A short description of what this migration does */
|
||||||
|
abstract readonly description: string;
|
||||||
|
|
||||||
|
/** Execute the migration's SQL statements */
|
||||||
|
abstract up(query: TQueryFunction): void;
|
||||||
|
|
||||||
|
/** Returns a human-readable name for logging */
|
||||||
|
getName(): string {
|
||||||
|
return `Migration ${this.version}: ${this.description}`;
|
||||||
|
}
|
||||||
|
}
|
||||||
2
ts/database/migrations/index.ts
Normal file
2
ts/database/migrations/index.ts
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
export { BaseMigration } from './base-migration.ts';
|
||||||
|
export { MigrationRunner } from './migration-runner.ts';
|
||||||
12
ts/database/migrations/migration-001-initial.ts
Normal file
12
ts/database/migrations/migration-001-initial.ts
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
import { BaseMigration } from './base-migration.ts';
|
||||||
|
import type { TQueryFunction } from '../types.ts';
|
||||||
|
|
||||||
|
export class Migration001Initial extends BaseMigration {
|
||||||
|
readonly version = 1;
|
||||||
|
readonly description = 'Initial schema';
|
||||||
|
|
||||||
|
up(_query: TQueryFunction): void {
|
||||||
|
// Initial schema is created by createTables() in the database class.
|
||||||
|
// This migration just marks the initial version.
|
||||||
|
}
|
||||||
|
}
|
||||||
170
ts/database/migrations/migration-002-timestamps-to-real.ts
Normal file
170
ts/database/migrations/migration-002-timestamps-to-real.ts
Normal file
@@ -0,0 +1,170 @@
|
|||||||
|
import { BaseMigration } from './base-migration.ts';
|
||||||
|
import type { TQueryFunction } from '../types.ts';
|
||||||
|
|
||||||
|
export class Migration002TimestampsToReal extends BaseMigration {
|
||||||
|
readonly version = 2;
|
||||||
|
readonly description = 'Convert timestamp columns from INTEGER to REAL';
|
||||||
|
|
||||||
|
up(query: TQueryFunction): void {
|
||||||
|
// SSL certificates
|
||||||
|
query(`
|
||||||
|
CREATE TABLE ssl_certificates_new (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
domain TEXT NOT NULL UNIQUE,
|
||||||
|
cert_path TEXT NOT NULL,
|
||||||
|
key_path TEXT NOT NULL,
|
||||||
|
full_chain_path TEXT NOT NULL,
|
||||||
|
expiry_date REAL NOT NULL,
|
||||||
|
issuer TEXT NOT NULL,
|
||||||
|
created_at REAL NOT NULL,
|
||||||
|
updated_at REAL NOT NULL
|
||||||
|
)
|
||||||
|
`);
|
||||||
|
query(`INSERT INTO ssl_certificates_new SELECT * FROM ssl_certificates`);
|
||||||
|
query(`DROP TABLE ssl_certificates`);
|
||||||
|
query(`ALTER TABLE ssl_certificates_new RENAME TO ssl_certificates`);
|
||||||
|
|
||||||
|
// Services
|
||||||
|
query(`
|
||||||
|
CREATE TABLE services_new (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
name TEXT NOT NULL UNIQUE,
|
||||||
|
image TEXT NOT NULL,
|
||||||
|
registry TEXT,
|
||||||
|
env_vars TEXT NOT NULL,
|
||||||
|
port INTEGER NOT NULL,
|
||||||
|
domain TEXT,
|
||||||
|
container_id TEXT,
|
||||||
|
status TEXT NOT NULL DEFAULT 'stopped',
|
||||||
|
created_at REAL NOT NULL,
|
||||||
|
updated_at REAL NOT NULL
|
||||||
|
)
|
||||||
|
`);
|
||||||
|
query(`INSERT INTO services_new SELECT * FROM services`);
|
||||||
|
query(`DROP TABLE services`);
|
||||||
|
query(`ALTER TABLE services_new RENAME TO services`);
|
||||||
|
|
||||||
|
// Registries
|
||||||
|
query(`
|
||||||
|
CREATE TABLE registries_new (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
url TEXT NOT NULL UNIQUE,
|
||||||
|
username TEXT NOT NULL,
|
||||||
|
password_encrypted TEXT NOT NULL,
|
||||||
|
created_at REAL NOT NULL
|
||||||
|
)
|
||||||
|
`);
|
||||||
|
query(`INSERT INTO registries_new SELECT * FROM registries`);
|
||||||
|
query(`DROP TABLE registries`);
|
||||||
|
query(`ALTER TABLE registries_new RENAME TO registries`);
|
||||||
|
|
||||||
|
// Nginx configs
|
||||||
|
query(`
|
||||||
|
CREATE TABLE nginx_configs_new (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
service_id INTEGER NOT NULL,
|
||||||
|
domain TEXT NOT NULL,
|
||||||
|
port INTEGER NOT NULL,
|
||||||
|
ssl_enabled INTEGER NOT NULL DEFAULT 0,
|
||||||
|
config_template TEXT NOT NULL,
|
||||||
|
created_at REAL NOT NULL,
|
||||||
|
updated_at REAL NOT NULL,
|
||||||
|
FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE
|
||||||
|
)
|
||||||
|
`);
|
||||||
|
query(`INSERT INTO nginx_configs_new SELECT * FROM nginx_configs`);
|
||||||
|
query(`DROP TABLE nginx_configs`);
|
||||||
|
query(`ALTER TABLE nginx_configs_new RENAME TO nginx_configs`);
|
||||||
|
|
||||||
|
// DNS records
|
||||||
|
query(`
|
||||||
|
CREATE TABLE dns_records_new (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
domain TEXT NOT NULL UNIQUE,
|
||||||
|
type TEXT NOT NULL,
|
||||||
|
value TEXT NOT NULL,
|
||||||
|
cloudflare_id TEXT,
|
||||||
|
zone_id TEXT,
|
||||||
|
created_at REAL NOT NULL,
|
||||||
|
updated_at REAL NOT NULL
|
||||||
|
)
|
||||||
|
`);
|
||||||
|
query(`INSERT INTO dns_records_new SELECT * FROM dns_records`);
|
||||||
|
query(`DROP TABLE dns_records`);
|
||||||
|
query(`ALTER TABLE dns_records_new RENAME TO dns_records`);
|
||||||
|
|
||||||
|
// Metrics
|
||||||
|
query(`
|
||||||
|
CREATE TABLE metrics_new (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
service_id INTEGER NOT NULL,
|
||||||
|
timestamp REAL NOT NULL,
|
||||||
|
cpu_percent REAL NOT NULL,
|
||||||
|
memory_used INTEGER NOT NULL,
|
||||||
|
memory_limit INTEGER NOT NULL,
|
||||||
|
network_rx_bytes INTEGER NOT NULL,
|
||||||
|
network_tx_bytes INTEGER NOT NULL,
|
||||||
|
FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE
|
||||||
|
)
|
||||||
|
`);
|
||||||
|
query(`INSERT INTO metrics_new SELECT * FROM metrics`);
|
||||||
|
query(`DROP TABLE metrics`);
|
||||||
|
query(`ALTER TABLE metrics_new RENAME TO metrics`);
|
||||||
|
query(`CREATE INDEX IF NOT EXISTS idx_metrics_service_timestamp ON metrics(service_id, timestamp DESC)`);
|
||||||
|
|
||||||
|
// Logs
|
||||||
|
query(`
|
||||||
|
CREATE TABLE logs_new (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
service_id INTEGER NOT NULL,
|
||||||
|
timestamp REAL NOT NULL,
|
||||||
|
message TEXT NOT NULL,
|
||||||
|
level TEXT NOT NULL,
|
||||||
|
source TEXT NOT NULL,
|
||||||
|
FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE
|
||||||
|
)
|
||||||
|
`);
|
||||||
|
query(`INSERT INTO logs_new SELECT * FROM logs`);
|
||||||
|
query(`DROP TABLE logs`);
|
||||||
|
query(`ALTER TABLE logs_new RENAME TO logs`);
|
||||||
|
query(`CREATE INDEX IF NOT EXISTS idx_logs_service_timestamp ON logs(service_id, timestamp DESC)`);
|
||||||
|
|
||||||
|
// Users
|
||||||
|
query(`
|
||||||
|
CREATE TABLE users_new (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
username TEXT NOT NULL UNIQUE,
|
||||||
|
password_hash TEXT NOT NULL,
|
||||||
|
role TEXT NOT NULL DEFAULT 'user',
|
||||||
|
created_at REAL NOT NULL,
|
||||||
|
updated_at REAL NOT NULL
|
||||||
|
)
|
||||||
|
`);
|
||||||
|
query(`INSERT INTO users_new SELECT * FROM users`);
|
||||||
|
query(`DROP TABLE users`);
|
||||||
|
query(`ALTER TABLE users_new RENAME TO users`);
|
||||||
|
|
||||||
|
// Settings
|
||||||
|
query(`
|
||||||
|
CREATE TABLE settings_new (
|
||||||
|
key TEXT PRIMARY KEY,
|
||||||
|
value TEXT NOT NULL,
|
||||||
|
updated_at REAL NOT NULL
|
||||||
|
)
|
||||||
|
`);
|
||||||
|
query(`INSERT INTO settings_new SELECT * FROM settings`);
|
||||||
|
query(`DROP TABLE settings`);
|
||||||
|
query(`ALTER TABLE settings_new RENAME TO settings`);
|
||||||
|
|
||||||
|
// Migrations table itself
|
||||||
|
query(`
|
||||||
|
CREATE TABLE migrations_new (
|
||||||
|
version INTEGER PRIMARY KEY,
|
||||||
|
applied_at REAL NOT NULL
|
||||||
|
)
|
||||||
|
`);
|
||||||
|
query(`INSERT INTO migrations_new SELECT * FROM migrations`);
|
||||||
|
query(`DROP TABLE migrations`);
|
||||||
|
query(`ALTER TABLE migrations_new RENAME TO migrations`);
|
||||||
|
}
|
||||||
|
}
|
||||||
125
ts/database/migrations/migration-003-domain-management.ts
Normal file
125
ts/database/migrations/migration-003-domain-management.ts
Normal file
@@ -0,0 +1,125 @@
|
|||||||
|
import { BaseMigration } from './base-migration.ts';
|
||||||
|
import type { TQueryFunction } from '../types.ts';
|
||||||
|
|
||||||
|
export class Migration003DomainManagement extends BaseMigration {
|
||||||
|
readonly version = 3;
|
||||||
|
readonly description = 'Domain management tables';
|
||||||
|
|
||||||
|
up(query: TQueryFunction): void {
|
||||||
|
query(`
|
||||||
|
CREATE TABLE domains (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
domain TEXT NOT NULL UNIQUE,
|
||||||
|
dns_provider TEXT,
|
||||||
|
cloudflare_zone_id TEXT,
|
||||||
|
is_obsolete INTEGER NOT NULL DEFAULT 0,
|
||||||
|
default_wildcard INTEGER NOT NULL DEFAULT 1,
|
||||||
|
created_at REAL NOT NULL,
|
||||||
|
updated_at REAL NOT NULL
|
||||||
|
)
|
||||||
|
`);
|
||||||
|
|
||||||
|
query(`
|
||||||
|
CREATE TABLE certificates (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
domain_id INTEGER NOT NULL,
|
||||||
|
cert_domain TEXT NOT NULL,
|
||||||
|
is_wildcard INTEGER NOT NULL DEFAULT 0,
|
||||||
|
cert_path TEXT NOT NULL,
|
||||||
|
key_path TEXT NOT NULL,
|
||||||
|
full_chain_path TEXT NOT NULL,
|
||||||
|
expiry_date REAL NOT NULL,
|
||||||
|
issuer TEXT NOT NULL,
|
||||||
|
is_valid INTEGER NOT NULL DEFAULT 1,
|
||||||
|
created_at REAL NOT NULL,
|
||||||
|
updated_at REAL NOT NULL,
|
||||||
|
FOREIGN KEY (domain_id) REFERENCES domains(id) ON DELETE CASCADE
|
||||||
|
)
|
||||||
|
`);
|
||||||
|
|
||||||
|
query(`
|
||||||
|
CREATE TABLE cert_requirements (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
service_id INTEGER NOT NULL,
|
||||||
|
domain_id INTEGER NOT NULL,
|
||||||
|
subdomain TEXT NOT NULL,
|
||||||
|
certificate_id INTEGER,
|
||||||
|
status TEXT NOT NULL DEFAULT 'pending',
|
||||||
|
created_at REAL NOT NULL,
|
||||||
|
updated_at REAL NOT NULL,
|
||||||
|
FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE,
|
||||||
|
FOREIGN KEY (domain_id) REFERENCES domains(id) ON DELETE CASCADE,
|
||||||
|
FOREIGN KEY (certificate_id) REFERENCES certificates(id) ON DELETE SET NULL
|
||||||
|
)
|
||||||
|
`);
|
||||||
|
|
||||||
|
// Migrate data from old ssl_certificates table
|
||||||
|
interface OldSslCert {
|
||||||
|
id?: number;
|
||||||
|
domain?: string;
|
||||||
|
cert_path?: string;
|
||||||
|
key_path?: string;
|
||||||
|
full_chain_path?: string;
|
||||||
|
expiry_date?: number;
|
||||||
|
issuer?: string;
|
||||||
|
created_at?: number;
|
||||||
|
updated_at?: number;
|
||||||
|
[key: number]: unknown;
|
||||||
|
}
|
||||||
|
const existingCerts = query<OldSslCert>('SELECT * FROM ssl_certificates');
|
||||||
|
|
||||||
|
const now = Date.now();
|
||||||
|
const domainMap = new Map<string, number>();
|
||||||
|
|
||||||
|
for (const cert of existingCerts) {
|
||||||
|
const domain = String(cert.domain ?? (cert as Record<number, unknown>)[1]);
|
||||||
|
if (!domainMap.has(domain)) {
|
||||||
|
query(
|
||||||
|
'INSERT INTO domains (domain, dns_provider, is_obsolete, default_wildcard, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?)',
|
||||||
|
[domain, null, 0, 1, now, now],
|
||||||
|
);
|
||||||
|
const result = query<{ id?: number; [key: number]: unknown }>(
|
||||||
|
'SELECT last_insert_rowid() as id',
|
||||||
|
);
|
||||||
|
const domainId = result[0].id ?? (result[0] as Record<number, unknown>)[0];
|
||||||
|
domainMap.set(domain, Number(domainId));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const cert of existingCerts) {
|
||||||
|
const domain = String(cert.domain ?? (cert as Record<number, unknown>)[1]);
|
||||||
|
const domainId = domainMap.get(domain);
|
||||||
|
|
||||||
|
query(
|
||||||
|
`INSERT INTO certificates (
|
||||||
|
domain_id, cert_domain, is_wildcard, cert_path, key_path, full_chain_path,
|
||||||
|
expiry_date, issuer, is_valid, created_at, updated_at
|
||||||
|
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||||
|
[
|
||||||
|
domainId,
|
||||||
|
domain,
|
||||||
|
0,
|
||||||
|
String(cert.cert_path ?? (cert as Record<number, unknown>)[2]),
|
||||||
|
String(cert.key_path ?? (cert as Record<number, unknown>)[3]),
|
||||||
|
String(cert.full_chain_path ?? (cert as Record<number, unknown>)[4]),
|
||||||
|
Number(cert.expiry_date ?? (cert as Record<number, unknown>)[5]),
|
||||||
|
String(cert.issuer ?? (cert as Record<number, unknown>)[6]),
|
||||||
|
1,
|
||||||
|
Number(cert.created_at ?? (cert as Record<number, unknown>)[7]),
|
||||||
|
Number(cert.updated_at ?? (cert as Record<number, unknown>)[8]),
|
||||||
|
],
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
query('DROP TABLE ssl_certificates');
|
||||||
|
query('CREATE INDEX IF NOT EXISTS idx_domains_cloudflare_zone ON domains(cloudflare_zone_id)');
|
||||||
|
query('CREATE INDEX IF NOT EXISTS idx_certificates_domain ON certificates(domain_id)');
|
||||||
|
query('CREATE INDEX IF NOT EXISTS idx_certificates_expiry ON certificates(expiry_date)');
|
||||||
|
query(
|
||||||
|
'CREATE INDEX IF NOT EXISTS idx_cert_requirements_service ON cert_requirements(service_id)',
|
||||||
|
);
|
||||||
|
query(
|
||||||
|
'CREATE INDEX IF NOT EXISTS idx_cert_requirements_domain ON cert_requirements(domain_id)',
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
16
ts/database/migrations/migration-004-registry-columns.ts
Normal file
16
ts/database/migrations/migration-004-registry-columns.ts
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
import { BaseMigration } from './base-migration.ts';
|
||||||
|
import type { TQueryFunction } from '../types.ts';
|
||||||
|
|
||||||
|
export class Migration004RegistryColumns extends BaseMigration {
|
||||||
|
readonly version = 4;
|
||||||
|
readonly description = 'Add Onebox Registry columns to services table';
|
||||||
|
|
||||||
|
up(query: TQueryFunction): void {
|
||||||
|
query(`ALTER TABLE services ADD COLUMN use_onebox_registry INTEGER DEFAULT 0`);
|
||||||
|
query(`ALTER TABLE services ADD COLUMN registry_repository TEXT`);
|
||||||
|
query(`ALTER TABLE services ADD COLUMN registry_token TEXT`);
|
||||||
|
query(`ALTER TABLE services ADD COLUMN registry_image_tag TEXT DEFAULT 'latest'`);
|
||||||
|
query(`ALTER TABLE services ADD COLUMN auto_update_on_push INTEGER DEFAULT 0`);
|
||||||
|
query(`ALTER TABLE services ADD COLUMN image_digest TEXT`);
|
||||||
|
}
|
||||||
|
}
|
||||||
30
ts/database/migrations/migration-005-registry-tokens.ts
Normal file
30
ts/database/migrations/migration-005-registry-tokens.ts
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
import { BaseMigration } from './base-migration.ts';
|
||||||
|
import type { TQueryFunction } from '../types.ts';
|
||||||
|
|
||||||
|
export class Migration005RegistryTokens extends BaseMigration {
|
||||||
|
readonly version = 5;
|
||||||
|
readonly description = 'Registry tokens table';
|
||||||
|
|
||||||
|
up(query: TQueryFunction): void {
|
||||||
|
query(`
|
||||||
|
CREATE TABLE registry_tokens (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
token_hash TEXT NOT NULL UNIQUE,
|
||||||
|
token_type TEXT NOT NULL,
|
||||||
|
scope TEXT NOT NULL,
|
||||||
|
expires_at REAL,
|
||||||
|
created_at REAL NOT NULL,
|
||||||
|
last_used_at REAL,
|
||||||
|
created_by TEXT NOT NULL
|
||||||
|
)
|
||||||
|
`);
|
||||||
|
|
||||||
|
query(
|
||||||
|
'CREATE INDEX IF NOT EXISTS idx_registry_tokens_type ON registry_tokens(token_type)',
|
||||||
|
);
|
||||||
|
query(
|
||||||
|
'CREATE INDEX IF NOT EXISTS idx_registry_tokens_hash ON registry_tokens(token_hash)',
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
48
ts/database/migrations/migration-006-drop-registry-token.ts
Normal file
48
ts/database/migrations/migration-006-drop-registry-token.ts
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
import { BaseMigration } from './base-migration.ts';
|
||||||
|
import type { TQueryFunction } from '../types.ts';
|
||||||
|
|
||||||
|
export class Migration006DropRegistryToken extends BaseMigration {
|
||||||
|
readonly version = 6;
|
||||||
|
readonly description = 'Drop registry_token column from services table';
|
||||||
|
|
||||||
|
up(query: TQueryFunction): void {
|
||||||
|
query(`
|
||||||
|
CREATE TABLE services_new (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
name TEXT NOT NULL UNIQUE,
|
||||||
|
image TEXT NOT NULL,
|
||||||
|
registry TEXT,
|
||||||
|
env_vars TEXT,
|
||||||
|
port INTEGER NOT NULL,
|
||||||
|
domain TEXT,
|
||||||
|
container_id TEXT,
|
||||||
|
status TEXT NOT NULL,
|
||||||
|
created_at REAL NOT NULL,
|
||||||
|
updated_at REAL NOT NULL,
|
||||||
|
use_onebox_registry INTEGER DEFAULT 0,
|
||||||
|
registry_repository TEXT,
|
||||||
|
registry_image_tag TEXT DEFAULT 'latest',
|
||||||
|
auto_update_on_push INTEGER DEFAULT 0,
|
||||||
|
image_digest TEXT
|
||||||
|
)
|
||||||
|
`);
|
||||||
|
|
||||||
|
query(`
|
||||||
|
INSERT INTO services_new (
|
||||||
|
id, name, image, registry, env_vars, port, domain, container_id, status,
|
||||||
|
created_at, updated_at, use_onebox_registry, registry_repository,
|
||||||
|
registry_image_tag, auto_update_on_push, image_digest
|
||||||
|
)
|
||||||
|
SELECT
|
||||||
|
id, name, image, registry, env_vars, port, domain, container_id, status,
|
||||||
|
created_at, updated_at, use_onebox_registry, registry_repository,
|
||||||
|
registry_image_tag, auto_update_on_push, image_digest
|
||||||
|
FROM services
|
||||||
|
`);
|
||||||
|
|
||||||
|
query('DROP TABLE services');
|
||||||
|
query('ALTER TABLE services_new RENAME TO services');
|
||||||
|
query('CREATE INDEX IF NOT EXISTS idx_services_name ON services(name)');
|
||||||
|
query('CREATE INDEX IF NOT EXISTS idx_services_status ON services(status)');
|
||||||
|
}
|
||||||
|
}
|
||||||
49
ts/database/migrations/migration-007-platform-services.ts
Normal file
49
ts/database/migrations/migration-007-platform-services.ts
Normal file
@@ -0,0 +1,49 @@
|
|||||||
|
import { BaseMigration } from './base-migration.ts';
|
||||||
|
import type { TQueryFunction } from '../types.ts';
|
||||||
|
|
||||||
|
export class Migration007PlatformServices extends BaseMigration {
|
||||||
|
readonly version = 7;
|
||||||
|
readonly description = 'Platform services tables';
|
||||||
|
|
||||||
|
up(query: TQueryFunction): void {
|
||||||
|
query(`
|
||||||
|
CREATE TABLE platform_services (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
name TEXT NOT NULL UNIQUE,
|
||||||
|
type TEXT NOT NULL,
|
||||||
|
status TEXT NOT NULL DEFAULT 'stopped',
|
||||||
|
container_id TEXT,
|
||||||
|
config TEXT NOT NULL DEFAULT '{}',
|
||||||
|
admin_credentials_encrypted TEXT,
|
||||||
|
created_at REAL NOT NULL,
|
||||||
|
updated_at REAL NOT NULL
|
||||||
|
)
|
||||||
|
`);
|
||||||
|
|
||||||
|
query(`
|
||||||
|
CREATE TABLE platform_resources (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
platform_service_id INTEGER NOT NULL,
|
||||||
|
service_id INTEGER NOT NULL,
|
||||||
|
resource_type TEXT NOT NULL,
|
||||||
|
resource_name TEXT NOT NULL,
|
||||||
|
credentials_encrypted TEXT NOT NULL,
|
||||||
|
created_at REAL NOT NULL,
|
||||||
|
FOREIGN KEY (platform_service_id) REFERENCES platform_services(id) ON DELETE CASCADE,
|
||||||
|
FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE
|
||||||
|
)
|
||||||
|
`);
|
||||||
|
|
||||||
|
query(`ALTER TABLE services ADD COLUMN platform_requirements TEXT DEFAULT '{}'`);
|
||||||
|
|
||||||
|
query(
|
||||||
|
'CREATE INDEX IF NOT EXISTS idx_platform_services_type ON platform_services(type)',
|
||||||
|
);
|
||||||
|
query(
|
||||||
|
'CREATE INDEX IF NOT EXISTS idx_platform_resources_service ON platform_resources(service_id)',
|
||||||
|
);
|
||||||
|
query(
|
||||||
|
'CREATE INDEX IF NOT EXISTS idx_platform_resources_platform ON platform_resources(platform_service_id)',
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
41
ts/database/migrations/migration-008-cert-pem-content.ts
Normal file
41
ts/database/migrations/migration-008-cert-pem-content.ts
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
import { BaseMigration } from './base-migration.ts';
|
||||||
|
import type { TQueryFunction } from '../types.ts';
|
||||||
|
|
||||||
|
export class Migration008CertPemContent extends BaseMigration {
|
||||||
|
readonly version = 8;
|
||||||
|
readonly description = 'Convert certificates table to store PEM content';
|
||||||
|
|
||||||
|
up(query: TQueryFunction): void {
|
||||||
|
query(`
|
||||||
|
CREATE TABLE certificates_new (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
domain_id INTEGER NOT NULL,
|
||||||
|
cert_domain TEXT NOT NULL,
|
||||||
|
is_wildcard INTEGER NOT NULL DEFAULT 0,
|
||||||
|
cert_pem TEXT NOT NULL DEFAULT '',
|
||||||
|
key_pem TEXT NOT NULL DEFAULT '',
|
||||||
|
fullchain_pem TEXT NOT NULL DEFAULT '',
|
||||||
|
expiry_date REAL NOT NULL,
|
||||||
|
issuer TEXT NOT NULL,
|
||||||
|
is_valid INTEGER NOT NULL DEFAULT 1,
|
||||||
|
created_at REAL NOT NULL,
|
||||||
|
updated_at REAL NOT NULL,
|
||||||
|
FOREIGN KEY (domain_id) REFERENCES domains(id) ON DELETE CASCADE
|
||||||
|
)
|
||||||
|
`);
|
||||||
|
|
||||||
|
query(`
|
||||||
|
INSERT INTO certificates_new (id, domain_id, cert_domain, is_wildcard, cert_pem, key_pem, fullchain_pem, expiry_date, issuer, is_valid, created_at, updated_at)
|
||||||
|
SELECT id, domain_id, cert_domain, is_wildcard, '', '', '', expiry_date, issuer, 0, created_at, updated_at FROM certificates
|
||||||
|
`);
|
||||||
|
|
||||||
|
query('DROP TABLE certificates');
|
||||||
|
query('ALTER TABLE certificates_new RENAME TO certificates');
|
||||||
|
query(
|
||||||
|
'CREATE INDEX IF NOT EXISTS idx_certificates_domain ON certificates(domain_id)',
|
||||||
|
);
|
||||||
|
query(
|
||||||
|
'CREATE INDEX IF NOT EXISTS idx_certificates_expiry ON certificates(expiry_date)',
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
29
ts/database/migrations/migration-009-backup-system.ts
Normal file
29
ts/database/migrations/migration-009-backup-system.ts
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
import { BaseMigration } from './base-migration.ts';
|
||||||
|
import type { TQueryFunction } from '../types.ts';
|
||||||
|
|
||||||
|
export class Migration009BackupSystem extends BaseMigration {
|
||||||
|
readonly version = 9;
|
||||||
|
readonly description = 'Backup system tables';
|
||||||
|
|
||||||
|
up(query: TQueryFunction): void {
|
||||||
|
query(`ALTER TABLE services ADD COLUMN include_image_in_backup INTEGER DEFAULT 1`);
|
||||||
|
|
||||||
|
query(`
|
||||||
|
CREATE TABLE backups (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
service_id INTEGER NOT NULL,
|
||||||
|
service_name TEXT NOT NULL,
|
||||||
|
filename TEXT NOT NULL,
|
||||||
|
size_bytes INTEGER NOT NULL,
|
||||||
|
created_at REAL NOT NULL,
|
||||||
|
includes_image INTEGER NOT NULL,
|
||||||
|
platform_resources TEXT NOT NULL DEFAULT '[]',
|
||||||
|
checksum TEXT NOT NULL,
|
||||||
|
FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE
|
||||||
|
)
|
||||||
|
`);
|
||||||
|
|
||||||
|
query('CREATE INDEX IF NOT EXISTS idx_backups_service ON backups(service_id)');
|
||||||
|
query('CREATE INDEX IF NOT EXISTS idx_backups_created ON backups(created_at DESC)');
|
||||||
|
}
|
||||||
|
}
|
||||||
39
ts/database/migrations/migration-010-backup-schedules.ts
Normal file
39
ts/database/migrations/migration-010-backup-schedules.ts
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
import { BaseMigration } from './base-migration.ts';
|
||||||
|
import type { TQueryFunction } from '../types.ts';
|
||||||
|
|
||||||
|
export class Migration010BackupSchedules extends BaseMigration {
|
||||||
|
readonly version = 10;
|
||||||
|
readonly description = 'Backup schedules table';
|
||||||
|
|
||||||
|
up(query: TQueryFunction): void {
|
||||||
|
query(`
|
||||||
|
CREATE TABLE backup_schedules (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
service_id INTEGER NOT NULL,
|
||||||
|
service_name TEXT NOT NULL,
|
||||||
|
cron_expression TEXT NOT NULL,
|
||||||
|
retention_tier TEXT NOT NULL,
|
||||||
|
enabled INTEGER NOT NULL DEFAULT 1,
|
||||||
|
last_run_at REAL,
|
||||||
|
next_run_at REAL,
|
||||||
|
last_status TEXT,
|
||||||
|
last_error TEXT,
|
||||||
|
created_at REAL NOT NULL,
|
||||||
|
updated_at REAL NOT NULL,
|
||||||
|
FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE
|
||||||
|
)
|
||||||
|
`);
|
||||||
|
|
||||||
|
query(
|
||||||
|
'CREATE INDEX IF NOT EXISTS idx_backup_schedules_service ON backup_schedules(service_id)',
|
||||||
|
);
|
||||||
|
query(
|
||||||
|
'CREATE INDEX IF NOT EXISTS idx_backup_schedules_enabled ON backup_schedules(enabled)',
|
||||||
|
);
|
||||||
|
|
||||||
|
query('ALTER TABLE backups ADD COLUMN retention_tier TEXT');
|
||||||
|
query(
|
||||||
|
'ALTER TABLE backups ADD COLUMN schedule_id INTEGER REFERENCES backup_schedules(id) ON DELETE SET NULL',
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
54
ts/database/migrations/migration-011-scope-columns.ts
Normal file
54
ts/database/migrations/migration-011-scope-columns.ts
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
import { BaseMigration } from './base-migration.ts';
|
||||||
|
import type { TQueryFunction } from '../types.ts';
|
||||||
|
|
||||||
|
/**
 * Migration 011 — adds scope_type/scope_pattern to backup_schedules and makes
 * the service columns nullable, so a schedule can target more than one service.
 *
 * SQLite cannot alter column constraints in place, so this follows the
 * standard table-rewrite pattern: create new table, copy rows, drop old,
 * rename, recreate indexes.
 */
export class Migration011ScopeColumns extends BaseMigration {
  readonly version = 11;
  readonly description = 'Add scope columns to backup_schedules';

  /**
   * Applies the schema changes.
   * @param query - executes a single SQL statement against the database
   */
  up(query: TQueryFunction): void {
    // New shape: scope_type defaults to 'service'; service_id/service_name
    // are now nullable (they were NOT NULL in migration 010).
    query(`
      CREATE TABLE backup_schedules_new (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        scope_type TEXT NOT NULL DEFAULT 'service',
        scope_pattern TEXT,
        service_id INTEGER,
        service_name TEXT,
        cron_expression TEXT NOT NULL,
        retention_tier TEXT NOT NULL,
        enabled INTEGER NOT NULL DEFAULT 1,
        last_run_at REAL,
        next_run_at REAL,
        last_status TEXT,
        last_error TEXT,
        created_at REAL NOT NULL,
        updated_at REAL NOT NULL,
        FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE
      )
    `);

    // Copy existing rows, classifying every pre-existing schedule as the
    // 'service' scope with no pattern.
    query(`
      INSERT INTO backup_schedules_new (
        id, scope_type, scope_pattern, service_id, service_name, cron_expression,
        retention_tier, enabled, last_run_at, next_run_at, last_status, last_error,
        created_at, updated_at
      )
      SELECT
        id, 'service', NULL, service_id, service_name, cron_expression,
        retention_tier, enabled, last_run_at, next_run_at, last_status, last_error,
        created_at, updated_at
      FROM backup_schedules
    `);

    query('DROP TABLE backup_schedules');
    query('ALTER TABLE backup_schedules_new RENAME TO backup_schedules');
    // Indexes were dropped with the old table, so recreate them (plus a new
    // one on scope_type).
    query(
      'CREATE INDEX IF NOT EXISTS idx_backup_schedules_service ON backup_schedules(service_id)',
    );
    query(
      'CREATE INDEX IF NOT EXISTS idx_backup_schedules_enabled ON backup_schedules(enabled)',
    );
    query(
      'CREATE INDEX IF NOT EXISTS idx_backup_schedules_scope ON backup_schedules(scope_type)',
    );
  }
}
|
||||||
97
ts/database/migrations/migration-012-gfs-retention.ts
Normal file
97
ts/database/migrations/migration-012-gfs-retention.ts
Normal file
@@ -0,0 +1,97 @@
|
|||||||
|
import { BaseMigration } from './base-migration.ts';
|
||||||
|
import type { TQueryFunction } from '../types.ts';
|
||||||
|
|
||||||
|
/**
 * Migration 012 — replaces the single retention_tier string on
 * backup_schedules with four GFS (grandfather-father-son) counters
 * (hourly/daily/weekly/monthly), and drops retention_tier from backups.
 *
 * Both tables are rewritten with the SQLite table-rewrite pattern:
 * create new, copy, drop old, rename, recreate indexes.
 */
export class Migration012GfsRetention extends BaseMigration {
  readonly version = 12;
  readonly description = 'GFS retention policy schema';

  /**
   * Applies the schema changes.
   * @param query - executes a single SQL statement against the database
   */
  up(query: TQueryFunction): void {
    // Recreate backup_schedules with GFS retention columns
    query(`
      CREATE TABLE backup_schedules_new (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        scope_type TEXT NOT NULL DEFAULT 'service',
        scope_pattern TEXT,
        service_id INTEGER,
        service_name TEXT,
        cron_expression TEXT NOT NULL,
        retention_hourly INTEGER NOT NULL DEFAULT 0,
        retention_daily INTEGER NOT NULL DEFAULT 7,
        retention_weekly INTEGER NOT NULL DEFAULT 4,
        retention_monthly INTEGER NOT NULL DEFAULT 12,
        enabled INTEGER NOT NULL DEFAULT 1,
        last_run_at REAL,
        next_run_at REAL,
        last_status TEXT,
        last_error TEXT,
        created_at REAL NOT NULL,
        updated_at REAL NOT NULL,
        FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE
      )
    `);

    // Migrate existing data - convert old retention_tier to new format.
    // Mapping per the CASE expressions below:
    //   hourly  -> always 0
    //   daily   -> 7 only for the 'daily' tier
    //   weekly  -> 4 for 'daily'/'weekly' tiers
    //   monthly -> 24 for 'yearly', otherwise 12 (the ELSE branch makes the
    //              IN-list redundant; every non-'yearly' tier gets 12)
    query(`
      INSERT INTO backup_schedules_new (
        id, scope_type, scope_pattern, service_id, service_name, cron_expression,
        retention_hourly, retention_daily, retention_weekly, retention_monthly,
        enabled, last_run_at, next_run_at, last_status, last_error, created_at, updated_at
      )
      SELECT
        id, scope_type, scope_pattern, service_id, service_name, cron_expression,
        0,
        CASE WHEN retention_tier = 'daily' THEN 7 ELSE 0 END,
        CASE WHEN retention_tier IN ('daily', 'weekly') THEN 4 ELSE 0 END,
        CASE WHEN retention_tier IN ('daily', 'weekly', 'monthly') THEN 12
             WHEN retention_tier = 'yearly' THEN 24 ELSE 12 END,
        enabled, last_run_at, next_run_at, last_status, last_error, created_at, updated_at
      FROM backup_schedules
    `);

    query('DROP TABLE backup_schedules');
    query('ALTER TABLE backup_schedules_new RENAME TO backup_schedules');
    query(
      'CREATE INDEX IF NOT EXISTS idx_backup_schedules_service ON backup_schedules(service_id)',
    );
    query(
      'CREATE INDEX IF NOT EXISTS idx_backup_schedules_enabled ON backup_schedules(enabled)',
    );
    query(
      'CREATE INDEX IF NOT EXISTS idx_backup_schedules_scope ON backup_schedules(scope_type)',
    );

    // Recreate backups table without retention_tier column
    query(`
      CREATE TABLE backups_new (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        service_id INTEGER NOT NULL,
        service_name TEXT NOT NULL,
        filename TEXT NOT NULL,
        size_bytes INTEGER NOT NULL,
        created_at REAL NOT NULL,
        includes_image INTEGER NOT NULL,
        platform_resources TEXT NOT NULL DEFAULT '[]',
        checksum TEXT NOT NULL,
        schedule_id INTEGER REFERENCES backup_schedules(id) ON DELETE SET NULL,
        FOREIGN KEY (service_id) REFERENCES services(id) ON DELETE CASCADE
      )
    `);

    query(`
      INSERT INTO backups_new (
        id, service_id, service_name, filename, size_bytes, created_at,
        includes_image, platform_resources, checksum, schedule_id
      )
      SELECT
        id, service_id, service_name, filename, size_bytes, created_at,
        includes_image, platform_resources, checksum, schedule_id
      FROM backups
    `);

    query('DROP TABLE backups');
    query('ALTER TABLE backups_new RENAME TO backups');
    query('CREATE INDEX IF NOT EXISTS idx_backups_service ON backups(service_id)');
    query('CREATE INDEX IF NOT EXISTS idx_backups_created ON backups(created_at DESC)');
    query('CREATE INDEX IF NOT EXISTS idx_backups_schedule ON backups(schedule_id)');
  }
}
|
||||||
100
ts/database/migrations/migration-runner.ts
Normal file
100
ts/database/migrations/migration-runner.ts
Normal file
@@ -0,0 +1,100 @@
|
|||||||
|
/**
|
||||||
|
* Migration runner - discovers, orders, and executes database migrations.
|
||||||
|
* Mirrors the pattern from @serve.zone/nupst.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type { TQueryFunction } from '../types.ts';
|
||||||
|
import { logger } from '../../logging.ts';
|
||||||
|
import { getErrorMessage } from '../../utils/error.ts';
|
||||||
|
|
||||||
|
import { Migration001Initial } from './migration-001-initial.ts';
|
||||||
|
import { Migration002TimestampsToReal } from './migration-002-timestamps-to-real.ts';
|
||||||
|
import { Migration003DomainManagement } from './migration-003-domain-management.ts';
|
||||||
|
import { Migration004RegistryColumns } from './migration-004-registry-columns.ts';
|
||||||
|
import { Migration005RegistryTokens } from './migration-005-registry-tokens.ts';
|
||||||
|
import { Migration006DropRegistryToken } from './migration-006-drop-registry-token.ts';
|
||||||
|
import { Migration007PlatformServices } from './migration-007-platform-services.ts';
|
||||||
|
import { Migration008CertPemContent } from './migration-008-cert-pem-content.ts';
|
||||||
|
import { Migration009BackupSystem } from './migration-009-backup-system.ts';
|
||||||
|
import { Migration010BackupSchedules } from './migration-010-backup-schedules.ts';
|
||||||
|
import { Migration011ScopeColumns } from './migration-011-scope-columns.ts';
|
||||||
|
import { Migration012GfsRetention } from './migration-012-gfs-retention.ts';
|
||||||
|
import type { BaseMigration } from './base-migration.ts';
|
||||||
|
|
||||||
|
export class MigrationRunner {
|
||||||
|
private query: TQueryFunction;
|
||||||
|
private migrations: BaseMigration[];
|
||||||
|
|
||||||
|
constructor(query: TQueryFunction) {
|
||||||
|
this.query = query;
|
||||||
|
|
||||||
|
// Register all migrations in order
|
||||||
|
this.migrations = [
|
||||||
|
new Migration001Initial(),
|
||||||
|
new Migration002TimestampsToReal(),
|
||||||
|
new Migration003DomainManagement(),
|
||||||
|
new Migration004RegistryColumns(),
|
||||||
|
new Migration005RegistryTokens(),
|
||||||
|
new Migration006DropRegistryToken(),
|
||||||
|
new Migration007PlatformServices(),
|
||||||
|
new Migration008CertPemContent(),
|
||||||
|
new Migration009BackupSystem(),
|
||||||
|
new Migration010BackupSchedules(),
|
||||||
|
new Migration011ScopeColumns(),
|
||||||
|
new Migration012GfsRetention(),
|
||||||
|
].sort((a, b) => a.version - b.version);
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Run all pending migrations */
|
||||||
|
run(): void {
|
||||||
|
try {
|
||||||
|
const currentVersion = this.getMigrationVersion();
|
||||||
|
logger.info(`Current database migration version: ${currentVersion}`);
|
||||||
|
|
||||||
|
let applied = 0;
|
||||||
|
for (const migration of this.migrations) {
|
||||||
|
if (migration.version <= currentVersion) continue;
|
||||||
|
|
||||||
|
logger.info(`Running ${migration.getName()}...`);
|
||||||
|
migration.up(this.query);
|
||||||
|
this.setMigrationVersion(migration.version);
|
||||||
|
logger.success(`${migration.getName()} completed`);
|
||||||
|
applied++;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (applied > 0) {
|
||||||
|
logger.success(`Applied ${applied} migration(s)`);
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`Migration failed: ${getErrorMessage(error)}`);
|
||||||
|
if (error instanceof Error && error.stack) {
|
||||||
|
logger.error(`Stack: ${error.stack}`);
|
||||||
|
}
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Get current migration version from the migrations table */
|
||||||
|
private getMigrationVersion(): number {
|
||||||
|
try {
|
||||||
|
const result = this.query<{ version?: number | null; [key: number]: unknown }>(
|
||||||
|
'SELECT MAX(version) as version FROM migrations',
|
||||||
|
);
|
||||||
|
if (result.length === 0) return 0;
|
||||||
|
|
||||||
|
const versionValue = result[0].version ?? (result[0] as Record<number, unknown>)[0];
|
||||||
|
return versionValue !== null && versionValue !== undefined ? Number(versionValue) : 0;
|
||||||
|
} catch {
|
||||||
|
// Table might not exist yet on fresh databases
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Record a migration version as applied */
|
||||||
|
private setMigrationVersion(version: number): void {
|
||||||
|
this.query('INSERT INTO migrations (version, applied_at) VALUES (?, ?)', [
|
||||||
|
version,
|
||||||
|
Date.now(),
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
}
|
||||||
249
ts/database/repositories/backup.repository.ts
Normal file
249
ts/database/repositories/backup.repository.ts
Normal file
@@ -0,0 +1,249 @@
|
|||||||
|
/**
|
||||||
|
* Backup Repository
|
||||||
|
* Handles CRUD operations for backups and backup_schedules tables
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { BaseRepository } from '../base.repository.ts';
|
||||||
|
import type {
|
||||||
|
IBackup,
|
||||||
|
IBackupSchedule,
|
||||||
|
IBackupScheduleUpdate,
|
||||||
|
TPlatformServiceType,
|
||||||
|
TBackupScheduleScope,
|
||||||
|
IRetentionPolicy,
|
||||||
|
} from '../../types.ts';
|
||||||
|
|
||||||
|
/**
 * Repository for the `backups` and `backup_schedules` tables.
 * All methods go through the inherited query() and map snake_case rows to
 * camelCase domain objects.
 */
export class BackupRepository extends BaseRepository {
  // ============ Backup CRUD ============

  /**
   * Inserts a backup row and returns the stored record (with its new id).
   * @param backup - backup fields; id is assigned by the database
   */
  create(backup: Omit<IBackup, 'id'>): IBackup {
    this.query(
      `INSERT INTO backups (
        service_id, service_name, filename, size_bytes, created_at,
        includes_image, platform_resources, checksum, schedule_id
      ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`,
      [
        backup.serviceId,
        backup.serviceName,
        backup.filename,
        backup.sizeBytes,
        backup.createdAt,
        backup.includesImage ? 1 : 0,
        // platform_resources is stored as a JSON array string
        JSON.stringify(backup.platformResources),
        backup.checksum,
        backup.scheduleId ?? null,
      ]
    );

    // Get the created backup by looking for the most recent one with matching
    // filename.
    // NOTE(review): this assumes the filename uniquely identifies the new row
    // (or at least that the highest id with that filename is ours); a
    // last_insert_rowid()/RETURNING lookup would be race-free — confirm the
    // query API supports it.
    const rows = this.query(
      'SELECT * FROM backups WHERE filename = ? ORDER BY id DESC LIMIT 1',
      [backup.filename]
    );

    return this.rowToBackup(rows[0]);
  }

  /** Returns the backup with the given id, or null if absent. */
  getById(id: number): IBackup | null {
    const rows = this.query('SELECT * FROM backups WHERE id = ?', [id]);
    return rows.length > 0 ? this.rowToBackup(rows[0]) : null;
  }

  /** All backups for a service, newest first. */
  getByService(serviceId: number): IBackup[] {
    const rows = this.query(
      'SELECT * FROM backups WHERE service_id = ? ORDER BY created_at DESC',
      [serviceId]
    );
    return rows.map((row) => this.rowToBackup(row));
  }

  /** All backups, newest first. */
  getAll(): IBackup[] {
    const rows = this.query('SELECT * FROM backups ORDER BY created_at DESC');
    return rows.map((row) => this.rowToBackup(row));
  }

  /** Deletes one backup row by id (no-op if the id does not exist). */
  delete(id: number): void {
    this.query('DELETE FROM backups WHERE id = ?', [id]);
  }

  /** Deletes every backup row belonging to a service. */
  deleteByService(serviceId: number): void {
    this.query('DELETE FROM backups WHERE service_id = ?', [serviceId]);
  }

  /** All backups produced by a schedule, newest first. */
  getBySchedule(scheduleId: number): IBackup[] {
    const rows = this.query(
      'SELECT * FROM backups WHERE schedule_id = ? ORDER BY created_at DESC',
      [scheduleId]
    );
    return rows.map((row) => this.rowToBackup(row));
  }

  /**
   * Maps a raw backups row to an IBackup.
   * A malformed platform_resources JSON string degrades to an empty array
   * rather than throwing.
   */
  private rowToBackup(row: any): IBackup {
    let platformResources: TPlatformServiceType[] = [];
    const platformResourcesRaw = row.platform_resources;
    if (platformResourcesRaw) {
      try {
        platformResources = JSON.parse(String(platformResourcesRaw));
      } catch {
        platformResources = [];
      }
    }

    return {
      id: Number(row.id),
      serviceId: Number(row.service_id),
      serviceName: String(row.service_name),
      filename: String(row.filename),
      sizeBytes: Number(row.size_bytes),
      createdAt: Number(row.created_at),
      includesImage: Boolean(row.includes_image),
      platformResources,
      checksum: String(row.checksum),
      // Truthy check is safe here: schedule ids start at 1 (AUTOINCREMENT)
      scheduleId: row.schedule_id ? Number(row.schedule_id) : undefined,
    };
  }

  // ============ Backup Schedule CRUD ============

  /**
   * Inserts a schedule row and returns the stored record.
   * created_at/updated_at are set to the insertion time, overriding any
   * timestamps on the input object.
   */
  createSchedule(schedule: Omit<IBackupSchedule, 'id'>): IBackupSchedule {
    const now = Date.now();
    this.query(
      `INSERT INTO backup_schedules (
        scope_type, scope_pattern, service_id, service_name, cron_expression,
        retention_hourly, retention_daily, retention_weekly, retention_monthly,
        enabled, last_run_at, next_run_at, last_status, last_error, created_at, updated_at
      ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
      [
        schedule.scopeType,
        schedule.scopePattern ?? null,
        schedule.serviceId ?? null,
        schedule.serviceName ?? null,
        schedule.cronExpression,
        schedule.retention.hourly,
        schedule.retention.daily,
        schedule.retention.weekly,
        schedule.retention.monthly,
        schedule.enabled ? 1 : 0,
        schedule.lastRunAt,
        schedule.nextRunAt,
        schedule.lastStatus,
        schedule.lastError,
        now,
        now,
      ]
    );

    // Get the created schedule by looking for the most recent one with
    // matching scope.
    // NOTE(review): (scope_type, cron_expression) is not guaranteed unique;
    // like create() above, this relies on "highest id wins" — a
    // last_insert_rowid()/RETURNING lookup would be safer if available.
    const rows = this.query(
      'SELECT * FROM backup_schedules WHERE scope_type = ? AND cron_expression = ? ORDER BY id DESC LIMIT 1',
      [schedule.scopeType, schedule.cronExpression]
    );

    return this.rowToSchedule(rows[0]);
  }

  /** Returns the schedule with the given id, or null if absent. */
  getScheduleById(id: number): IBackupSchedule | null {
    const rows = this.query('SELECT * FROM backup_schedules WHERE id = ?', [id]);
    return rows.length > 0 ? this.rowToSchedule(rows[0]) : null;
  }

  /** All schedules targeting a specific service, newest first. */
  getSchedulesByService(serviceId: number): IBackupSchedule[] {
    const rows = this.query(
      'SELECT * FROM backup_schedules WHERE service_id = ? ORDER BY created_at DESC',
      [serviceId]
    );
    return rows.map((row) => this.rowToSchedule(row));
  }

  /** Enabled schedules ordered by next run time (soonest first). */
  getEnabledSchedules(): IBackupSchedule[] {
    const rows = this.query(
      'SELECT * FROM backup_schedules WHERE enabled = 1 ORDER BY next_run_at ASC'
    );
    return rows.map((row) => this.rowToSchedule(row));
  }

  /** All schedules, newest first. */
  getAllSchedules(): IBackupSchedule[] {
    const rows = this.query('SELECT * FROM backup_schedules ORDER BY created_at DESC');
    return rows.map((row) => this.rowToSchedule(row));
  }

  /**
   * Partial update: builds a SET clause from only the fields present on
   * `updates`. No-ops (without touching updated_at) when nothing is supplied.
   * Run-state fields (lastRunAt/nextRunAt/lastStatus/lastError) are accepted
   * alongside user-editable fields.
   */
  updateSchedule(id: number, updates: IBackupScheduleUpdate & { lastRunAt?: number; nextRunAt?: number; lastStatus?: 'success' | 'failed' | null; lastError?: string | null }): void {
    const setClauses: string[] = [];
    const params: (string | number | null)[] = [];

    if (updates.cronExpression !== undefined) {
      setClauses.push('cron_expression = ?');
      params.push(updates.cronExpression);
    }
    // A retention update always writes all four GFS counters together.
    if (updates.retention !== undefined) {
      setClauses.push('retention_hourly = ?');
      params.push(updates.retention.hourly);
      setClauses.push('retention_daily = ?');
      params.push(updates.retention.daily);
      setClauses.push('retention_weekly = ?');
      params.push(updates.retention.weekly);
      setClauses.push('retention_monthly = ?');
      params.push(updates.retention.monthly);
    }
    if (updates.enabled !== undefined) {
      setClauses.push('enabled = ?');
      params.push(updates.enabled ? 1 : 0);
    }
    if (updates.lastRunAt !== undefined) {
      setClauses.push('last_run_at = ?');
      params.push(updates.lastRunAt);
    }
    if (updates.nextRunAt !== undefined) {
      setClauses.push('next_run_at = ?');
      params.push(updates.nextRunAt);
    }
    if (updates.lastStatus !== undefined) {
      setClauses.push('last_status = ?');
      params.push(updates.lastStatus);
    }
    if (updates.lastError !== undefined) {
      setClauses.push('last_error = ?');
      params.push(updates.lastError);
    }

    if (setClauses.length === 0) return;

    setClauses.push('updated_at = ?');
    params.push(Date.now());
    params.push(id); // final placeholder belongs to the WHERE clause

    this.query(`UPDATE backup_schedules SET ${setClauses.join(', ')} WHERE id = ?`, params);
  }

  /** Deletes one schedule row by id. */
  deleteSchedule(id: number): void {
    this.query('DELETE FROM backup_schedules WHERE id = ?', [id]);
  }

  /** Deletes every schedule targeting a service. */
  deleteSchedulesByService(serviceId: number): void {
    this.query('DELETE FROM backup_schedules WHERE service_id = ?', [serviceId]);
  }

  /**
   * Maps a raw backup_schedules row to an IBackupSchedule.
   * Retention columns fall back to the schema defaults (0/7/4/12) when NULL.
   * Note: the truthy checks map a stored 0 for last_run_at/next_run_at to
   * null — acceptable only because 0 is not a meaningful timestamp here.
   */
  private rowToSchedule(row: any): IBackupSchedule {
    return {
      id: Number(row.id),
      scopeType: (String(row.scope_type) || 'service') as TBackupScheduleScope,
      scopePattern: row.scope_pattern ? String(row.scope_pattern) : undefined,
      serviceId: row.service_id ? Number(row.service_id) : undefined,
      serviceName: row.service_name ? String(row.service_name) : undefined,
      cronExpression: String(row.cron_expression),
      retention: {
        hourly: Number(row.retention_hourly ?? 0),
        daily: Number(row.retention_daily ?? 7),
        weekly: Number(row.retention_weekly ?? 4),
        monthly: Number(row.retention_monthly ?? 12),
      } as IRetentionPolicy,
      enabled: Boolean(row.enabled),
      lastRunAt: row.last_run_at ? Number(row.last_run_at) : null,
      nextRunAt: row.next_run_at ? Number(row.next_run_at) : null,
      lastStatus: row.last_status ? (String(row.last_status) as 'success' | 'failed') : null,
      lastError: row.last_error ? String(row.last_error) : null,
      createdAt: Number(row.created_at),
      updatedAt: Number(row.updated_at),
    };
  }
}
|
||||||
@@ -8,3 +8,4 @@ export { CertificateRepository } from './certificate.repository.ts';
|
|||||||
export { AuthRepository } from './auth.repository.ts';
|
export { AuthRepository } from './auth.repository.ts';
|
||||||
export { MetricsRepository } from './metrics.repository.ts';
|
export { MetricsRepository } from './metrics.repository.ts';
|
||||||
export { PlatformRepository } from './platform.repository.ts';
|
export { PlatformRepository } from './platform.repository.ts';
|
||||||
|
export { BackupRepository } from './backup.repository.ts';
|
||||||
|
|||||||
@@ -119,6 +119,10 @@ export class ServiceRepository extends BaseRepository {
|
|||||||
fields.push('platform_requirements = ?');
|
fields.push('platform_requirements = ?');
|
||||||
values.push(JSON.stringify(updates.platformRequirements));
|
values.push(JSON.stringify(updates.platformRequirements));
|
||||||
}
|
}
|
||||||
|
if (updates.includeImageInBackup !== undefined) {
|
||||||
|
fields.push('include_image_in_backup = ?');
|
||||||
|
values.push(updates.includeImageInBackup ? 1 : 0);
|
||||||
|
}
|
||||||
|
|
||||||
fields.push('updated_at = ?');
|
fields.push('updated_at = ?');
|
||||||
values.push(Date.now());
|
values.push(Date.now());
|
||||||
@@ -172,6 +176,9 @@ export class ServiceRepository extends BaseRepository {
|
|||||||
autoUpdateOnPush: row.auto_update_on_push ? Boolean(row.auto_update_on_push) : undefined,
|
autoUpdateOnPush: row.auto_update_on_push ? Boolean(row.auto_update_on_push) : undefined,
|
||||||
imageDigest: row.image_digest ? String(row.image_digest) : undefined,
|
imageDigest: row.image_digest ? String(row.image_digest) : undefined,
|
||||||
platformRequirements,
|
platformRequirements,
|
||||||
|
includeImageInBackup: row.include_image_in_backup !== undefined
|
||||||
|
? Boolean(row.include_image_in_backup)
|
||||||
|
: true, // Default to true
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
76
ts/opsserver/classes.opsserver.ts
Normal file
76
ts/opsserver/classes.opsserver.ts
Normal file
@@ -0,0 +1,76 @@
|
|||||||
|
import * as plugins from '../plugins.ts';
|
||||||
|
import { logger } from '../logging.ts';
|
||||||
|
import type { Onebox } from '../classes/onebox.ts';
|
||||||
|
import * as handlers from './handlers/index.ts';
|
||||||
|
import { files as bundledFiles } from '../../ts_bundled/bundle.ts';
|
||||||
|
|
||||||
|
/**
 * Operations server: serves the bundled admin UI and chains every
 * TypedRequest handler into one router hierarchy
 * (server router -> this.typedrouter -> per-handler routers).
 */
export class OpsServer {
  public oneboxRef: Onebox;
  public typedrouter = new plugins.typedrequest.TypedRouter();
  // Assigned in start(); `!` because construction is deferred until then.
  public server!: plugins.typedserver.utilityservers.UtilityWebsiteServer;

  // Handler instances — all assigned in setupHandlers()
  public adminHandler!: handlers.AdminHandler;
  public statusHandler!: handlers.StatusHandler;
  public servicesHandler!: handlers.ServicesHandler;
  public platformHandler!: handlers.PlatformHandler;
  public sslHandler!: handlers.SslHandler;
  public domainsHandler!: handlers.DomainsHandler;
  public dnsHandler!: handlers.DnsHandler;
  public registryHandler!: handlers.RegistryHandler;
  public networkHandler!: handlers.NetworkHandler;
  public backupsHandler!: handlers.BackupsHandler;
  public schedulesHandler!: handlers.SchedulesHandler;
  public settingsHandler!: handlers.SettingsHandler;
  public logsHandler!: handlers.LogsHandler;

  /** @param oneboxRef - back-reference to the owning Onebox instance */
  constructor(oneboxRef: Onebox) {
    this.oneboxRef = oneboxRef;
  }

  /**
   * Creates the web server, registers all handlers, and starts listening.
   * @param port - TCP port to bind (default 3000)
   */
  public async start(port = 3000) {
    this.server = new plugins.typedserver.utilityservers.UtilityWebsiteServer({
      domain: 'localhost',
      feedMetadata: undefined,
      bundledContent: bundledFiles, // pre-built UI assets from ts_bundled
    });

    // Chain typedrouters: server -> opsServer -> individual handlers.
    // Must happen before handlers register themselves on this.typedrouter.
    this.server.typedrouter.addTypedRouter(this.typedrouter);

    // Set up all handlers
    await this.setupHandlers();

    await this.server.start(port);
    logger.success(`OpsServer started on http://localhost:${port}`);
  }

  /** Instantiates every handler; each registers its routes on construction. */
  private async setupHandlers(): Promise<void> {
    // AdminHandler requires async initialization for JWT key generation,
    // so it is created (and awaited) before the rest.
    this.adminHandler = new handlers.AdminHandler(this);
    await this.adminHandler.initialize();

    // All other handlers self-register in their constructors
    this.statusHandler = new handlers.StatusHandler(this);
    this.servicesHandler = new handlers.ServicesHandler(this);
    this.platformHandler = new handlers.PlatformHandler(this);
    this.sslHandler = new handlers.SslHandler(this);
    this.domainsHandler = new handlers.DomainsHandler(this);
    this.dnsHandler = new handlers.DnsHandler(this);
    this.registryHandler = new handlers.RegistryHandler(this);
    this.networkHandler = new handlers.NetworkHandler(this);
    this.backupsHandler = new handlers.BackupsHandler(this);
    this.schedulesHandler = new handlers.SchedulesHandler(this);
    this.settingsHandler = new handlers.SettingsHandler(this);
    this.logsHandler = new handlers.LogsHandler(this);

    logger.success('OpsServer TypedRequest handlers initialized');
  }

  /** Stops the underlying web server if it was started. */
  public async stop() {
    if (this.server) {
      await this.server.stop();
      logger.success('OpsServer stopped');
    }
  }
}
|
||||||
175
ts/opsserver/handlers/admin.handler.ts
Normal file
175
ts/opsserver/handlers/admin.handler.ts
Normal file
@@ -0,0 +1,175 @@
|
|||||||
|
import * as plugins from '../../plugins.ts';
|
||||||
|
import { logger } from '../../logging.ts';
|
||||||
|
import type { OpsServer } from '../classes.opsserver.ts';
|
||||||
|
import * as interfaces from '../../../ts_interfaces/index.ts';
|
||||||
|
|
||||||
|
export interface IJwtData {
|
||||||
|
userId: string;
|
||||||
|
status: 'loggedIn' | 'loggedOut';
|
||||||
|
expiresAt: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export class AdminHandler {
|
||||||
|
public typedrouter = new plugins.typedrequest.TypedRouter();
|
||||||
|
public smartjwtInstance!: plugins.smartjwt.SmartJwt<IJwtData>;
|
||||||
|
|
||||||
|
constructor(private opsServerRef: OpsServer) {
|
||||||
|
this.opsServerRef.typedrouter.addTypedRouter(this.typedrouter);
|
||||||
|
}
|
||||||
|
|
||||||
|
public async initialize(): Promise<void> {
|
||||||
|
this.smartjwtInstance = new plugins.smartjwt.SmartJwt();
|
||||||
|
await this.smartjwtInstance.init();
|
||||||
|
await this.smartjwtInstance.createNewKeyPair();
|
||||||
|
this.registerHandlers();
|
||||||
|
}
|
||||||
|
|
||||||
|
private registerHandlers(): void {
|
||||||
|
// Login
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_AdminLoginWithUsernameAndPassword>(
|
||||||
|
'adminLoginWithUsernameAndPassword',
|
||||||
|
async (dataArg) => {
|
||||||
|
try {
|
||||||
|
const user = this.opsServerRef.oneboxRef.database.getUserByUsername(dataArg.username);
|
||||||
|
if (!user) {
|
||||||
|
throw new plugins.typedrequest.TypedResponseError('Invalid credentials');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify password (base64 comparison to match existing DB scheme)
|
||||||
|
const passwordHash = btoa(dataArg.password);
|
||||||
|
if (passwordHash !== user.passwordHash) {
|
||||||
|
throw new plugins.typedrequest.TypedResponseError('Invalid credentials');
|
||||||
|
}
|
||||||
|
|
||||||
|
const expiresAt = Date.now() + 24 * 3600 * 1000;
|
||||||
|
const userId = String(user.id || user.username);
|
||||||
|
const jwt = await this.smartjwtInstance.createJWT({
|
||||||
|
userId,
|
||||||
|
status: 'loggedIn',
|
||||||
|
expiresAt,
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.info(`User logged in: ${user.username}`);
|
||||||
|
|
||||||
|
return {
|
||||||
|
identity: {
|
||||||
|
jwt,
|
||||||
|
userId,
|
||||||
|
username: user.username,
|
||||||
|
expiresAt,
|
||||||
|
role: user.role,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
if (error instanceof plugins.typedrequest.TypedResponseError) throw error;
|
||||||
|
throw new plugins.typedrequest.TypedResponseError('Login failed');
|
||||||
|
}
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Logout
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_AdminLogout>(
|
||||||
|
'adminLogout',
|
||||||
|
async (_dataArg) => {
|
||||||
|
return { ok: true };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Verify Identity
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_VerifyIdentity>(
|
||||||
|
'verifyIdentity',
|
||||||
|
async (dataArg) => {
|
||||||
|
if (!dataArg.identity?.jwt) {
|
||||||
|
return { valid: false };
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
const jwtData = await this.smartjwtInstance.verifyJWTAndGetData(dataArg.identity.jwt);
|
||||||
|
if (jwtData.expiresAt < Date.now()) return { valid: false };
|
||||||
|
if (jwtData.status !== 'loggedIn') return { valid: false };
|
||||||
|
return {
|
||||||
|
valid: true,
|
||||||
|
identity: {
|
||||||
|
jwt: dataArg.identity.jwt,
|
||||||
|
userId: jwtData.userId,
|
||||||
|
username: dataArg.identity.username,
|
||||||
|
expiresAt: jwtData.expiresAt,
|
||||||
|
role: dataArg.identity.role,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
} catch {
|
||||||
|
return { valid: false };
|
||||||
|
}
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Change Password
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_ChangePassword>(
|
||||||
|
'changePassword',
|
||||||
|
async (dataArg) => {
|
||||||
|
await this.requireValidIdentity(dataArg);
|
||||||
|
const user = this.opsServerRef.oneboxRef.database.getUserByUsername(dataArg.identity.username);
|
||||||
|
if (!user) {
|
||||||
|
throw new plugins.typedrequest.TypedResponseError('User not found');
|
||||||
|
}
|
||||||
|
|
||||||
|
const currentHash = btoa(dataArg.currentPassword);
|
||||||
|
if (currentHash !== user.passwordHash) {
|
||||||
|
throw new plugins.typedrequest.TypedResponseError('Current password is incorrect');
|
||||||
|
}
|
||||||
|
|
||||||
|
const newHash = btoa(dataArg.newPassword);
|
||||||
|
this.opsServerRef.oneboxRef.database.updateUserPassword(user.username, newHash);
|
||||||
|
logger.info(`Password changed for user: ${user.username}`);
|
||||||
|
|
||||||
|
return { ok: true };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
private async requireValidIdentity(dataArg: { identity: interfaces.data.IIdentity }): Promise<void> {
|
||||||
|
const passed = await this.validIdentityGuard.exec({ identity: dataArg.identity });
|
||||||
|
if (!passed) {
|
||||||
|
throw new plugins.typedrequest.TypedResponseError('Valid identity required');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Guard that accepts an identity only when its JWT verifies, has not expired,
// carries status 'loggedIn', and agrees with the userId/expiresAt fields the
// client presented (so a stale or tampered identity object is rejected even
// if the embedded token itself is valid).
public validIdentityGuard = new plugins.smartguard.Guard<{
  identity: interfaces.data.IIdentity;
}>(
  async (checkArg) => {
    const identity = checkArg.identity;
    if (!identity?.jwt) {
      return false;
    }
    try {
      const verified = await this.smartjwtInstance.verifyJWTAndGetData(identity.jwt);
      return (
        verified.expiresAt >= Date.now() &&
        verified.status === 'loggedIn' &&
        identity.expiresAt === verified.expiresAt &&
        identity.userId === verified.userId
      );
    } catch {
      // Unparsable token or bad signature -> not authenticated.
      return false;
    }
  },
  { failedHint: 'identity is not valid', name: 'validIdentityGuard' },
);
|
||||||
|
|
||||||
|
// Guard that layers an admin-role check on top of validIdentityGuard:
// the identity must first pass full JWT validation, then carry role 'admin'.
public adminIdentityGuard = new plugins.smartguard.Guard<{
  identity: interfaces.data.IIdentity;
}>(
  async (checkArg) => {
    const identityValid = await this.validIdentityGuard.exec(checkArg);
    return identityValid && checkArg.identity.role === 'admin';
  },
  { failedHint: 'user is not admin', name: 'adminIdentityGuard' },
);
|
||||||
|
}
|
||||||
100
ts/opsserver/handlers/backups.handler.ts
Normal file
100
ts/opsserver/handlers/backups.handler.ts
Normal file
@@ -0,0 +1,100 @@
|
|||||||
|
import * as plugins from '../../plugins.ts';
|
||||||
|
import type { OpsServer } from '../classes.opsserver.ts';
|
||||||
|
import * as interfaces from '../../../ts_interfaces/index.ts';
|
||||||
|
import { requireValidIdentity } from '../helpers/guards.ts';
|
||||||
|
|
||||||
|
export class BackupsHandler {
|
||||||
|
public typedrouter = new plugins.typedrequest.TypedRouter();
|
||||||
|
|
||||||
|
constructor(private opsServerRef: OpsServer) {
|
||||||
|
this.opsServerRef.typedrouter.addTypedRouter(this.typedrouter);
|
||||||
|
this.registerHandlers();
|
||||||
|
}
|
||||||
|
|
||||||
|
private registerHandlers(): void {
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetBackups>(
|
||||||
|
'getBackups',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const backups = this.opsServerRef.oneboxRef.backupManager.listBackups();
|
||||||
|
return { backups };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetBackup>(
|
||||||
|
'getBackup',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const backup = this.opsServerRef.oneboxRef.database.getBackupById(dataArg.backupId);
|
||||||
|
if (!backup) {
|
||||||
|
throw new plugins.typedrequest.TypedResponseError('Backup not found');
|
||||||
|
}
|
||||||
|
return { backup };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_DeleteBackup>(
|
||||||
|
'deleteBackup',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
await this.opsServerRef.oneboxRef.backupManager.deleteBackup(dataArg.backupId);
|
||||||
|
return { ok: true };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_RestoreBackup>(
|
||||||
|
'restoreBackup',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const backupPath = this.opsServerRef.oneboxRef.backupManager.getBackupFilePath(dataArg.backupId);
|
||||||
|
if (!backupPath) {
|
||||||
|
throw new plugins.typedrequest.TypedResponseError('Backup file not found');
|
||||||
|
}
|
||||||
|
const rawResult = await this.opsServerRef.oneboxRef.backupManager.restoreBackup(
|
||||||
|
backupPath,
|
||||||
|
dataArg.options,
|
||||||
|
);
|
||||||
|
return {
|
||||||
|
result: {
|
||||||
|
service: {
|
||||||
|
name: rawResult.service.name,
|
||||||
|
status: rawResult.service.status,
|
||||||
|
},
|
||||||
|
platformResourcesRestored: rawResult.platformResourcesRestored,
|
||||||
|
warnings: rawResult.warnings,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_DownloadBackup>(
|
||||||
|
'downloadBackup',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const backup = this.opsServerRef.oneboxRef.database.getBackupById(dataArg.backupId);
|
||||||
|
if (!backup) {
|
||||||
|
throw new plugins.typedrequest.TypedResponseError('Backup not found');
|
||||||
|
}
|
||||||
|
const filePath = this.opsServerRef.oneboxRef.backupManager.getBackupFilePath(dataArg.backupId);
|
||||||
|
if (!filePath) {
|
||||||
|
throw new plugins.typedrequest.TypedResponseError('Backup file not found');
|
||||||
|
}
|
||||||
|
// Return a download URL that the client can fetch directly
|
||||||
|
return {
|
||||||
|
downloadUrl: `/api/backups/${dataArg.backupId}/download`,
|
||||||
|
filename: backup.filename,
|
||||||
|
};
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
65
ts/opsserver/handlers/dns.handler.ts
Normal file
65
ts/opsserver/handlers/dns.handler.ts
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
import * as plugins from '../../plugins.ts';
|
||||||
|
import type { OpsServer } from '../classes.opsserver.ts';
|
||||||
|
import * as interfaces from '../../../ts_interfaces/index.ts';
|
||||||
|
import { requireValidIdentity } from '../helpers/guards.ts';
|
||||||
|
|
||||||
|
export class DnsHandler {
|
||||||
|
public typedrouter = new plugins.typedrequest.TypedRouter();
|
||||||
|
|
||||||
|
constructor(private opsServerRef: OpsServer) {
|
||||||
|
this.opsServerRef.typedrouter.addTypedRouter(this.typedrouter);
|
||||||
|
this.registerHandlers();
|
||||||
|
}
|
||||||
|
|
||||||
|
private registerHandlers(): void {
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetDnsRecords>(
|
||||||
|
'getDnsRecords',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const records = this.opsServerRef.oneboxRef.dns.listDNSRecords();
|
||||||
|
return { records };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_CreateDnsRecord>(
|
||||||
|
'createDnsRecord',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
await this.opsServerRef.oneboxRef.dns.addDNSRecord(dataArg.domain, dataArg.value);
|
||||||
|
const records = this.opsServerRef.oneboxRef.dns.listDNSRecords();
|
||||||
|
const record = records.find((r: any) => r.domain === dataArg.domain);
|
||||||
|
return { record: record! };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_DeleteDnsRecord>(
|
||||||
|
'deleteDnsRecord',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
await this.opsServerRef.oneboxRef.dns.removeDNSRecord(dataArg.domain);
|
||||||
|
return { ok: true };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_SyncDns>(
|
||||||
|
'syncDns',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
if (!this.opsServerRef.oneboxRef.dns.isConfigured()) {
|
||||||
|
throw new plugins.typedrequest.TypedResponseError('DNS manager not configured');
|
||||||
|
}
|
||||||
|
await this.opsServerRef.oneboxRef.dns.syncFromCloudflare();
|
||||||
|
const records = this.opsServerRef.oneboxRef.dns.listDNSRecords();
|
||||||
|
return { records };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
101
ts/opsserver/handlers/domains.handler.ts
Normal file
101
ts/opsserver/handlers/domains.handler.ts
Normal file
@@ -0,0 +1,101 @@
|
|||||||
|
import * as plugins from '../../plugins.ts';
|
||||||
|
import type { OpsServer } from '../classes.opsserver.ts';
|
||||||
|
import * as interfaces from '../../../ts_interfaces/index.ts';
|
||||||
|
import { requireValidIdentity } from '../helpers/guards.ts';
|
||||||
|
|
||||||
|
/**
 * Registers domain inspection endpoints (list, detail, Cloudflare sync).
 * Builds enriched "domain view" objects that combine the stored domain row
 * with its certificates, cert requirements, a service count, and a derived
 * certificate status.
 */
export class DomainsHandler {
  public typedrouter = new plugins.typedrequest.TypedRouter();

  constructor(private opsServerRef: OpsServer) {
    // Mount this sub-router into the server-wide router, then register all
    // handlers once at construction time.
    this.opsServerRef.typedrouter.addTypedRouter(this.typedrouter);
    this.registerHandlers();
  }

  /**
   * Builds one IDomainDetail per stored domain, joining certificates,
   * pending cert requirements, and a count of services under the domain.
   */
  private buildDomainViews(): interfaces.data.IDomainDetail[] {
    const domains = this.opsServerRef.oneboxRef.database.getAllDomains();
    const allServices = this.opsServerRef.oneboxRef.database.getAllServices();

    return domains.map((domain: any) => {
      // NOTE(review): `domain.id!` assumes every stored domain row has an id —
      // presumably guaranteed by the database layer; confirm.
      const certificates = this.opsServerRef.oneboxRef.database.getCertificatesByDomain(domain.id!);
      const requirements = this.opsServerRef.oneboxRef.database.getCertRequirementsByDomain(domain.id!);

      // Count services whose domain falls under this base domain.
      const serviceCount = allServices.filter((service: any) => {
        if (!service.domain) return false;
        // Derive the base domain from the last two labels.
        // NOTE(review): this misclassifies multi-part public suffixes
        // (e.g. "example.co.uk" -> "co.uk") — verify against expected domains.
        const baseDomain = service.domain.split('.').slice(-2).join('.');
        return baseDomain === domain.domain;
      }).length;

      // Derive an aggregate certificate status. Order matters:
      // any valid, unexpired cert wins; otherwise an invalid cert means
      // 'expired'; otherwise a pending requirement means 'pending';
      // otherwise 'none'.
      let certificateStatus: 'valid' | 'expiring-soon' | 'expired' | 'pending' | 'none' = 'none';
      let daysRemaining: number | null = null;

      const validCerts = certificates.filter((cert: any) => cert.isValid && cert.expiryDate > Date.now());
      if (validCerts.length > 0) {
        // Use the cert that expires last to compute days remaining.
        const latestCert = validCerts.reduce((latest: any, cert: any) =>
          cert.expiryDate > latest.expiryDate ? cert : latest
        );
        daysRemaining = Math.floor((latestCert.expiryDate - Date.now()) / (24 * 60 * 60 * 1000));
        // 30 days or fewer left counts as expiring soon.
        certificateStatus = daysRemaining <= 30 ? 'expiring-soon' : 'valid';
      } else if (certificates.some((cert: any) => !cert.isValid)) {
        certificateStatus = 'expired';
      } else if (requirements.some((req: any) => req.status === 'pending')) {
        certificateStatus = 'pending';
      }

      return {
        domain,
        certificates,
        requirements,
        serviceCount,
        certificateStatus,
        daysRemaining,
      };
    });
  }

  private registerHandlers(): void {
    // List all domains as enriched views.
    this.typedrouter.addTypedHandler(
      new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetDomains>(
        'getDomains',
        async (dataArg) => {
          await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
          const domains = this.buildDomainViews();
          return { domains };
        },
      ),
    );

    // Fetch a single domain view by name.
    this.typedrouter.addTypedHandler(
      new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetDomain>(
        'getDomain',
        async (dataArg) => {
          await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
          // First check existence cheaply, then build all views and pick
          // the matching one (views are only built when the domain exists).
          const domain = this.opsServerRef.oneboxRef.database.getDomainByName(dataArg.domainName);
          if (!domain) {
            throw new plugins.typedrequest.TypedResponseError('Domain not found');
          }
          const views = this.buildDomainViews();
          const domainView = views.find((v) => v.domain.domain === dataArg.domainName);
          if (!domainView) {
            throw new plugins.typedrequest.TypedResponseError('Domain not found');
          }
          return { domain: domainView };
        },
      ),
    );

    // Pull zones from Cloudflare, then return the refreshed views.
    this.typedrouter.addTypedHandler(
      new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_SyncDomains>(
        'syncDomains',
        async (dataArg) => {
          await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
          if (!this.opsServerRef.oneboxRef.cloudflareDomainSync) {
            throw new plugins.typedrequest.TypedResponseError('Cloudflare domain sync not configured');
          }
          await this.opsServerRef.oneboxRef.cloudflareDomainSync.syncZones();
          const domains = this.buildDomainViews();
          return { domains };
        },
      ),
    );
  }
}
|
||||||
13
ts/opsserver/handlers/index.ts
Normal file
13
ts/opsserver/handlers/index.ts
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
export * from './admin.handler.ts';
|
||||||
|
export * from './status.handler.ts';
|
||||||
|
export * from './services.handler.ts';
|
||||||
|
export * from './platform.handler.ts';
|
||||||
|
export * from './ssl.handler.ts';
|
||||||
|
export * from './domains.handler.ts';
|
||||||
|
export * from './dns.handler.ts';
|
||||||
|
export * from './registry.handler.ts';
|
||||||
|
export * from './network.handler.ts';
|
||||||
|
export * from './backups.handler.ts';
|
||||||
|
export * from './schedules.handler.ts';
|
||||||
|
export * from './settings.handler.ts';
|
||||||
|
export * from './logs.handler.ts';
|
||||||
219
ts/opsserver/handlers/logs.handler.ts
Normal file
219
ts/opsserver/handlers/logs.handler.ts
Normal file
@@ -0,0 +1,219 @@
|
|||||||
|
import * as plugins from '../../plugins.ts';
|
||||||
|
import { logger } from '../../logging.ts';
|
||||||
|
import type { OpsServer } from '../classes.opsserver.ts';
|
||||||
|
import * as interfaces from '../../../ts_interfaces/index.ts';
|
||||||
|
import { requireValidIdentity } from '../helpers/guards.ts';
|
||||||
|
|
||||||
|
/**
 * Registers log/event streaming endpoints. Each handler returns a
 * VirtualStream immediately and pumps data into it from a detached
 * background task, so the typedrequest response is not blocked on
 * container/log-source availability.
 */
export class LogsHandler {
  public typedrouter = new plugins.typedrequest.TypedRouter();

  constructor(private opsServerRef: OpsServer) {
    // Mount this sub-router into the server-wide router, then register all
    // handlers once at construction time.
    this.opsServerRef.typedrouter.addTypedRouter(this.typedrouter);
    this.registerHandlers();
  }

  private registerHandlers(): void {
    // Service log stream
    this.typedrouter.addTypedHandler(
      new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetServiceLogStream>(
        'getServiceLogStream',
        async (dataArg) => {
          await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);

          const service = this.opsServerRef.oneboxRef.database.getServiceByName(dataArg.serviceName);
          if (!service) {
            throw new plugins.typedrequest.TypedResponseError('Service not found');
          }

          const virtualStream = new plugins.typedrequest.VirtualStream<Uint8Array>();
          const encoder = new TextEncoder();

          // Get container and start streaming in background
          // (fire-and-forget IIFE; errors are logged, never thrown to caller).
          (async () => {
            try {
              // service.containerID may be a container id or a swarm service id.
              let container = await this.opsServerRef.oneboxRef.docker.getContainerById(service.containerID!);
              if (!container) {
                // Try finding by service label
                // (swarm case: locate the task container carrying this service id).
                const containers = await this.opsServerRef.oneboxRef.docker.listAllContainers();
                const serviceContainer = containers.find((c: any) => {
                  const labels = c.Labels || {};
                  return labels['com.docker.swarm.service.id'] === service.containerID;
                });
                if (serviceContainer) {
                  container = await this.opsServerRef.oneboxRef.docker.getContainerById(serviceContainer.Id);
                }
              }

              if (!container) {
                // Report the failure in-band as a JSON payload on the stream.
                virtualStream.sendData(encoder.encode(JSON.stringify({ error: 'Container not found' })));
                return;
              }

              // Tail the last 100 lines, then follow stdout+stderr with timestamps.
              const logStream = await container.streamLogs({
                stdout: true,
                stderr: true,
                timestamps: true,
                tail: 100,
              });

              // Accumulator for partial frames across 'data' events.
              let buffer = new Uint8Array(0);

              logStream.on('data', (chunk: Uint8Array) => {
                // Append to buffer
                const newBuffer = new Uint8Array(buffer.length + chunk.length);
                newBuffer.set(buffer);
                newBuffer.set(chunk, buffer.length);
                buffer = newBuffer;

                // Process Docker multiplexed frames:
                // each frame has an 8-byte header; bytes 4-7 are read here as a
                // big-endian 32-bit payload length. The stream-type byte at
                // offset 0 (stdout vs stderr) is ignored.
                while (buffer.length >= 8) {
                  const frameSize = (buffer[4] << 24) | (buffer[5] << 16) | (buffer[6] << 8) | buffer[7];
                  // Incomplete frame: wait for more data.
                  if (buffer.length < 8 + frameSize) break;

                  const frameData = buffer.slice(8, 8 + frameSize);
                  try {
                    virtualStream.sendData(frameData);
                  } catch {
                    // Consumer went away; stop tailing the container.
                    logStream.destroy();
                    return;
                  }
                  buffer = buffer.slice(8 + frameSize);
                }
              });

              logStream.on('error', (error: Error) => {
                logger.error(`Log stream error for ${dataArg.serviceName}: ${error.message}`);
              });
            } catch (error) {
              logger.error(`Failed to start log stream: ${error}`);
            }
          })();

          return { logStream: virtualStream as any };
        },
      ),
    );

    // Platform service log stream
    // (same frame-demux approach as above, but looked up by platform type
    // and without the swarm-label fallback or in-band error payload).
    this.typedrouter.addTypedHandler(
      new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetPlatformServiceLogStream>(
        'getPlatformServiceLogStream',
        async (dataArg) => {
          await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);

          const platformService = this.opsServerRef.oneboxRef.database.getPlatformServiceByType(
            dataArg.serviceType,
          );
          if (!platformService || !platformService.containerId) {
            throw new plugins.typedrequest.TypedResponseError('Platform service has no container');
          }

          const virtualStream = new plugins.typedrequest.VirtualStream<Uint8Array>();

          // Fire-and-forget background pump, as above.
          (async () => {
            try {
              const container = await this.opsServerRef.oneboxRef.docker.getContainerById(
                platformService.containerId!,
              );
              // No container: the stream simply stays empty.
              if (!container) return;

              const logStream = await container.streamLogs({
                stdout: true,
                stderr: true,
                timestamps: true,
                tail: 100,
              });

              let buffer = new Uint8Array(0);

              logStream.on('data', (chunk: Uint8Array) => {
                const newBuffer = new Uint8Array(buffer.length + chunk.length);
                newBuffer.set(buffer);
                newBuffer.set(chunk, buffer.length);
                buffer = newBuffer;

                // Docker multiplexed frame demux (see service handler above).
                while (buffer.length >= 8) {
                  const frameSize = (buffer[4] << 24) | (buffer[5] << 16) | (buffer[6] << 8) | buffer[7];
                  if (buffer.length < 8 + frameSize) break;
                  const frameData = buffer.slice(8, 8 + frameSize);
                  try {
                    virtualStream.sendData(frameData);
                  } catch {
                    logStream.destroy();
                    return;
                  }
                  buffer = buffer.slice(8 + frameSize);
                }
              });
            } catch (error) {
              logger.error(`Failed to start platform log stream: ${error}`);
            }
          })();

          return { logStream: virtualStream as any };
        },
      ),
    );

    // Network log stream
    // Bridges the CaddyLogReceiver's WebSocket-client interface onto a
    // VirtualStream by registering a minimal socket-shaped adapter.
    this.typedrouter.addTypedHandler(
      new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetNetworkLogStream>(
        'getNetworkLogStream',
        async (dataArg) => {
          await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);

          const virtualStream = new plugins.typedrequest.VirtualStream<Uint8Array>();
          const encoder = new TextEncoder();
          const clientId = crypto.randomUUID();

          // Create a mock WebSocket-like object for the CaddyLogReceiver
          const mockSocket = {
            readyState: 1, // WebSocket.OPEN
            send: (data: string) => {
              try {
                virtualStream.sendData(encoder.encode(data));
              } catch {
                // Consumer gone: unregister so the receiver stops sending.
                this.opsServerRef.oneboxRef.caddyLogReceiver.removeClient(clientId);
              }
            },
          };

          const filter = dataArg.filter || {};
          this.opsServerRef.oneboxRef.caddyLogReceiver.addClient(
            clientId,
            mockSocket as any,
            filter,
          );

          return { logStream: virtualStream as any };
        },
      ),
    );

    // Event stream (general updates)
    // NOTE(review): only the initial 'connected' message is ever pushed here;
    // presumably other components feed this stream later — confirm.
    this.typedrouter.addTypedHandler(
      new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetEventStream>(
        'getEventStream',
        async (dataArg) => {
          await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);

          const virtualStream = new plugins.typedrequest.VirtualStream<Uint8Array>();
          const encoder = new TextEncoder();

          // Send initial connection message
          virtualStream.sendData(
            encoder.encode(
              JSON.stringify({
                type: 'connected',
                message: 'Connected to Onebox event stream',
                timestamp: Date.now(),
              }),
            ),
          );

          return { eventStream: virtualStream as any };
        },
      ),
    );
  }
}
|
||||||
123
ts/opsserver/handlers/network.handler.ts
Normal file
123
ts/opsserver/handlers/network.handler.ts
Normal file
@@ -0,0 +1,123 @@
|
|||||||
|
import * as plugins from '../../plugins.ts';
|
||||||
|
import type { OpsServer } from '../classes.opsserver.ts';
|
||||||
|
import * as interfaces from '../../../ts_interfaces/index.ts';
|
||||||
|
import { requireValidIdentity } from '../helpers/guards.ts';
|
||||||
|
import type { TPlatformServiceType } from '../../types.ts';
|
||||||
|
|
||||||
|
/**
 * Registers network inspection endpoints: proxyable targets (services,
 * registry, platform services), proxy/log-receiver stats, and traffic stats.
 */
export class NetworkHandler {
  public typedrouter = new plugins.typedrequest.TypedRouter();

  constructor(private opsServerRef: OpsServer) {
    // Mount this sub-router into the server-wide router, then register all
    // handlers once at construction time.
    this.opsServerRef.typedrouter.addTypedRouter(this.typedrouter);
    this.registerHandlers();
  }

  /**
   * Maps a platform service type to its conventional default port.
   * Returns 0 for unknown types (the Record covers all declared types, so
   * the `|| 0` fallback only matters if TPlatformServiceType widens).
   */
  private getPlatformServicePort(type: TPlatformServiceType): number {
    const ports: Record<TPlatformServiceType, number> = {
      mongodb: 27017,
      minio: 9000,
      redis: 6379,
      postgresql: 5432,
      rabbitmq: 5672,
      caddy: 80,
      clickhouse: 8123,
    };
    return ports[type] || 0;
  }

  private registerHandlers(): void {
    // Enumerate everything the proxy can route to.
    this.typedrouter.addTypedHandler(
      new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetNetworkTargets>(
        'getNetworkTargets',
        async (dataArg) => {
          await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
          const targets: interfaces.data.INetworkTarget[] = [];

          // Services
          const services = this.opsServerRef.oneboxRef.services.listServices();
          for (const svc of services) {
            targets.push({
              type: 'service',
              name: svc.name,
              domain: svc.domain || null,
              // Prefer a concrete container IP; fall back to the container id.
              // NOTE(review): containerIP is accessed through `as any` —
              // presumably populated elsewhere; confirm the field exists.
              targetHost: (svc as any).containerIP || svc.containerID || 'unknown',
              targetPort: svc.port || 80,
              status: svc.status,
            });
          }

          // Registry
          const registryStatus = this.opsServerRef.oneboxRef.registry.getStatus();
          if (registryStatus.running) {
            targets.push({
              type: 'registry',
              name: 'onebox-registry',
              domain: null,
              targetHost: 'localhost',
              targetPort: registryStatus.port,
              status: 'running',
            });
          }

          // Platform services
          const platformServices = this.opsServerRef.oneboxRef.platformServices.getAllPlatformServices();
          for (const ps of platformServices) {
            const provider = this.opsServerRef.oneboxRef.platformServices.getProvider(ps.type);
            targets.push({
              type: 'platform',
              name: provider?.displayName || ps.type,
              domain: null,
              targetHost: 'localhost',
              targetPort: this.getPlatformServicePort(ps.type),
              status: ps.status,
            });
          }

          return { targets };
        },
      ),
    );

    // Proxy + log-receiver statistics. The proxy status object has two
    // possible shapes (flat fields vs nested http/https objects), hence the
    // chained `??` fallbacks over an `as any` value.
    this.typedrouter.addTypedHandler(
      new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetNetworkStats>(
        'getNetworkStats',
        async (dataArg) => {
          await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
          const proxyStatus = this.opsServerRef.oneboxRef.reverseProxy.getStatus() as any;
          const logReceiverStats = this.opsServerRef.oneboxRef.caddyLogReceiver.getStats();

          return {
            stats: {
              proxy: {
                running: proxyStatus.running ?? proxyStatus.http?.running ?? false,
                httpPort: proxyStatus.httpPort ?? proxyStatus.http?.port ?? 80,
                httpsPort: proxyStatus.httpsPort ?? proxyStatus.https?.port ?? 443,
                routes: proxyStatus.routes ?? 0,
                certificates: proxyStatus.certificates ?? proxyStatus.https?.certificates ?? 0,
              },
              logReceiver: {
                running: logReceiverStats.running,
                port: logReceiverStats.port,
                clients: logReceiverStats.clients,
                connections: logReceiverStats.connections,
                sampleRate: logReceiverStats.sampleRate,
                recentLogsCount: logReceiverStats.recentLogsCount,
              },
            },
          };
        },
      ),
    );

    // Traffic stats over the last 60 minutes (the argument's unit is
    // presumably minutes — confirm against getTrafficStats's signature).
    this.typedrouter.addTypedHandler(
      new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetTrafficStats>(
        'getTrafficStats',
        async (dataArg) => {
          await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
          const trafficStats = this.opsServerRef.oneboxRef.caddyLogReceiver.getTrafficStats(60);
          return { stats: trafficStats };
        },
      ),
    );
  }
}
|
||||||
169
ts/opsserver/handlers/platform.handler.ts
Normal file
169
ts/opsserver/handlers/platform.handler.ts
Normal file
@@ -0,0 +1,169 @@
|
|||||||
|
import * as plugins from '../../plugins.ts';
|
||||||
|
import { logger } from '../../logging.ts';
|
||||||
|
import type { OpsServer } from '../classes.opsserver.ts';
|
||||||
|
import * as interfaces from '../../../ts_interfaces/index.ts';
|
||||||
|
import { requireValidIdentity } from '../helpers/guards.ts';
|
||||||
|
|
||||||
|
export class PlatformHandler {
|
||||||
|
public typedrouter = new plugins.typedrequest.TypedRouter();
|
||||||
|
|
||||||
|
constructor(private opsServerRef: OpsServer) {
|
||||||
|
this.opsServerRef.typedrouter.addTypedRouter(this.typedrouter);
|
||||||
|
this.registerHandlers();
|
||||||
|
}
|
||||||
|
|
||||||
|
private registerHandlers(): void {
|
||||||
|
// Get all platform services
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetPlatformServices>(
|
||||||
|
'getPlatformServices',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const platformServices = this.opsServerRef.oneboxRef.platformServices.getAllPlatformServices();
|
||||||
|
const providers = this.opsServerRef.oneboxRef.platformServices.getAllProviders();
|
||||||
|
|
||||||
|
const result = providers.map((provider: any) => {
|
||||||
|
const service = platformServices.find((s: any) => s.type === provider.type);
|
||||||
|
const isCore = 'isCore' in provider && (provider as any).isCore === true;
|
||||||
|
|
||||||
|
let status: string = service?.status || 'not-deployed';
|
||||||
|
if (provider.type === 'caddy') {
|
||||||
|
const proxyStatus = this.opsServerRef.oneboxRef.reverseProxy.getStatus() as any;
|
||||||
|
status = (proxyStatus.running ?? proxyStatus.http?.running) ? 'running' : 'stopped';
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
type: provider.type,
|
||||||
|
displayName: provider.displayName,
|
||||||
|
resourceTypes: provider.resourceTypes,
|
||||||
|
status: status as interfaces.data.TPlatformServiceStatus,
|
||||||
|
containerId: service?.containerId,
|
||||||
|
isCore,
|
||||||
|
createdAt: service?.createdAt,
|
||||||
|
updatedAt: service?.updatedAt,
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
return { platformServices: result as interfaces.data.IPlatformService[] };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Get specific platform service
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetPlatformService>(
|
||||||
|
'getPlatformService',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const provider = this.opsServerRef.oneboxRef.platformServices.getProvider(dataArg.serviceType);
|
||||||
|
if (!provider) {
|
||||||
|
throw new plugins.typedrequest.TypedResponseError(`Unknown platform service type: ${dataArg.serviceType}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const service = this.opsServerRef.oneboxRef.database.getPlatformServiceByType(dataArg.serviceType);
|
||||||
|
const isCore = 'isCore' in provider && (provider as any).isCore === true;
|
||||||
|
|
||||||
|
let rawStatus: string = service?.status || 'not-deployed';
|
||||||
|
if (dataArg.serviceType === 'caddy') {
|
||||||
|
const proxyStatus = this.opsServerRef.oneboxRef.reverseProxy.getStatus() as any;
|
||||||
|
rawStatus = (proxyStatus.running ?? proxyStatus.http?.running) ? 'running' : 'stopped';
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
platformService: {
|
||||||
|
type: provider.type,
|
||||||
|
displayName: provider.displayName,
|
||||||
|
resourceTypes: provider.resourceTypes,
|
||||||
|
status: rawStatus as interfaces.data.TPlatformServiceStatus,
|
||||||
|
containerId: service?.containerId,
|
||||||
|
isCore,
|
||||||
|
createdAt: service?.createdAt,
|
||||||
|
updatedAt: service?.updatedAt,
|
||||||
|
} as interfaces.data.IPlatformService,
|
||||||
|
};
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Start platform service
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_StartPlatformService>(
|
||||||
|
'startPlatformService',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const provider = this.opsServerRef.oneboxRef.platformServices.getProvider(dataArg.serviceType);
|
||||||
|
if (!provider) {
|
||||||
|
throw new plugins.typedrequest.TypedResponseError(`Unknown platform service type: ${dataArg.serviceType}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`Starting platform service: ${dataArg.serviceType}`);
|
||||||
|
const service = await this.opsServerRef.oneboxRef.platformServices.ensureRunning(dataArg.serviceType);
|
||||||
|
|
||||||
|
return {
|
||||||
|
platformService: {
|
||||||
|
type: service.type,
|
||||||
|
displayName: provider.displayName,
|
||||||
|
resourceTypes: provider.resourceTypes,
|
||||||
|
status: service.status,
|
||||||
|
containerId: service.containerId,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Stop platform service
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_StopPlatformService>(
|
||||||
|
'stopPlatformService',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const provider = this.opsServerRef.oneboxRef.platformServices.getProvider(dataArg.serviceType);
|
||||||
|
if (!provider) {
|
||||||
|
throw new plugins.typedrequest.TypedResponseError(`Unknown platform service type: ${dataArg.serviceType}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const isCore = 'isCore' in provider && (provider as any).isCore === true;
|
||||||
|
if (isCore) {
|
||||||
|
throw new plugins.typedrequest.TypedResponseError(
|
||||||
|
`${provider.displayName} is a core service and cannot be stopped`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`Stopping platform service: ${dataArg.serviceType}`);
|
||||||
|
await this.opsServerRef.oneboxRef.platformServices.stopPlatformService(dataArg.serviceType);
|
||||||
|
|
||||||
|
return {
|
||||||
|
platformService: {
|
||||||
|
type: dataArg.serviceType,
|
||||||
|
displayName: provider.displayName,
|
||||||
|
resourceTypes: provider.resourceTypes,
|
||||||
|
status: 'stopped' as const,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Get platform service stats
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetPlatformServiceStats>(
|
||||||
|
'getPlatformServiceStats',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const service = this.opsServerRef.oneboxRef.database.getPlatformServiceByType(dataArg.serviceType);
|
||||||
|
if (!service || !service.containerId) {
|
||||||
|
throw new plugins.typedrequest.TypedResponseError('Platform service has no container');
|
||||||
|
}
|
||||||
|
|
||||||
|
const stats = await this.opsServerRef.oneboxRef.docker.getContainerStats(service.containerId);
|
||||||
|
if (!stats) {
|
||||||
|
throw new plugins.typedrequest.TypedResponseError('Could not retrieve container stats');
|
||||||
|
}
|
||||||
|
|
||||||
|
return { stats };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
147
ts/opsserver/handlers/registry.handler.ts
Normal file
147
ts/opsserver/handlers/registry.handler.ts
Normal file
@@ -0,0 +1,147 @@
|
|||||||
|
import * as plugins from '../../plugins.ts';
|
||||||
|
import type { OpsServer } from '../classes.opsserver.ts';
|
||||||
|
import * as interfaces from '../../../ts_interfaces/index.ts';
|
||||||
|
import { requireValidIdentity } from '../helpers/guards.ts';
|
||||||
|
|
||||||
|
export class RegistryHandler {
|
||||||
|
public typedrouter = new plugins.typedrequest.TypedRouter();
|
||||||
|
|
||||||
|
constructor(private opsServerRef: OpsServer) {
|
||||||
|
this.opsServerRef.typedrouter.addTypedRouter(this.typedrouter);
|
||||||
|
this.registerHandlers();
|
||||||
|
}
|
||||||
|
|
||||||
|
private registerHandlers(): void {
|
||||||
|
// Get registry tags
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetRegistryTags>(
|
||||||
|
'getRegistryTags',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const tags = await this.opsServerRef.oneboxRef.registry.getImageTags(dataArg.serviceName);
|
||||||
|
return { tags };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Get registry tokens
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetRegistryTokens>(
|
||||||
|
'getRegistryTokens',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const rawTokens = this.opsServerRef.oneboxRef.database.getAllRegistryTokens();
|
||||||
|
const now = Date.now();
|
||||||
|
|
||||||
|
const tokens = rawTokens.map((token: any) => {
|
||||||
|
const isExpired = token.expiresAt !== null && token.expiresAt < now;
|
||||||
|
let scopeDisplay: string;
|
||||||
|
if (token.scope === 'all') {
|
||||||
|
scopeDisplay = 'All services';
|
||||||
|
} else if (Array.isArray(token.scope)) {
|
||||||
|
scopeDisplay = token.scope.length === 1 ? token.scope[0] : `${token.scope.length} services`;
|
||||||
|
} else {
|
||||||
|
scopeDisplay = 'Unknown';
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
id: token.id!,
|
||||||
|
name: token.name,
|
||||||
|
type: token.type,
|
||||||
|
scope: token.scope,
|
||||||
|
scopeDisplay,
|
||||||
|
expiresAt: token.expiresAt,
|
||||||
|
createdAt: token.createdAt,
|
||||||
|
lastUsedAt: token.lastUsedAt,
|
||||||
|
createdBy: token.createdBy,
|
||||||
|
isExpired,
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
return { tokens };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Create registry token
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_CreateRegistryToken>(
|
||||||
|
'createRegistryToken',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const config = dataArg.tokenConfig;
|
||||||
|
|
||||||
|
// Calculate expiration
|
||||||
|
const now = Date.now();
|
||||||
|
let expiresAt: number | null = null;
|
||||||
|
if (config.expiresIn !== 'never') {
|
||||||
|
const daysMap: Record<string, number> = { '30d': 30, '90d': 90, '365d': 365 };
|
||||||
|
const days = daysMap[config.expiresIn];
|
||||||
|
if (days) expiresAt = now + days * 24 * 60 * 60 * 1000;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate token
|
||||||
|
const plainToken = crypto.randomUUID() + crypto.randomUUID();
|
||||||
|
const encoder = new TextEncoder();
|
||||||
|
const hashBuffer = await crypto.subtle.digest('SHA-256', encoder.encode(plainToken));
|
||||||
|
const hashArray = Array.from(new Uint8Array(hashBuffer));
|
||||||
|
const tokenHash = hashArray.map((b) => b.toString(16).padStart(2, '0')).join('');
|
||||||
|
|
||||||
|
const token = this.opsServerRef.oneboxRef.database.createRegistryToken({
|
||||||
|
name: config.name,
|
||||||
|
tokenHash,
|
||||||
|
type: config.type,
|
||||||
|
scope: config.scope,
|
||||||
|
expiresAt,
|
||||||
|
createdAt: now,
|
||||||
|
lastUsedAt: null,
|
||||||
|
createdBy: dataArg.identity.username,
|
||||||
|
});
|
||||||
|
|
||||||
|
let scopeDisplay: string;
|
||||||
|
if (token.scope === 'all') {
|
||||||
|
scopeDisplay = 'All services';
|
||||||
|
} else if (Array.isArray(token.scope)) {
|
||||||
|
scopeDisplay = token.scope.length === 1 ? token.scope[0] : `${token.scope.length} services`;
|
||||||
|
} else {
|
||||||
|
scopeDisplay = 'Unknown';
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
result: {
|
||||||
|
token: {
|
||||||
|
id: token.id!,
|
||||||
|
name: token.name,
|
||||||
|
type: token.type,
|
||||||
|
scope: token.scope,
|
||||||
|
scopeDisplay,
|
||||||
|
expiresAt: token.expiresAt,
|
||||||
|
createdAt: token.createdAt,
|
||||||
|
lastUsedAt: token.lastUsedAt,
|
||||||
|
createdBy: token.createdBy,
|
||||||
|
isExpired: false,
|
||||||
|
},
|
||||||
|
plainToken,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Delete registry token
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_DeleteRegistryToken>(
|
||||||
|
'deleteRegistryToken',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const token = this.opsServerRef.oneboxRef.database.getRegistryTokenById(dataArg.tokenId);
|
||||||
|
if (!token) {
|
||||||
|
throw new plugins.typedrequest.TypedResponseError('Token not found');
|
||||||
|
}
|
||||||
|
this.opsServerRef.oneboxRef.database.deleteRegistryToken(dataArg.tokenId);
|
||||||
|
return { ok: true };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
93
ts/opsserver/handlers/schedules.handler.ts
Normal file
93
ts/opsserver/handlers/schedules.handler.ts
Normal file
@@ -0,0 +1,93 @@
|
|||||||
|
import * as plugins from '../../plugins.ts';
|
||||||
|
import type { OpsServer } from '../classes.opsserver.ts';
|
||||||
|
import * as interfaces from '../../../ts_interfaces/index.ts';
|
||||||
|
import { requireValidIdentity } from '../helpers/guards.ts';
|
||||||
|
|
||||||
|
export class SchedulesHandler {
|
||||||
|
public typedrouter = new plugins.typedrequest.TypedRouter();
|
||||||
|
|
||||||
|
constructor(private opsServerRef: OpsServer) {
|
||||||
|
this.opsServerRef.typedrouter.addTypedRouter(this.typedrouter);
|
||||||
|
this.registerHandlers();
|
||||||
|
}
|
||||||
|
|
||||||
|
private registerHandlers(): void {
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetBackupSchedules>(
|
||||||
|
'getBackupSchedules',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const schedules = this.opsServerRef.oneboxRef.backupScheduler.getAllSchedules();
|
||||||
|
return { schedules };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_CreateBackupSchedule>(
|
||||||
|
'createBackupSchedule',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const schedule = await this.opsServerRef.oneboxRef.backupScheduler.createSchedule(
|
||||||
|
dataArg.scheduleConfig,
|
||||||
|
);
|
||||||
|
return { schedule };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetBackupSchedule>(
|
||||||
|
'getBackupSchedule',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const schedule = this.opsServerRef.oneboxRef.backupScheduler.getScheduleById(dataArg.scheduleId);
|
||||||
|
if (!schedule) {
|
||||||
|
throw new plugins.typedrequest.TypedResponseError('Schedule not found');
|
||||||
|
}
|
||||||
|
return { schedule };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_UpdateBackupSchedule>(
|
||||||
|
'updateBackupSchedule',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const schedule = await this.opsServerRef.oneboxRef.backupScheduler.updateSchedule(
|
||||||
|
dataArg.scheduleId,
|
||||||
|
dataArg.updates,
|
||||||
|
);
|
||||||
|
return { schedule };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_DeleteBackupSchedule>(
|
||||||
|
'deleteBackupSchedule',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
await this.opsServerRef.oneboxRef.backupScheduler.deleteSchedule(dataArg.scheduleId);
|
||||||
|
return { ok: true };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_TriggerBackupSchedule>(
|
||||||
|
'triggerBackupSchedule',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
await this.opsServerRef.oneboxRef.backupScheduler.triggerBackup(dataArg.scheduleId);
|
||||||
|
// triggerBackup is void; the backup is created async by the scheduler
|
||||||
|
// Return the most recent backup for the schedule
|
||||||
|
const allBackups = this.opsServerRef.oneboxRef.backupManager.listBackups();
|
||||||
|
const latestBackup = allBackups.find((b: any) => b.scheduleId === dataArg.scheduleId);
|
||||||
|
return { backup: latestBackup! };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
244
ts/opsserver/handlers/services.handler.ts
Normal file
244
ts/opsserver/handlers/services.handler.ts
Normal file
@@ -0,0 +1,244 @@
|
|||||||
|
import * as plugins from '../../plugins.ts';
|
||||||
|
import { logger } from '../../logging.ts';
|
||||||
|
import type { OpsServer } from '../classes.opsserver.ts';
|
||||||
|
import * as interfaces from '../../../ts_interfaces/index.ts';
|
||||||
|
import { requireValidIdentity } from '../helpers/guards.ts';
|
||||||
|
|
||||||
|
export class ServicesHandler {
|
||||||
|
public typedrouter = new plugins.typedrequest.TypedRouter();
|
||||||
|
|
||||||
|
constructor(private opsServerRef: OpsServer) {
|
||||||
|
this.opsServerRef.typedrouter.addTypedRouter(this.typedrouter);
|
||||||
|
this.registerHandlers();
|
||||||
|
}
|
||||||
|
|
||||||
|
private registerHandlers(): void {
|
||||||
|
// Get all services
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetServices>(
|
||||||
|
'getServices',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const services = this.opsServerRef.oneboxRef.services.listServices();
|
||||||
|
return { services };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Get single service
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetService>(
|
||||||
|
'getService',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const service = this.opsServerRef.oneboxRef.services.getService(dataArg.serviceName);
|
||||||
|
if (!service) {
|
||||||
|
throw new plugins.typedrequest.TypedResponseError('Service not found');
|
||||||
|
}
|
||||||
|
return { service };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Create service
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_CreateService>(
|
||||||
|
'createService',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const service = await this.opsServerRef.oneboxRef.services.deployService(dataArg.serviceConfig);
|
||||||
|
return { service };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Update service
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_UpdateService>(
|
||||||
|
'updateService',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const service = await this.opsServerRef.oneboxRef.services.updateService(
|
||||||
|
dataArg.serviceName,
|
||||||
|
dataArg.updates,
|
||||||
|
);
|
||||||
|
return { service };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Delete service
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_DeleteService>(
|
||||||
|
'deleteService',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
await this.opsServerRef.oneboxRef.services.removeService(dataArg.serviceName);
|
||||||
|
return { ok: true };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Start service
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_StartService>(
|
||||||
|
'startService',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
await this.opsServerRef.oneboxRef.services.startService(dataArg.serviceName);
|
||||||
|
const service = this.opsServerRef.oneboxRef.services.getService(dataArg.serviceName);
|
||||||
|
return { service: service! };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Stop service
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_StopService>(
|
||||||
|
'stopService',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
await this.opsServerRef.oneboxRef.services.stopService(dataArg.serviceName);
|
||||||
|
const service = this.opsServerRef.oneboxRef.services.getService(dataArg.serviceName);
|
||||||
|
return { service: service! };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Restart service
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_RestartService>(
|
||||||
|
'restartService',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
await this.opsServerRef.oneboxRef.services.restartService(dataArg.serviceName);
|
||||||
|
const service = this.opsServerRef.oneboxRef.services.getService(dataArg.serviceName);
|
||||||
|
return { service: service! };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Get service logs
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetServiceLogs>(
|
||||||
|
'getServiceLogs',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const logs = await this.opsServerRef.oneboxRef.services.getServiceLogs(dataArg.serviceName);
|
||||||
|
return { logs: String(logs) };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Get service stats
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetServiceStats>(
|
||||||
|
'getServiceStats',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const service = this.opsServerRef.oneboxRef.services.getService(dataArg.serviceName);
|
||||||
|
if (!service || !service.containerID) {
|
||||||
|
throw new plugins.typedrequest.TypedResponseError('Service has no container');
|
||||||
|
}
|
||||||
|
const stats = await this.opsServerRef.oneboxRef.docker.getContainerStats(service.containerID);
|
||||||
|
if (!stats) {
|
||||||
|
throw new plugins.typedrequest.TypedResponseError('Could not retrieve container stats');
|
||||||
|
}
|
||||||
|
return { stats };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Get service metrics
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetServiceMetrics>(
|
||||||
|
'getServiceMetrics',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const service = this.opsServerRef.oneboxRef.services.getService(dataArg.serviceName);
|
||||||
|
if (!service || !service.id) {
|
||||||
|
throw new plugins.typedrequest.TypedResponseError('Service not found');
|
||||||
|
}
|
||||||
|
const metrics = this.opsServerRef.oneboxRef.database.getMetrics(service.id, dataArg.limit || 60);
|
||||||
|
return { metrics };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Get service platform resources
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetServicePlatformResources>(
|
||||||
|
'getServicePlatformResources',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const rawResources = await this.opsServerRef.oneboxRef.services.getServicePlatformResources(
|
||||||
|
dataArg.serviceName,
|
||||||
|
);
|
||||||
|
const resources = rawResources.map((r: any) => ({
|
||||||
|
id: r.resource.id,
|
||||||
|
resourceType: r.resource.resourceType,
|
||||||
|
resourceName: r.resource.resourceName,
|
||||||
|
platformService: {
|
||||||
|
type: r.platformService.type,
|
||||||
|
name: r.platformService.name,
|
||||||
|
status: r.platformService.status,
|
||||||
|
},
|
||||||
|
envVars: Object.keys(r.credentials).reduce((acc: Record<string, string>, key: string) => {
|
||||||
|
const value = r.credentials[key];
|
||||||
|
if (key.toLowerCase().includes('password') || key.toLowerCase().includes('secret')) {
|
||||||
|
acc[key] = '********';
|
||||||
|
} else {
|
||||||
|
acc[key] = value;
|
||||||
|
}
|
||||||
|
return acc;
|
||||||
|
}, {}),
|
||||||
|
createdAt: r.resource.createdAt,
|
||||||
|
}));
|
||||||
|
return { resources };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Get service backups
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetServiceBackups>(
|
||||||
|
'getServiceBackups',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const backups = this.opsServerRef.oneboxRef.backupManager.listBackups(dataArg.serviceName);
|
||||||
|
return { backups };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Create service backup
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_CreateServiceBackup>(
|
||||||
|
'createServiceBackup',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const result = await this.opsServerRef.oneboxRef.backupManager.createBackup(dataArg.serviceName);
|
||||||
|
return { backup: result.backup };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Get service backup schedules
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetServiceBackupSchedules>(
|
||||||
|
'getServiceBackupSchedules',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const service = this.opsServerRef.oneboxRef.services.getService(dataArg.serviceName);
|
||||||
|
if (!service) {
|
||||||
|
throw new plugins.typedrequest.TypedResponseError('Service not found');
|
||||||
|
}
|
||||||
|
const schedules = this.opsServerRef.oneboxRef.backupScheduler.getSchedulesForService(
|
||||||
|
dataArg.serviceName,
|
||||||
|
);
|
||||||
|
return { schedules };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
86
ts/opsserver/handlers/settings.handler.ts
Normal file
86
ts/opsserver/handlers/settings.handler.ts
Normal file
@@ -0,0 +1,86 @@
|
|||||||
|
import * as plugins from '../../plugins.ts';
|
||||||
|
import type { OpsServer } from '../classes.opsserver.ts';
|
||||||
|
import * as interfaces from '../../../ts_interfaces/index.ts';
|
||||||
|
import { requireValidIdentity } from '../helpers/guards.ts';
|
||||||
|
|
||||||
|
export class SettingsHandler {
|
||||||
|
public typedrouter = new plugins.typedrequest.TypedRouter();
|
||||||
|
|
||||||
|
constructor(private opsServerRef: OpsServer) {
|
||||||
|
this.opsServerRef.typedrouter.addTypedRouter(this.typedrouter);
|
||||||
|
this.registerHandlers();
|
||||||
|
}
|
||||||
|
|
||||||
|
private getSettingsObject(): interfaces.data.ISettings {
|
||||||
|
const db = this.opsServerRef.oneboxRef.database;
|
||||||
|
const settingsMap = db.getAllSettings(); // Returns Record<string, string>
|
||||||
|
|
||||||
|
return {
|
||||||
|
cloudflareToken: settingsMap['cloudflareToken'] || '',
|
||||||
|
cloudflareZoneId: settingsMap['cloudflareZoneId'] || '',
|
||||||
|
autoRenewCerts: settingsMap['autoRenewCerts'] === 'true',
|
||||||
|
renewalThreshold: parseInt(settingsMap['renewalThreshold'] || '30', 10),
|
||||||
|
acmeEmail: settingsMap['acmeEmail'] || '',
|
||||||
|
httpPort: parseInt(settingsMap['httpPort'] || '80', 10),
|
||||||
|
httpsPort: parseInt(settingsMap['httpsPort'] || '443', 10),
|
||||||
|
forceHttps: settingsMap['forceHttps'] === 'true',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
private registerHandlers(): void {
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetSettings>(
|
||||||
|
'getSettings',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const settings = this.getSettingsObject();
|
||||||
|
return { settings };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_UpdateSettings>(
|
||||||
|
'updateSettings',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const db = this.opsServerRef.oneboxRef.database;
|
||||||
|
const updates = dataArg.settings;
|
||||||
|
|
||||||
|
// Store each setting as key-value pair
|
||||||
|
for (const [key, value] of Object.entries(updates)) {
|
||||||
|
if (value !== undefined) {
|
||||||
|
db.setSetting(key, String(value));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const settings = this.getSettingsObject();
|
||||||
|
return { settings };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_SetBackupPassword>(
|
||||||
|
'setBackupPassword',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
this.opsServerRef.oneboxRef.database.setSetting('backupPassword', dataArg.password);
|
||||||
|
return { ok: true };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetBackupPasswordStatus>(
|
||||||
|
'getBackupPasswordStatus',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const backupPassword = this.opsServerRef.oneboxRef.database.getSetting('backupPassword');
|
||||||
|
const isConfigured = !!backupPassword;
|
||||||
|
return { status: { isConfigured } };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
64
ts/opsserver/handlers/ssl.handler.ts
Normal file
64
ts/opsserver/handlers/ssl.handler.ts
Normal file
@@ -0,0 +1,64 @@
|
|||||||
|
import * as plugins from '../../plugins.ts';
|
||||||
|
import type { OpsServer } from '../classes.opsserver.ts';
|
||||||
|
import * as interfaces from '../../../ts_interfaces/index.ts';
|
||||||
|
import { requireValidIdentity } from '../helpers/guards.ts';
|
||||||
|
|
||||||
|
export class SslHandler {
|
||||||
|
public typedrouter = new plugins.typedrequest.TypedRouter();
|
||||||
|
|
||||||
|
constructor(private opsServerRef: OpsServer) {
|
||||||
|
this.opsServerRef.typedrouter.addTypedRouter(this.typedrouter);
|
||||||
|
this.registerHandlers();
|
||||||
|
}
|
||||||
|
|
||||||
|
private registerHandlers(): void {
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_ObtainCertificate>(
|
||||||
|
'obtainCertificate',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
await this.opsServerRef.oneboxRef.ssl.obtainCertificate(dataArg.domain, false);
|
||||||
|
const certificate = this.opsServerRef.oneboxRef.ssl.getCertificate(dataArg.domain);
|
||||||
|
return { certificate: certificate as unknown as interfaces.data.ICertificate };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_ListCertificates>(
|
||||||
|
'listCertificates',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const certificates = this.opsServerRef.oneboxRef.ssl.listCertificates();
|
||||||
|
return { certificates: certificates as unknown as interfaces.data.ICertificate[] };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetCertificate>(
|
||||||
|
'getCertificate',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const certificate = this.opsServerRef.oneboxRef.ssl.getCertificate(dataArg.domain);
|
||||||
|
if (!certificate) {
|
||||||
|
throw new plugins.typedrequest.TypedResponseError('Certificate not found');
|
||||||
|
}
|
||||||
|
return { certificate: certificate as unknown as interfaces.data.ICertificate };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_RenewCertificate>(
|
||||||
|
'renewCertificate',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
await this.opsServerRef.oneboxRef.ssl.renewCertificate(dataArg.domain);
|
||||||
|
const certificate = this.opsServerRef.oneboxRef.ssl.getCertificate(dataArg.domain);
|
||||||
|
return { certificate: certificate as unknown as interfaces.data.ICertificate };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
26
ts/opsserver/handlers/status.handler.ts
Normal file
26
ts/opsserver/handlers/status.handler.ts
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
import * as plugins from '../../plugins.ts';
|
||||||
|
import type { OpsServer } from '../classes.opsserver.ts';
|
||||||
|
import * as interfaces from '../../../ts_interfaces/index.ts';
|
||||||
|
import { requireValidIdentity } from '../helpers/guards.ts';
|
||||||
|
|
||||||
|
export class StatusHandler {
|
||||||
|
public typedrouter = new plugins.typedrequest.TypedRouter();
|
||||||
|
|
||||||
|
constructor(private opsServerRef: OpsServer) {
|
||||||
|
this.opsServerRef.typedrouter.addTypedRouter(this.typedrouter);
|
||||||
|
this.registerHandlers();
|
||||||
|
}
|
||||||
|
|
||||||
|
private registerHandlers(): void {
|
||||||
|
this.typedrouter.addTypedHandler(
|
||||||
|
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetSystemStatus>(
|
||||||
|
'getSystemStatus',
|
||||||
|
async (dataArg) => {
|
||||||
|
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg);
|
||||||
|
const status = await this.opsServerRef.oneboxRef.getSystemStatus();
|
||||||
|
return { status: status as unknown as interfaces.data.ISystemStatus };
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
29
ts/opsserver/helpers/guards.ts
Normal file
29
ts/opsserver/helpers/guards.ts
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
import * as plugins from '../../plugins.ts';
|
||||||
|
import type { AdminHandler } from '../handlers/admin.handler.ts';
|
||||||
|
import * as interfaces from '../../../ts_interfaces/index.ts';
|
||||||
|
|
||||||
|
export async function requireValidIdentity<T extends { identity?: interfaces.data.IIdentity }>(
|
||||||
|
adminHandler: AdminHandler,
|
||||||
|
dataArg: T,
|
||||||
|
): Promise<void> {
|
||||||
|
if (!dataArg.identity) {
|
||||||
|
throw new plugins.typedrequest.TypedResponseError('No identity provided');
|
||||||
|
}
|
||||||
|
const passed = await adminHandler.validIdentityGuard.exec({ identity: dataArg.identity });
|
||||||
|
if (!passed) {
|
||||||
|
throw new plugins.typedrequest.TypedResponseError('Valid identity required');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function requireAdminIdentity<T extends { identity?: interfaces.data.IIdentity }>(
|
||||||
|
adminHandler: AdminHandler,
|
||||||
|
dataArg: T,
|
||||||
|
): Promise<void> {
|
||||||
|
if (!dataArg.identity) {
|
||||||
|
throw new plugins.typedrequest.TypedResponseError('No identity provided');
|
||||||
|
}
|
||||||
|
const passed = await adminHandler.adminIdentityGuard.exec({ identity: dataArg.identity });
|
||||||
|
if (!passed) {
|
||||||
|
throw new plugins.typedrequest.TypedResponseError('Admin access required');
|
||||||
|
}
|
||||||
|
}
|
||||||
1
ts/opsserver/index.ts
Normal file
1
ts/opsserver/index.ts
Normal file
@@ -0,0 +1 @@
|
|||||||
|
export * from './classes.opsserver.ts';
|
||||||
@@ -41,6 +41,10 @@ export { smartregistry };
|
|||||||
import * as smarts3 from '@push.rocks/smarts3';
|
import * as smarts3 from '@push.rocks/smarts3';
|
||||||
export { smarts3 };
|
export { smarts3 };
|
||||||
|
|
||||||
|
// Task scheduling and cron jobs
|
||||||
|
import * as taskbuffer from '@push.rocks/taskbuffer';
|
||||||
|
export { taskbuffer };
|
||||||
|
|
||||||
// Crypto utilities (for password hashing, encryption)
|
// Crypto utilities (for password hashing, encryption)
|
||||||
import * as bcrypt from 'https://deno.land/x/bcrypt@v0.4.1/mod.ts';
|
import * as bcrypt from 'https://deno.land/x/bcrypt@v0.4.1/mod.ts';
|
||||||
export { bcrypt };
|
export { bcrypt };
|
||||||
@@ -57,3 +61,13 @@ export { crypto };
|
|||||||
import * as nodeHttps from 'node:https';
|
import * as nodeHttps from 'node:https';
|
||||||
import * as nodeHttp from 'node:http';
|
import * as nodeHttp from 'node:http';
|
||||||
export { nodeHttps, nodeHttp };
|
export { nodeHttps, nodeHttp };
|
||||||
|
|
||||||
|
// TypedRequest/TypedServer infrastructure
|
||||||
|
import * as typedrequest from '@api.global/typedrequest';
|
||||||
|
import * as typedserver from '@api.global/typedserver';
|
||||||
|
export { typedrequest, typedserver };
|
||||||
|
|
||||||
|
// Auth & Guards
|
||||||
|
import * as smartguard from '@push.rocks/smartguard';
|
||||||
|
import * as smartjwt from '@push.rocks/smartjwt';
|
||||||
|
export { smartguard, smartjwt };
|
||||||
|
|||||||
127
ts/types.ts
127
ts/types.ts
@@ -23,6 +23,8 @@ export interface IService {
|
|||||||
imageDigest?: string;
|
imageDigest?: string;
|
||||||
// Platform service requirements
|
// Platform service requirements
|
||||||
platformRequirements?: IPlatformRequirements;
|
platformRequirements?: IPlatformRequirements;
|
||||||
|
// Backup settings
|
||||||
|
includeImageInBackup?: boolean;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Registry types
|
// Registry types
|
||||||
@@ -317,3 +319,128 @@ export interface ICliArgs {
|
|||||||
_: string[];
|
_: string[];
|
||||||
[key: string]: unknown;
|
[key: string]: unknown;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Backup types
|
||||||
|
export type TBackupRestoreMode = 'restore' | 'import' | 'clone';
|
||||||
|
|
||||||
|
// Retention policy for GFS (Grandfather-Father-Son) time-window based retention
|
||||||
|
export interface IRetentionPolicy {
|
||||||
|
hourly: number; // 0 = disabled, else keep up to N backups from last 24h
|
||||||
|
daily: number; // Keep 1 backup per day for last N days
|
||||||
|
weekly: number; // Keep 1 backup per week for last N weeks
|
||||||
|
monthly: number; // Keep 1 backup per month for last N months
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default retention presets
|
||||||
|
export const RETENTION_PRESETS = {
|
||||||
|
standard: { hourly: 0, daily: 7, weekly: 4, monthly: 12 },
|
||||||
|
frequent: { hourly: 24, daily: 7, weekly: 4, monthly: 12 },
|
||||||
|
minimal: { hourly: 0, daily: 3, weekly: 2, monthly: 6 },
|
||||||
|
longterm: { hourly: 0, daily: 14, weekly: 8, monthly: 24 },
|
||||||
|
} as const;
|
||||||
|
|
||||||
|
export type TRetentionPreset = keyof typeof RETENTION_PRESETS | 'custom';
|
||||||
|
|
||||||
|
export interface IBackup {
|
||||||
|
id?: number;
|
||||||
|
serviceId: number;
|
||||||
|
serviceName: string; // Denormalized for display
|
||||||
|
filename: string;
|
||||||
|
sizeBytes: number;
|
||||||
|
createdAt: number;
|
||||||
|
includesImage: boolean;
|
||||||
|
platformResources: TPlatformServiceType[]; // Which platform types were backed up
|
||||||
|
checksum: string;
|
||||||
|
// Scheduled backup fields
|
||||||
|
scheduleId?: number; // Links backup to its schedule for retention
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IBackupManifest {
|
||||||
|
version: string;
|
||||||
|
createdAt: number;
|
||||||
|
oneboxVersion: string;
|
||||||
|
serviceName: string;
|
||||||
|
includesImage: boolean;
|
||||||
|
platformResources: TPlatformServiceType[];
|
||||||
|
checksum: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IBackupServiceConfig {
|
||||||
|
name: string;
|
||||||
|
image: string;
|
||||||
|
registry?: string;
|
||||||
|
envVars: Record<string, string>;
|
||||||
|
port: number;
|
||||||
|
domain?: string;
|
||||||
|
useOneboxRegistry?: boolean;
|
||||||
|
registryRepository?: string;
|
||||||
|
registryImageTag?: string;
|
||||||
|
autoUpdateOnPush?: boolean;
|
||||||
|
platformRequirements?: IPlatformRequirements;
|
||||||
|
includeImageInBackup?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IBackupPlatformResource {
|
||||||
|
resourceType: TPlatformResourceType;
|
||||||
|
resourceName: string;
|
||||||
|
platformServiceType: TPlatformServiceType;
|
||||||
|
credentials: Record<string, string>; // Decrypted for backup, re-encrypted on restore
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IBackupResult {
|
||||||
|
backup: IBackup;
|
||||||
|
filePath: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IRestoreOptions {
|
||||||
|
mode: TBackupRestoreMode;
|
||||||
|
newServiceName?: string; // Required for 'import' and 'clone' modes
|
||||||
|
skipPlatformData?: boolean; // Restore config only, skip DB/bucket data
|
||||||
|
overwriteExisting?: boolean; // For 'restore' mode
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IRestoreResult {
|
||||||
|
service: IService;
|
||||||
|
platformResourcesRestored: number;
|
||||||
|
warnings: string[];
|
||||||
|
}
|
||||||
|
|
||||||
|
// Backup scheduling types (GFS retention scheme)
|
||||||
|
export type TBackupScheduleScope = 'all' | 'pattern' | 'service';
|
||||||
|
|
||||||
|
export interface IBackupSchedule {
|
||||||
|
id?: number;
|
||||||
|
scopeType: TBackupScheduleScope;
|
||||||
|
scopePattern?: string; // Glob pattern for 'pattern' scope type
|
||||||
|
serviceId?: number; // Only for 'service' scope type
|
||||||
|
serviceName?: string; // Only for 'service' scope type
|
||||||
|
cronExpression: string;
|
||||||
|
retention: IRetentionPolicy; // Per-tier retention counts
|
||||||
|
enabled: boolean;
|
||||||
|
lastRunAt: number | null;
|
||||||
|
nextRunAt: number | null;
|
||||||
|
lastStatus: 'success' | 'failed' | null;
|
||||||
|
lastError: string | null;
|
||||||
|
createdAt: number;
|
||||||
|
updatedAt: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IBackupScheduleCreate {
|
||||||
|
scopeType: TBackupScheduleScope;
|
||||||
|
scopePattern?: string; // Required for 'pattern' scope type
|
||||||
|
serviceName?: string; // Required for 'service' scope type
|
||||||
|
cronExpression: string;
|
||||||
|
retention: IRetentionPolicy;
|
||||||
|
enabled?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IBackupScheduleUpdate {
|
||||||
|
cronExpression?: string;
|
||||||
|
retention?: IRetentionPolicy;
|
||||||
|
enabled?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Backup creation options (for scheduled backups)
|
||||||
|
export interface IBackupCreateOptions {
|
||||||
|
scheduleId?: number;
|
||||||
|
}
|
||||||
|
|||||||
11
ts_bundled/bundle.ts
Normal file
11
ts_bundled/bundle.ts
Normal file
File diff suppressed because one or more lines are too long
16
ts_interfaces/data/auth.ts
Normal file
16
ts_interfaces/data/auth.ts
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
/**
|
||||||
|
* Auth-related data shapes for Onebox
|
||||||
|
*/
|
||||||
|
|
||||||
|
export interface IIdentity {
|
||||||
|
jwt: string;
|
||||||
|
userId: string;
|
||||||
|
username: string;
|
||||||
|
expiresAt: number;
|
||||||
|
role: 'admin' | 'user';
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IUser {
|
||||||
|
username: string;
|
||||||
|
role: 'admin' | 'user';
|
||||||
|
}
|
||||||
89
ts_interfaces/data/backup.ts
Normal file
89
ts_interfaces/data/backup.ts
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
/**
|
||||||
|
* Backup-related data shapes for Onebox
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type { TPlatformServiceType } from './platform.ts';
|
||||||
|
|
||||||
|
export type TBackupRestoreMode = 'restore' | 'import' | 'clone';
|
||||||
|
export type TBackupScheduleScope = 'all' | 'pattern' | 'service';
|
||||||
|
|
||||||
|
export interface IRetentionPolicy {
|
||||||
|
hourly: number;
|
||||||
|
daily: number;
|
||||||
|
weekly: number;
|
||||||
|
monthly: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const RETENTION_PRESETS = {
|
||||||
|
standard: { hourly: 0, daily: 7, weekly: 4, monthly: 12 },
|
||||||
|
frequent: { hourly: 24, daily: 7, weekly: 4, monthly: 12 },
|
||||||
|
minimal: { hourly: 0, daily: 3, weekly: 2, monthly: 6 },
|
||||||
|
longterm: { hourly: 0, daily: 14, weekly: 8, monthly: 24 },
|
||||||
|
} as const;
|
||||||
|
|
||||||
|
export type TRetentionPreset = keyof typeof RETENTION_PRESETS | 'custom';
|
||||||
|
|
||||||
|
export interface IBackup {
|
||||||
|
id?: number;
|
||||||
|
serviceId: number;
|
||||||
|
serviceName: string;
|
||||||
|
filename: string;
|
||||||
|
sizeBytes: number;
|
||||||
|
createdAt: number;
|
||||||
|
includesImage: boolean;
|
||||||
|
platformResources: TPlatformServiceType[];
|
||||||
|
checksum: string;
|
||||||
|
scheduleId?: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IBackupSchedule {
|
||||||
|
id?: number;
|
||||||
|
scopeType: TBackupScheduleScope;
|
||||||
|
scopePattern?: string;
|
||||||
|
serviceId?: number;
|
||||||
|
serviceName?: string;
|
||||||
|
cronExpression: string;
|
||||||
|
retention: IRetentionPolicy;
|
||||||
|
enabled: boolean;
|
||||||
|
lastRunAt: number | null;
|
||||||
|
nextRunAt: number | null;
|
||||||
|
lastStatus: 'success' | 'failed' | null;
|
||||||
|
lastError: string | null;
|
||||||
|
createdAt: number;
|
||||||
|
updatedAt: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IBackupScheduleCreate {
|
||||||
|
scopeType: TBackupScheduleScope;
|
||||||
|
scopePattern?: string;
|
||||||
|
serviceName?: string;
|
||||||
|
cronExpression: string;
|
||||||
|
retention: IRetentionPolicy;
|
||||||
|
enabled?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IBackupScheduleUpdate {
|
||||||
|
cronExpression?: string;
|
||||||
|
retention?: IRetentionPolicy;
|
||||||
|
enabled?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IRestoreOptions {
|
||||||
|
mode: TBackupRestoreMode;
|
||||||
|
newServiceName?: string;
|
||||||
|
overwriteExisting?: boolean;
|
||||||
|
skipPlatformData?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IRestoreResult {
|
||||||
|
service: {
|
||||||
|
name: string;
|
||||||
|
status: string;
|
||||||
|
};
|
||||||
|
platformResourcesRestored: number;
|
||||||
|
warnings: string[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IBackupPasswordStatus {
|
||||||
|
isConfigured: boolean;
|
||||||
|
}
|
||||||
59
ts_interfaces/data/domain.ts
Normal file
59
ts_interfaces/data/domain.ts
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
/**
|
||||||
|
* Domain, DNS, and certificate data shapes for Onebox
|
||||||
|
*/
|
||||||
|
|
||||||
|
export interface IDomain {
|
||||||
|
id?: number;
|
||||||
|
domain: string;
|
||||||
|
dnsProvider: 'cloudflare' | 'manual' | null;
|
||||||
|
cloudflareZoneId?: string;
|
||||||
|
isObsolete: boolean;
|
||||||
|
defaultWildcard: boolean;
|
||||||
|
createdAt: number;
|
||||||
|
updatedAt: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ICertificate {
|
||||||
|
id?: number;
|
||||||
|
domainId: number;
|
||||||
|
certDomain: string;
|
||||||
|
isWildcard: boolean;
|
||||||
|
certPem: string;
|
||||||
|
keyPem: string;
|
||||||
|
fullchainPem: string;
|
||||||
|
expiryDate: number;
|
||||||
|
issuer: string;
|
||||||
|
isValid: boolean;
|
||||||
|
createdAt: number;
|
||||||
|
updatedAt: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ICertRequirement {
|
||||||
|
id?: number;
|
||||||
|
domainId: number;
|
||||||
|
serviceId: number;
|
||||||
|
subdomain: string;
|
||||||
|
status: 'pending' | 'active' | 'renewing' | 'failed';
|
||||||
|
certificateId?: number;
|
||||||
|
createdAt: number;
|
||||||
|
updatedAt: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IDomainDetail {
|
||||||
|
domain: IDomain;
|
||||||
|
certificates: ICertificate[];
|
||||||
|
requirements: ICertRequirement[];
|
||||||
|
serviceCount: number;
|
||||||
|
certificateStatus: 'valid' | 'expiring-soon' | 'expired' | 'pending' | 'none';
|
||||||
|
daysRemaining: number | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IDnsRecord {
|
||||||
|
id?: number;
|
||||||
|
domain: string;
|
||||||
|
type: 'A' | 'AAAA' | 'CNAME';
|
||||||
|
value: string;
|
||||||
|
cloudflareID?: string;
|
||||||
|
createdAt: number;
|
||||||
|
updatedAt: number;
|
||||||
|
}
|
||||||
9
ts_interfaces/data/index.ts
Normal file
9
ts_interfaces/data/index.ts
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
export * from './auth.ts';
|
||||||
|
export * from './service.ts';
|
||||||
|
export * from './platform.ts';
|
||||||
|
export * from './network.ts';
|
||||||
|
export * from './domain.ts';
|
||||||
|
export * from './registry.ts';
|
||||||
|
export * from './backup.ts';
|
||||||
|
export * from './settings.ts';
|
||||||
|
export * from './system.ts';
|
||||||
64
ts_interfaces/data/network.ts
Normal file
64
ts_interfaces/data/network.ts
Normal file
@@ -0,0 +1,64 @@
|
|||||||
|
/**
|
||||||
|
* Network-related data shapes for Onebox
|
||||||
|
*/
|
||||||
|
|
||||||
|
export type TNetworkTargetType = 'service' | 'registry' | 'platform';
|
||||||
|
|
||||||
|
export interface INetworkTarget {
|
||||||
|
type: TNetworkTargetType;
|
||||||
|
name: string;
|
||||||
|
domain: string | null;
|
||||||
|
targetHost: string;
|
||||||
|
targetPort: number;
|
||||||
|
status: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface INetworkStats {
|
||||||
|
proxy: {
|
||||||
|
running: boolean;
|
||||||
|
httpPort: number;
|
||||||
|
httpsPort: number;
|
||||||
|
routes: number;
|
||||||
|
certificates: number;
|
||||||
|
};
|
||||||
|
logReceiver: {
|
||||||
|
running: boolean;
|
||||||
|
port: number;
|
||||||
|
clients: number;
|
||||||
|
connections: number;
|
||||||
|
sampleRate: number;
|
||||||
|
recentLogsCount: number;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ITrafficStats {
|
||||||
|
requestCount: number;
|
||||||
|
errorCount: number;
|
||||||
|
avgResponseTime: number;
|
||||||
|
totalBytes: number;
|
||||||
|
statusCounts: Record<string, number>;
|
||||||
|
requestsPerMinute: number;
|
||||||
|
errorRate: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ICaddyAccessLog {
|
||||||
|
ts: number;
|
||||||
|
request: {
|
||||||
|
remote_ip: string;
|
||||||
|
method: string;
|
||||||
|
host: string;
|
||||||
|
uri: string;
|
||||||
|
proto: string;
|
||||||
|
};
|
||||||
|
status: number;
|
||||||
|
duration: number;
|
||||||
|
size: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface INetworkLogMessage {
|
||||||
|
type: 'connected' | 'access_log' | 'filter_updated';
|
||||||
|
clientId?: string;
|
||||||
|
filter?: { domain?: string; sampleRate?: number };
|
||||||
|
data?: ICaddyAccessLog;
|
||||||
|
timestamp: number;
|
||||||
|
}
|
||||||
37
ts_interfaces/data/platform.ts
Normal file
37
ts_interfaces/data/platform.ts
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
/**
|
||||||
|
* Platform service data shapes for Onebox
|
||||||
|
*/
|
||||||
|
|
||||||
|
export type TPlatformServiceType = 'mongodb' | 'minio' | 'redis' | 'postgresql' | 'rabbitmq' | 'caddy' | 'clickhouse';
|
||||||
|
export type TPlatformServiceStatus = 'not-deployed' | 'stopped' | 'starting' | 'running' | 'stopping' | 'failed';
|
||||||
|
export type TPlatformResourceType = 'database' | 'bucket' | 'cache' | 'queue';
|
||||||
|
|
||||||
|
export interface IPlatformRequirements {
|
||||||
|
mongodb?: boolean;
|
||||||
|
s3?: boolean;
|
||||||
|
clickhouse?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IPlatformService {
|
||||||
|
type: TPlatformServiceType;
|
||||||
|
displayName: string;
|
||||||
|
resourceTypes: TPlatformResourceType[];
|
||||||
|
status: TPlatformServiceStatus;
|
||||||
|
containerId?: string;
|
||||||
|
isCore?: boolean;
|
||||||
|
createdAt?: number;
|
||||||
|
updatedAt?: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IPlatformResource {
|
||||||
|
id: number;
|
||||||
|
resourceType: TPlatformResourceType;
|
||||||
|
resourceName: string;
|
||||||
|
platformService: {
|
||||||
|
type: TPlatformServiceType;
|
||||||
|
name: string;
|
||||||
|
status: TPlatformServiceStatus;
|
||||||
|
};
|
||||||
|
envVars: Record<string, string>;
|
||||||
|
createdAt: number;
|
||||||
|
}
|
||||||
35
ts_interfaces/data/registry.ts
Normal file
35
ts_interfaces/data/registry.ts
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
/**
|
||||||
|
* Registry-related data shapes for Onebox
|
||||||
|
*/
|
||||||
|
|
||||||
|
export interface IRegistry {
|
||||||
|
id?: number;
|
||||||
|
url: string;
|
||||||
|
username: string;
|
||||||
|
createdAt: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IRegistryToken {
|
||||||
|
id: number;
|
||||||
|
name: string;
|
||||||
|
type: 'global' | 'ci';
|
||||||
|
scope: 'all' | string[];
|
||||||
|
scopeDisplay: string;
|
||||||
|
expiresAt: number | null;
|
||||||
|
createdAt: number;
|
||||||
|
lastUsedAt: number | null;
|
||||||
|
createdBy: string;
|
||||||
|
isExpired: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ICreateTokenRequest {
|
||||||
|
name: string;
|
||||||
|
type: 'global' | 'ci';
|
||||||
|
scope: 'all' | string[];
|
||||||
|
expiresIn: '30d' | '90d' | '365d' | 'never';
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ITokenCreatedResponse {
|
||||||
|
token: IRegistryToken;
|
||||||
|
plainToken: string;
|
||||||
|
}
|
||||||
82
ts_interfaces/data/service.ts
Normal file
82
ts_interfaces/data/service.ts
Normal file
@@ -0,0 +1,82 @@
|
|||||||
|
/**
|
||||||
|
* Service-related data shapes for Onebox
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type { IPlatformRequirements } from './platform.ts';
|
||||||
|
|
||||||
|
export type TServiceStatus = 'stopped' | 'starting' | 'running' | 'stopping' | 'failed';
|
||||||
|
|
||||||
|
export interface IService {
|
||||||
|
id?: number;
|
||||||
|
name: string;
|
||||||
|
image: string;
|
||||||
|
registry?: string;
|
||||||
|
envVars: Record<string, string>;
|
||||||
|
port: number;
|
||||||
|
domain?: string;
|
||||||
|
containerID?: string;
|
||||||
|
status: TServiceStatus;
|
||||||
|
createdAt: number;
|
||||||
|
updatedAt: number;
|
||||||
|
// Onebox Registry fields
|
||||||
|
useOneboxRegistry?: boolean;
|
||||||
|
registryRepository?: string;
|
||||||
|
registryImageTag?: string;
|
||||||
|
autoUpdateOnPush?: boolean;
|
||||||
|
imageDigest?: string;
|
||||||
|
// Platform service requirements
|
||||||
|
platformRequirements?: IPlatformRequirements;
|
||||||
|
// Backup settings
|
||||||
|
includeImageInBackup?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IServiceCreate {
|
||||||
|
name: string;
|
||||||
|
image: string;
|
||||||
|
port: number;
|
||||||
|
domain?: string;
|
||||||
|
envVars?: Record<string, string>;
|
||||||
|
useOneboxRegistry?: boolean;
|
||||||
|
registryImageTag?: string;
|
||||||
|
autoUpdateOnPush?: boolean;
|
||||||
|
enableMongoDB?: boolean;
|
||||||
|
enableS3?: boolean;
|
||||||
|
enableClickHouse?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IServiceUpdate {
|
||||||
|
image?: string;
|
||||||
|
registry?: string;
|
||||||
|
port?: number;
|
||||||
|
domain?: string;
|
||||||
|
envVars?: Record<string, string>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IContainerStats {
|
||||||
|
cpuPercent: number;
|
||||||
|
memoryUsed: number;
|
||||||
|
memoryLimit: number;
|
||||||
|
memoryPercent: number;
|
||||||
|
networkRx: number;
|
||||||
|
networkTx: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IMetric {
|
||||||
|
id?: number;
|
||||||
|
serviceId: number;
|
||||||
|
timestamp: number;
|
||||||
|
cpuPercent: number;
|
||||||
|
memoryUsed: number;
|
||||||
|
memoryLimit: number;
|
||||||
|
networkRxBytes: number;
|
||||||
|
networkTxBytes: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ILogEntry {
|
||||||
|
id?: number;
|
||||||
|
serviceId: number;
|
||||||
|
timestamp: number;
|
||||||
|
message: string;
|
||||||
|
level: 'info' | 'warn' | 'error' | 'debug';
|
||||||
|
source: 'stdout' | 'stderr';
|
||||||
|
}
|
||||||
14
ts_interfaces/data/settings.ts
Normal file
14
ts_interfaces/data/settings.ts
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
/**
|
||||||
|
* Settings data shapes for Onebox
|
||||||
|
*/
|
||||||
|
|
||||||
|
export interface ISettings {
|
||||||
|
cloudflareToken: string;
|
||||||
|
cloudflareZoneId: string;
|
||||||
|
autoRenewCerts: boolean;
|
||||||
|
renewalThreshold: number;
|
||||||
|
acmeEmail: string;
|
||||||
|
httpPort: number;
|
||||||
|
httpsPort: number;
|
||||||
|
forceHttps: boolean;
|
||||||
|
}
|
||||||
32
ts_interfaces/data/system.ts
Normal file
32
ts_interfaces/data/system.ts
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
/**
|
||||||
|
* System status data shapes for Onebox
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type { TPlatformServiceType, TPlatformServiceStatus } from './platform.ts';
|
||||||
|
|
||||||
|
export interface ISystemStatus {
|
||||||
|
docker: {
|
||||||
|
running: boolean;
|
||||||
|
version: unknown;
|
||||||
|
};
|
||||||
|
reverseProxy: {
|
||||||
|
http: { running: boolean; port: number };
|
||||||
|
https: { running: boolean; port: number; certificates: number };
|
||||||
|
routes: number;
|
||||||
|
};
|
||||||
|
dns: { configured: boolean };
|
||||||
|
ssl: { configured: boolean; certificateCount: number };
|
||||||
|
services: { total: number; running: number; stopped: number };
|
||||||
|
platformServices: Array<{
|
||||||
|
type: TPlatformServiceType;
|
||||||
|
displayName: string;
|
||||||
|
status: TPlatformServiceStatus;
|
||||||
|
resourceCount: number;
|
||||||
|
}>;
|
||||||
|
certificateHealth: {
|
||||||
|
valid: number;
|
||||||
|
expiringSoon: number;
|
||||||
|
expired: number;
|
||||||
|
expiringDomains: Array<{ domain: string; daysRemaining: number }>;
|
||||||
|
};
|
||||||
|
}
|
||||||
9
ts_interfaces/index.ts
Normal file
9
ts_interfaces/index.ts
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
export * from './plugins.ts';
|
||||||
|
|
||||||
|
// Data types
|
||||||
|
import * as data from './data/index.ts';
|
||||||
|
export { data };
|
||||||
|
|
||||||
|
// Request interfaces
|
||||||
|
import * as requests from './requests/index.ts';
|
||||||
|
export { requests };
|
||||||
6
ts_interfaces/plugins.ts
Normal file
6
ts_interfaces/plugins.ts
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
// @apiglobal scope
|
||||||
|
import * as typedrequestInterfaces from '@api.global/typedrequest-interfaces';
|
||||||
|
|
||||||
|
export {
|
||||||
|
typedrequestInterfaces,
|
||||||
|
};
|
||||||
58
ts_interfaces/requests/admin.ts
Normal file
58
ts_interfaces/requests/admin.ts
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
import * as plugins from '../plugins.ts';
|
||||||
|
import * as data from '../data/index.ts';
|
||||||
|
|
||||||
|
export interface IReq_AdminLoginWithUsernameAndPassword extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_AdminLoginWithUsernameAndPassword
|
||||||
|
> {
|
||||||
|
method: 'adminLoginWithUsernameAndPassword';
|
||||||
|
request: {
|
||||||
|
username: string;
|
||||||
|
password: string;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
identity?: data.IIdentity;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_AdminLogout extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_AdminLogout
|
||||||
|
> {
|
||||||
|
method: 'adminLogout';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
ok: boolean;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_VerifyIdentity extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_VerifyIdentity
|
||||||
|
> {
|
||||||
|
method: 'verifyIdentity';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
valid: boolean;
|
||||||
|
identity?: data.IIdentity;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_ChangePassword extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_ChangePassword
|
||||||
|
> {
|
||||||
|
method: 'changePassword';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
currentPassword: string;
|
||||||
|
newPassword: string;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
ok: boolean;
|
||||||
|
};
|
||||||
|
}
|
||||||
86
ts_interfaces/requests/backup-schedules.ts
Normal file
86
ts_interfaces/requests/backup-schedules.ts
Normal file
@@ -0,0 +1,86 @@
|
|||||||
|
import * as plugins from '../plugins.ts';
|
||||||
|
import * as data from '../data/index.ts';
|
||||||
|
|
||||||
|
export interface IReq_GetBackupSchedules extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_GetBackupSchedules
|
||||||
|
> {
|
||||||
|
method: 'getBackupSchedules';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
schedules: data.IBackupSchedule[];
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_CreateBackupSchedule extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_CreateBackupSchedule
|
||||||
|
> {
|
||||||
|
method: 'createBackupSchedule';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
scheduleConfig: data.IBackupScheduleCreate;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
schedule: data.IBackupSchedule;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_GetBackupSchedule extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_GetBackupSchedule
|
||||||
|
> {
|
||||||
|
method: 'getBackupSchedule';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
scheduleId: number;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
schedule: data.IBackupSchedule;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_UpdateBackupSchedule extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_UpdateBackupSchedule
|
||||||
|
> {
|
||||||
|
method: 'updateBackupSchedule';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
scheduleId: number;
|
||||||
|
updates: data.IBackupScheduleUpdate;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
schedule: data.IBackupSchedule;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_DeleteBackupSchedule extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_DeleteBackupSchedule
|
||||||
|
> {
|
||||||
|
method: 'deleteBackupSchedule';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
scheduleId: number;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
ok: boolean;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_TriggerBackupSchedule extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_TriggerBackupSchedule
|
||||||
|
> {
|
||||||
|
method: 'triggerBackupSchedule';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
scheduleId: number;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
backup: data.IBackup;
|
||||||
|
};
|
||||||
|
}
|
||||||
73
ts_interfaces/requests/backups.ts
Normal file
73
ts_interfaces/requests/backups.ts
Normal file
@@ -0,0 +1,73 @@
|
|||||||
|
import * as plugins from '../plugins.ts';
|
||||||
|
import * as data from '../data/index.ts';
|
||||||
|
|
||||||
|
export interface IReq_GetBackups extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_GetBackups
|
||||||
|
> {
|
||||||
|
method: 'getBackups';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
backups: data.IBackup[];
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_GetBackup extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_GetBackup
|
||||||
|
> {
|
||||||
|
method: 'getBackup';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
backupId: number;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
backup: data.IBackup;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_DeleteBackup extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_DeleteBackup
|
||||||
|
> {
|
||||||
|
method: 'deleteBackup';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
backupId: number;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
ok: boolean;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_RestoreBackup extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_RestoreBackup
|
||||||
|
> {
|
||||||
|
method: 'restoreBackup';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
backupId: number;
|
||||||
|
options: data.IRestoreOptions;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
result: data.IRestoreResult;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_DownloadBackup extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_DownloadBackup
|
||||||
|
> {
|
||||||
|
method: 'downloadBackup';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
backupId: number;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
downloadUrl: string;
|
||||||
|
filename: string;
|
||||||
|
};
|
||||||
|
}
|
||||||
58
ts_interfaces/requests/dns.ts
Normal file
58
ts_interfaces/requests/dns.ts
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
import * as plugins from '../plugins.ts';
|
||||||
|
import * as data from '../data/index.ts';
|
||||||
|
|
||||||
|
export interface IReq_GetDnsRecords extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_GetDnsRecords
|
||||||
|
> {
|
||||||
|
method: 'getDnsRecords';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
records: data.IDnsRecord[];
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_CreateDnsRecord extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_CreateDnsRecord
|
||||||
|
> {
|
||||||
|
method: 'createDnsRecord';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
domain: string;
|
||||||
|
type: 'A' | 'AAAA' | 'CNAME';
|
||||||
|
value: string;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
record: data.IDnsRecord;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_DeleteDnsRecord extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_DeleteDnsRecord
|
||||||
|
> {
|
||||||
|
method: 'deleteDnsRecord';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
domain: string;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
ok: boolean;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_SyncDns extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_SyncDns
|
||||||
|
> {
|
||||||
|
method: 'syncDns';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
records: data.IDnsRecord[];
|
||||||
|
};
|
||||||
|
}
|
||||||
42
ts_interfaces/requests/domains.ts
Normal file
42
ts_interfaces/requests/domains.ts
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
import * as plugins from '../plugins.ts';
|
||||||
|
import * as data from '../data/index.ts';
|
||||||
|
|
||||||
|
export interface IReq_GetDomains extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_GetDomains
|
||||||
|
> {
|
||||||
|
method: 'getDomains';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
domains: data.IDomainDetail[];
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_GetDomain extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_GetDomain
|
||||||
|
> {
|
||||||
|
method: 'getDomain';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
domainName: string;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
domain: data.IDomainDetail;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_SyncDomains extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_SyncDomains
|
||||||
|
> {
|
||||||
|
method: 'syncDomains';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
domains: data.IDomainDetail[];
|
||||||
|
};
|
||||||
|
}
|
||||||
13
ts_interfaces/requests/index.ts
Normal file
13
ts_interfaces/requests/index.ts
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
export * from './admin.ts';
|
||||||
|
export * from './status.ts';
|
||||||
|
export * from './services.ts';
|
||||||
|
export * from './platform-services.ts';
|
||||||
|
export * from './ssl.ts';
|
||||||
|
export * from './domains.ts';
|
||||||
|
export * from './dns.ts';
|
||||||
|
export * from './registry.ts';
|
||||||
|
export * from './network.ts';
|
||||||
|
export * from './backups.ts';
|
||||||
|
export * from './backup-schedules.ts';
|
||||||
|
export * from './settings.ts';
|
||||||
|
export * from './logs.ts';
|
||||||
60
ts_interfaces/requests/logs.ts
Normal file
60
ts_interfaces/requests/logs.ts
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
import * as plugins from '../plugins.ts';
|
||||||
|
import * as data from '../data/index.ts';
|
||||||
|
|
||||||
|
export interface IReq_GetServiceLogStream extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_GetServiceLogStream
|
||||||
|
> {
|
||||||
|
method: 'getServiceLogStream';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
serviceName: string;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
logStream: plugins.typedrequestInterfaces.IVirtualStream;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_GetPlatformServiceLogStream extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_GetPlatformServiceLogStream
|
||||||
|
> {
|
||||||
|
method: 'getPlatformServiceLogStream';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
serviceType: data.TPlatformServiceType;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
logStream: plugins.typedrequestInterfaces.IVirtualStream;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_GetNetworkLogStream extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_GetNetworkLogStream
|
||||||
|
> {
|
||||||
|
method: 'getNetworkLogStream';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
filter?: {
|
||||||
|
domain?: string;
|
||||||
|
sampleRate?: number;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
logStream: plugins.typedrequestInterfaces.IVirtualStream;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_GetEventStream extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_GetEventStream
|
||||||
|
> {
|
||||||
|
method: 'getEventStream';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
eventStream: plugins.typedrequestInterfaces.IVirtualStream;
|
||||||
|
};
|
||||||
|
}
|
||||||
41
ts_interfaces/requests/network.ts
Normal file
41
ts_interfaces/requests/network.ts
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
import * as plugins from '../plugins.ts';
|
||||||
|
import * as data from '../data/index.ts';
|
||||||
|
|
||||||
|
export interface IReq_GetNetworkTargets extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_GetNetworkTargets
|
||||||
|
> {
|
||||||
|
method: 'getNetworkTargets';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
targets: data.INetworkTarget[];
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_GetNetworkStats extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_GetNetworkStats
|
||||||
|
> {
|
||||||
|
method: 'getNetworkStats';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
stats: data.INetworkStats;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_GetTrafficStats extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_GetTrafficStats
|
||||||
|
> {
|
||||||
|
method: 'getTrafficStats';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
stats: data.ITrafficStats;
|
||||||
|
};
|
||||||
|
}
|
||||||
71
ts_interfaces/requests/platform-services.ts
Normal file
71
ts_interfaces/requests/platform-services.ts
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|
import * as plugins from '../plugins.ts';
|
||||||
|
import * as data from '../data/index.ts';
|
||||||
|
|
||||||
|
export interface IReq_GetPlatformServices extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_GetPlatformServices
|
||||||
|
> {
|
||||||
|
method: 'getPlatformServices';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
platformServices: data.IPlatformService[];
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_GetPlatformService extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_GetPlatformService
|
||||||
|
> {
|
||||||
|
method: 'getPlatformService';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
serviceType: data.TPlatformServiceType;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
platformService: data.IPlatformService;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_StartPlatformService extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_StartPlatformService
|
||||||
|
> {
|
||||||
|
method: 'startPlatformService';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
serviceType: data.TPlatformServiceType;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
platformService: data.IPlatformService;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_StopPlatformService extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_StopPlatformService
|
||||||
|
> {
|
||||||
|
method: 'stopPlatformService';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
serviceType: data.TPlatformServiceType;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
platformService: data.IPlatformService;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_GetPlatformServiceStats extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_GetPlatformServiceStats
|
||||||
|
> {
|
||||||
|
method: 'getPlatformServiceStats';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
serviceType: data.TPlatformServiceType;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
stats: data.IContainerStats;
|
||||||
|
};
|
||||||
|
}
|
||||||
57
ts_interfaces/requests/registry.ts
Normal file
57
ts_interfaces/requests/registry.ts
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
import * as plugins from '../plugins.ts';
|
||||||
|
import * as data from '../data/index.ts';
|
||||||
|
|
||||||
|
export interface IReq_GetRegistryTags extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_GetRegistryTags
|
||||||
|
> {
|
||||||
|
method: 'getRegistryTags';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
serviceName: string;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
tags: string[];
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_GetRegistryTokens extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_GetRegistryTokens
|
||||||
|
> {
|
||||||
|
method: 'getRegistryTokens';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
tokens: data.IRegistryToken[];
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_CreateRegistryToken extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_CreateRegistryToken
|
||||||
|
> {
|
||||||
|
method: 'createRegistryToken';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
tokenConfig: data.ICreateTokenRequest;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
result: data.ITokenCreatedResponse;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_DeleteRegistryToken extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_DeleteRegistryToken
|
||||||
|
> {
|
||||||
|
method: 'deleteRegistryToken';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
tokenId: number;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
ok: boolean;
|
||||||
|
};
|
||||||
|
}
|
||||||
214
ts_interfaces/requests/services.ts
Normal file
214
ts_interfaces/requests/services.ts
Normal file
@@ -0,0 +1,214 @@
|
|||||||
|
import * as plugins from '../plugins.ts';
|
||||||
|
import * as data from '../data/index.ts';
|
||||||
|
|
||||||
|
export interface IReq_GetServices extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_GetServices
|
||||||
|
> {
|
||||||
|
method: 'getServices';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
services: data.IService[];
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_GetService extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_GetService
|
||||||
|
> {
|
||||||
|
method: 'getService';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
serviceName: string;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
service: data.IService;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_CreateService extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_CreateService
|
||||||
|
> {
|
||||||
|
method: 'createService';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
serviceConfig: data.IServiceCreate;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
service: data.IService;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_UpdateService extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_UpdateService
|
||||||
|
> {
|
||||||
|
method: 'updateService';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
serviceName: string;
|
||||||
|
updates: data.IServiceUpdate;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
service: data.IService;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_DeleteService extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_DeleteService
|
||||||
|
> {
|
||||||
|
method: 'deleteService';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
serviceName: string;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
ok: boolean;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_StartService extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_StartService
|
||||||
|
> {
|
||||||
|
method: 'startService';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
serviceName: string;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
service: data.IService;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_StopService extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_StopService
|
||||||
|
> {
|
||||||
|
method: 'stopService';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
serviceName: string;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
service: data.IService;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_RestartService extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_RestartService
|
||||||
|
> {
|
||||||
|
method: 'restartService';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
serviceName: string;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
service: data.IService;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_GetServiceLogs extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_GetServiceLogs
|
||||||
|
> {
|
||||||
|
method: 'getServiceLogs';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
serviceName: string;
|
||||||
|
tail?: number;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
logs: string;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_GetServiceStats extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_GetServiceStats
|
||||||
|
> {
|
||||||
|
method: 'getServiceStats';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
serviceName: string;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
stats: data.IContainerStats;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_GetServiceMetrics extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_GetServiceMetrics
|
||||||
|
> {
|
||||||
|
method: 'getServiceMetrics';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
serviceName: string;
|
||||||
|
limit?: number;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
metrics: data.IMetric[];
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_GetServicePlatformResources extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_GetServicePlatformResources
|
||||||
|
> {
|
||||||
|
method: 'getServicePlatformResources';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
serviceName: string;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
resources: data.IPlatformResource[];
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_GetServiceBackups extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_GetServiceBackups
|
||||||
|
> {
|
||||||
|
method: 'getServiceBackups';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
serviceName: string;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
backups: data.IBackup[];
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_CreateServiceBackup extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_CreateServiceBackup
|
||||||
|
> {
|
||||||
|
method: 'createServiceBackup';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
serviceName: string;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
backup: data.IBackup;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_GetServiceBackupSchedules extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_GetServiceBackupSchedules
|
||||||
|
> {
|
||||||
|
method: 'getServiceBackupSchedules';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
serviceName: string;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
schedules: data.IBackupSchedule[];
|
||||||
|
};
|
||||||
|
}
|
||||||
56
ts_interfaces/requests/settings.ts
Normal file
56
ts_interfaces/requests/settings.ts
Normal file
@@ -0,0 +1,56 @@
|
|||||||
|
import * as plugins from '../plugins.ts';
|
||||||
|
import * as data from '../data/index.ts';
|
||||||
|
|
||||||
|
export interface IReq_GetSettings extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_GetSettings
|
||||||
|
> {
|
||||||
|
method: 'getSettings';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
settings: data.ISettings;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_UpdateSettings extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_UpdateSettings
|
||||||
|
> {
|
||||||
|
method: 'updateSettings';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
settings: Partial<data.ISettings>;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
settings: data.ISettings;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_SetBackupPassword extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_SetBackupPassword
|
||||||
|
> {
|
||||||
|
method: 'setBackupPassword';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
password: string;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
ok: boolean;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_GetBackupPasswordStatus extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_GetBackupPasswordStatus
|
||||||
|
> {
|
||||||
|
method: 'getBackupPasswordStatus';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
status: data.IBackupPasswordStatus;
|
||||||
|
};
|
||||||
|
}
|
||||||
57
ts_interfaces/requests/ssl.ts
Normal file
57
ts_interfaces/requests/ssl.ts
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
import * as plugins from '../plugins.ts';
|
||||||
|
import * as data from '../data/index.ts';
|
||||||
|
|
||||||
|
export interface IReq_ObtainCertificate extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_ObtainCertificate
|
||||||
|
> {
|
||||||
|
method: 'obtainCertificate';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
domain: string;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
certificate: data.ICertificate;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_ListCertificates extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_ListCertificates
|
||||||
|
> {
|
||||||
|
method: 'listCertificates';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
certificates: data.ICertificate[];
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_GetCertificate extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_GetCertificate
|
||||||
|
> {
|
||||||
|
method: 'getCertificate';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
domain: string;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
certificate: data.ICertificate;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IReq_RenewCertificate extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_RenewCertificate
|
||||||
|
> {
|
||||||
|
method: 'renewCertificate';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
domain: string;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
certificate: data.ICertificate;
|
||||||
|
};
|
||||||
|
}
|
||||||
15
ts_interfaces/requests/status.ts
Normal file
15
ts_interfaces/requests/status.ts
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
import * as plugins from '../plugins.ts';
|
||||||
|
import * as data from '../data/index.ts';
|
||||||
|
|
||||||
|
export interface IReq_GetSystemStatus extends plugins.typedrequestInterfaces.implementsTR<
|
||||||
|
plugins.typedrequestInterfaces.ITypedRequest,
|
||||||
|
IReq_GetSystemStatus
|
||||||
|
> {
|
||||||
|
method: 'getSystemStatus';
|
||||||
|
request: {
|
||||||
|
identity: data.IIdentity;
|
||||||
|
};
|
||||||
|
response: {
|
||||||
|
status: data.ISystemStatus;
|
||||||
|
};
|
||||||
|
}
|
||||||
8
ts_web/00_commitinfo_data.ts
Normal file
8
ts_web/00_commitinfo_data.ts
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
/**
|
||||||
|
* autocreated commitinfo by @push.rocks/commitinfo
|
||||||
|
*/
|
||||||
|
export const commitinfo = {
|
||||||
|
name: '@serve.zone/onebox',
|
||||||
|
version: '1.14.1',
|
||||||
|
description: 'Self-hosted container platform with automatic SSL and DNS - a mini Heroku for single servers'
|
||||||
|
}
|
||||||
919
ts_web/appstate.ts
Normal file
919
ts_web/appstate.ts
Normal file
@@ -0,0 +1,919 @@
|
|||||||
|
import * as plugins from './plugins.js';
|
||||||
|
import * as interfaces from '../ts_interfaces/index.js';
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Smartstate instance
|
||||||
|
// ============================================================================
|
||||||
|
export const appState = new plugins.domtools.plugins.smartstate.Smartstate();
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// State Part Interfaces
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
export interface ILoginState {
|
||||||
|
identity: interfaces.data.IIdentity | null;
|
||||||
|
isLoggedIn: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ISystemState {
|
||||||
|
status: interfaces.data.ISystemStatus | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IServicesState {
|
||||||
|
services: interfaces.data.IService[];
|
||||||
|
currentService: interfaces.data.IService | null;
|
||||||
|
currentServiceLogs: interfaces.data.ILogEntry[];
|
||||||
|
currentServiceStats: interfaces.data.IContainerStats | null;
|
||||||
|
platformServices: interfaces.data.IPlatformService[];
|
||||||
|
currentPlatformService: interfaces.data.IPlatformService | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface INetworkState {
|
||||||
|
targets: interfaces.data.INetworkTarget[];
|
||||||
|
stats: interfaces.data.INetworkStats | null;
|
||||||
|
trafficStats: interfaces.data.ITrafficStats | null;
|
||||||
|
dnsRecords: interfaces.data.IDnsRecord[];
|
||||||
|
domains: interfaces.data.IDomainDetail[];
|
||||||
|
certificates: interfaces.data.ICertificate[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IRegistriesState {
|
||||||
|
tokens: interfaces.data.IRegistryToken[];
|
||||||
|
registryStatus: { running: boolean; port: number } | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IBackupsState {
|
||||||
|
backups: interfaces.data.IBackup[];
|
||||||
|
schedules: interfaces.data.IBackupSchedule[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ISettingsState {
|
||||||
|
settings: interfaces.data.ISettings | null;
|
||||||
|
backupPasswordConfigured: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IUiState {
|
||||||
|
activeView: string;
|
||||||
|
autoRefresh: boolean;
|
||||||
|
refreshInterval: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// State Parts
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
export const loginStatePart = await appState.getStatePart<ILoginState>(
|
||||||
|
'login',
|
||||||
|
{
|
||||||
|
identity: null,
|
||||||
|
isLoggedIn: false,
|
||||||
|
},
|
||||||
|
'persistent',
|
||||||
|
);
|
||||||
|
|
||||||
|
export const systemStatePart = await appState.getStatePart<ISystemState>(
|
||||||
|
'system',
|
||||||
|
{
|
||||||
|
status: null,
|
||||||
|
},
|
||||||
|
'soft',
|
||||||
|
);
|
||||||
|
|
||||||
|
export const servicesStatePart = await appState.getStatePart<IServicesState>(
|
||||||
|
'services',
|
||||||
|
{
|
||||||
|
services: [],
|
||||||
|
currentService: null,
|
||||||
|
currentServiceLogs: [],
|
||||||
|
currentServiceStats: null,
|
||||||
|
platformServices: [],
|
||||||
|
currentPlatformService: null,
|
||||||
|
},
|
||||||
|
'soft',
|
||||||
|
);
|
||||||
|
|
||||||
|
export const networkStatePart = await appState.getStatePart<INetworkState>(
|
||||||
|
'network',
|
||||||
|
{
|
||||||
|
targets: [],
|
||||||
|
stats: null,
|
||||||
|
trafficStats: null,
|
||||||
|
dnsRecords: [],
|
||||||
|
domains: [],
|
||||||
|
certificates: [],
|
||||||
|
},
|
||||||
|
'soft',
|
||||||
|
);
|
||||||
|
|
||||||
|
export const registriesStatePart = await appState.getStatePart<IRegistriesState>(
|
||||||
|
'registries',
|
||||||
|
{
|
||||||
|
tokens: [],
|
||||||
|
registryStatus: null,
|
||||||
|
},
|
||||||
|
'soft',
|
||||||
|
);
|
||||||
|
|
||||||
|
export const backupsStatePart = await appState.getStatePart<IBackupsState>(
|
||||||
|
'backups',
|
||||||
|
{
|
||||||
|
backups: [],
|
||||||
|
schedules: [],
|
||||||
|
},
|
||||||
|
'soft',
|
||||||
|
);
|
||||||
|
|
||||||
|
export const settingsStatePart = await appState.getStatePart<ISettingsState>(
|
||||||
|
'settings',
|
||||||
|
{
|
||||||
|
settings: null,
|
||||||
|
backupPasswordConfigured: false,
|
||||||
|
},
|
||||||
|
'soft',
|
||||||
|
);
|
||||||
|
|
||||||
|
export const uiStatePart = await appState.getStatePart<IUiState>(
|
||||||
|
'ui',
|
||||||
|
{
|
||||||
|
activeView: 'dashboard',
|
||||||
|
autoRefresh: true,
|
||||||
|
refreshInterval: 30000,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Helpers
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
interface IActionContext {
|
||||||
|
identity: interfaces.data.IIdentity | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const getActionContext = (): IActionContext => {
|
||||||
|
return { identity: loginStatePart.getState().identity };
|
||||||
|
};
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Login Actions
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
export const loginAction = loginStatePart.createAction<{
|
||||||
|
username: string;
|
||||||
|
password: string;
|
||||||
|
}>(async (statePartArg, dataArg) => {
|
||||||
|
try {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_AdminLoginWithUsernameAndPassword
|
||||||
|
>('/typedrequest', 'adminLoginWithUsernameAndPassword');
|
||||||
|
|
||||||
|
const response = await typedRequest.fire({
|
||||||
|
username: dataArg.username,
|
||||||
|
password: dataArg.password,
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
identity: response.identity,
|
||||||
|
isLoggedIn: true,
|
||||||
|
};
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Login failed:', err);
|
||||||
|
return { identity: null, isLoggedIn: false };
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
export const logoutAction = loginStatePart.createAction(async (statePartArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
if (context.identity) {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_AdminLogout
|
||||||
|
>('/typedrequest', 'adminLogout');
|
||||||
|
await typedRequest.fire({ identity: context.identity });
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Logout error:', err);
|
||||||
|
}
|
||||||
|
return { identity: null, isLoggedIn: false };
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// System Status Actions
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
export const fetchSystemStatusAction = systemStatePart.createAction(async (statePartArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_GetSystemStatus
|
||||||
|
>('/typedrequest', 'getSystemStatus');
|
||||||
|
const response = await typedRequest.fire({ identity: context.identity! });
|
||||||
|
return { status: response.status };
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to fetch system status:', err);
|
||||||
|
return statePartArg.getState();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Services Actions
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
export const fetchServicesAction = servicesStatePart.createAction(async (statePartArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_GetServices
|
||||||
|
>('/typedrequest', 'getServices');
|
||||||
|
const response = await typedRequest.fire({ identity: context.identity! });
|
||||||
|
return { ...statePartArg.getState(), services: response.services };
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to fetch services:', err);
|
||||||
|
return statePartArg.getState();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
export const fetchServiceAction = servicesStatePart.createAction<{ name: string }>(
|
||||||
|
async (statePartArg, dataArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_GetService
|
||||||
|
>('/typedrequest', 'getService');
|
||||||
|
const response = await typedRequest.fire({
|
||||||
|
identity: context.identity!,
|
||||||
|
serviceName: dataArg.name,
|
||||||
|
});
|
||||||
|
return { ...statePartArg.getState(), currentService: response.service };
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to fetch service:', err);
|
||||||
|
return statePartArg.getState();
|
||||||
|
}
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
export const createServiceAction = servicesStatePart.createAction<{
|
||||||
|
config: interfaces.data.IServiceCreate;
|
||||||
|
}>(async (statePartArg, dataArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_CreateService
|
||||||
|
>('/typedrequest', 'createService');
|
||||||
|
await typedRequest.fire({
|
||||||
|
identity: context.identity!,
|
||||||
|
serviceConfig: dataArg.config,
|
||||||
|
});
|
||||||
|
// Re-fetch services list
|
||||||
|
const listReq = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_GetServices
|
||||||
|
>('/typedrequest', 'getServices');
|
||||||
|
const listResp = await listReq.fire({ identity: context.identity! });
|
||||||
|
return { ...statePartArg.getState(), services: listResp.services };
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to create service:', err);
|
||||||
|
return statePartArg.getState();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
export const deleteServiceAction = servicesStatePart.createAction<{ name: string }>(
|
||||||
|
async (statePartArg, dataArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_DeleteService
|
||||||
|
>('/typedrequest', 'deleteService');
|
||||||
|
await typedRequest.fire({
|
||||||
|
identity: context.identity!,
|
||||||
|
serviceName: dataArg.name,
|
||||||
|
});
|
||||||
|
const state = statePartArg.getState();
|
||||||
|
return {
|
||||||
|
...state,
|
||||||
|
services: state.services.filter((s) => s.name !== dataArg.name),
|
||||||
|
currentService: null,
|
||||||
|
};
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to delete service:', err);
|
||||||
|
return statePartArg.getState();
|
||||||
|
}
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
export const startServiceAction = servicesStatePart.createAction<{ name: string }>(
|
||||||
|
async (statePartArg, dataArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_StartService
|
||||||
|
>('/typedrequest', 'startService');
|
||||||
|
await typedRequest.fire({
|
||||||
|
identity: context.identity!,
|
||||||
|
serviceName: dataArg.name,
|
||||||
|
});
|
||||||
|
// Re-fetch services
|
||||||
|
const listReq = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_GetServices
|
||||||
|
>('/typedrequest', 'getServices');
|
||||||
|
const listResp = await listReq.fire({ identity: context.identity! });
|
||||||
|
return { ...statePartArg.getState(), services: listResp.services };
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to start service:', err);
|
||||||
|
return statePartArg.getState();
|
||||||
|
}
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
export const stopServiceAction = servicesStatePart.createAction<{ name: string }>(
|
||||||
|
async (statePartArg, dataArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_StopService
|
||||||
|
>('/typedrequest', 'stopService');
|
||||||
|
await typedRequest.fire({
|
||||||
|
identity: context.identity!,
|
||||||
|
serviceName: dataArg.name,
|
||||||
|
});
|
||||||
|
const listReq = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_GetServices
|
||||||
|
>('/typedrequest', 'getServices');
|
||||||
|
const listResp = await listReq.fire({ identity: context.identity! });
|
||||||
|
return { ...statePartArg.getState(), services: listResp.services };
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to stop service:', err);
|
||||||
|
return statePartArg.getState();
|
||||||
|
}
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
export const restartServiceAction = servicesStatePart.createAction<{ name: string }>(
|
||||||
|
async (statePartArg, dataArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_RestartService
|
||||||
|
>('/typedrequest', 'restartService');
|
||||||
|
await typedRequest.fire({
|
||||||
|
identity: context.identity!,
|
||||||
|
serviceName: dataArg.name,
|
||||||
|
});
|
||||||
|
const listReq = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_GetServices
|
||||||
|
>('/typedrequest', 'getServices');
|
||||||
|
const listResp = await listReq.fire({ identity: context.identity! });
|
||||||
|
return { ...statePartArg.getState(), services: listResp.services };
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to restart service:', err);
|
||||||
|
return statePartArg.getState();
|
||||||
|
}
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
export const fetchServiceLogsAction = servicesStatePart.createAction<{
|
||||||
|
name: string;
|
||||||
|
lines?: number;
|
||||||
|
}>(async (statePartArg, dataArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_GetServiceLogs
|
||||||
|
>('/typedrequest', 'getServiceLogs');
|
||||||
|
const response = await typedRequest.fire({
|
||||||
|
identity: context.identity!,
|
||||||
|
serviceName: dataArg.name,
|
||||||
|
tail: dataArg.lines || 200,
|
||||||
|
});
|
||||||
|
return { ...statePartArg.getState(), currentServiceLogs: response.logs };
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to fetch service logs:', err);
|
||||||
|
return statePartArg.getState();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
export const fetchServiceStatsAction = servicesStatePart.createAction<{ name: string }>(
|
||||||
|
async (statePartArg, dataArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_GetServiceStats
|
||||||
|
>('/typedrequest', 'getServiceStats');
|
||||||
|
const response = await typedRequest.fire({
|
||||||
|
identity: context.identity!,
|
||||||
|
serviceName: dataArg.name,
|
||||||
|
});
|
||||||
|
return { ...statePartArg.getState(), currentServiceStats: response.stats };
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to fetch service stats:', err);
|
||||||
|
return statePartArg.getState();
|
||||||
|
}
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Platform Services Actions
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
export const fetchPlatformServicesAction = servicesStatePart.createAction(
|
||||||
|
async (statePartArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_GetPlatformServices
|
||||||
|
>('/typedrequest', 'getPlatformServices');
|
||||||
|
const response = await typedRequest.fire({ identity: context.identity! });
|
||||||
|
return { ...statePartArg.getState(), platformServices: response.platformServices };
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to fetch platform services:', err);
|
||||||
|
return statePartArg.getState();
|
||||||
|
}
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
export const startPlatformServiceAction = servicesStatePart.createAction<{
|
||||||
|
serviceType: interfaces.data.TPlatformServiceType;
|
||||||
|
}>(async (statePartArg, dataArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_StartPlatformService
|
||||||
|
>('/typedrequest', 'startPlatformService');
|
||||||
|
await typedRequest.fire({
|
||||||
|
identity: context.identity!,
|
||||||
|
serviceType: dataArg.serviceType,
|
||||||
|
});
|
||||||
|
// Re-fetch platform services
|
||||||
|
const listReq = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_GetPlatformServices
|
||||||
|
>('/typedrequest', 'getPlatformServices');
|
||||||
|
const listResp = await listReq.fire({ identity: context.identity! });
|
||||||
|
return { ...statePartArg.getState(), platformServices: listResp.platformServices };
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to start platform service:', err);
|
||||||
|
return statePartArg.getState();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
export const stopPlatformServiceAction = servicesStatePart.createAction<{
|
||||||
|
serviceType: interfaces.data.TPlatformServiceType;
|
||||||
|
}>(async (statePartArg, dataArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_StopPlatformService
|
||||||
|
>('/typedrequest', 'stopPlatformService');
|
||||||
|
await typedRequest.fire({
|
||||||
|
identity: context.identity!,
|
||||||
|
serviceType: dataArg.serviceType,
|
||||||
|
});
|
||||||
|
const listReq = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_GetPlatformServices
|
||||||
|
>('/typedrequest', 'getPlatformServices');
|
||||||
|
const listResp = await listReq.fire({ identity: context.identity! });
|
||||||
|
return { ...statePartArg.getState(), platformServices: listResp.platformServices };
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to stop platform service:', err);
|
||||||
|
return statePartArg.getState();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Network Actions
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
export const fetchNetworkTargetsAction = networkStatePart.createAction(async (statePartArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_GetNetworkTargets
|
||||||
|
>('/typedrequest', 'getNetworkTargets');
|
||||||
|
const response = await typedRequest.fire({ identity: context.identity! });
|
||||||
|
return { ...statePartArg.getState(), targets: response.targets };
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to fetch network targets:', err);
|
||||||
|
return statePartArg.getState();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
export const fetchNetworkStatsAction = networkStatePart.createAction(async (statePartArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_GetNetworkStats
|
||||||
|
>('/typedrequest', 'getNetworkStats');
|
||||||
|
const response = await typedRequest.fire({ identity: context.identity! });
|
||||||
|
return { ...statePartArg.getState(), stats: response.stats };
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to fetch network stats:', err);
|
||||||
|
return statePartArg.getState();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
export const fetchTrafficStatsAction = networkStatePart.createAction(async (statePartArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_GetTrafficStats
|
||||||
|
>('/typedrequest', 'getTrafficStats');
|
||||||
|
const response = await typedRequest.fire({ identity: context.identity! });
|
||||||
|
return { ...statePartArg.getState(), trafficStats: response.stats };
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to fetch traffic stats:', err);
|
||||||
|
return statePartArg.getState();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
export const fetchDnsRecordsAction = networkStatePart.createAction(async (statePartArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_GetDnsRecords
|
||||||
|
>('/typedrequest', 'getDnsRecords');
|
||||||
|
const response = await typedRequest.fire({ identity: context.identity! });
|
||||||
|
return { ...statePartArg.getState(), dnsRecords: response.records };
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to fetch DNS records:', err);
|
||||||
|
return statePartArg.getState();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
export const syncDnsAction = networkStatePart.createAction(async (statePartArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_SyncDns
|
||||||
|
>('/typedrequest', 'syncDns');
|
||||||
|
await typedRequest.fire({ identity: context.identity! });
|
||||||
|
// Re-fetch DNS records
|
||||||
|
const listReq = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_GetDnsRecords
|
||||||
|
>('/typedrequest', 'getDnsRecords');
|
||||||
|
const listResp = await listReq.fire({ identity: context.identity! });
|
||||||
|
return { ...statePartArg.getState(), dnsRecords: listResp.records };
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to sync DNS:', err);
|
||||||
|
return statePartArg.getState();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
export const fetchDomainsAction = networkStatePart.createAction(async (statePartArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_GetDomains
|
||||||
|
>('/typedrequest', 'getDomains');
|
||||||
|
const response = await typedRequest.fire({ identity: context.identity! });
|
||||||
|
return { ...statePartArg.getState(), domains: response.domains };
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to fetch domains:', err);
|
||||||
|
return statePartArg.getState();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
export const fetchCertificatesAction = networkStatePart.createAction(async (statePartArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_ListCertificates
|
||||||
|
>('/typedrequest', 'listCertificates');
|
||||||
|
const response = await typedRequest.fire({ identity: context.identity! });
|
||||||
|
return { ...statePartArg.getState(), certificates: response.certificates };
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to fetch certificates:', err);
|
||||||
|
return statePartArg.getState();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
export const renewCertificateAction = networkStatePart.createAction<{ domain: string }>(
|
||||||
|
async (statePartArg, dataArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_RenewCertificate
|
||||||
|
>('/typedrequest', 'renewCertificate');
|
||||||
|
await typedRequest.fire({
|
||||||
|
identity: context.identity!,
|
||||||
|
domain: dataArg.domain,
|
||||||
|
});
|
||||||
|
// Re-fetch certificates
|
||||||
|
const listReq = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_ListCertificates
|
||||||
|
>('/typedrequest', 'listCertificates');
|
||||||
|
const listResp = await listReq.fire({ identity: context.identity! });
|
||||||
|
return { ...statePartArg.getState(), certificates: listResp.certificates };
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to renew certificate:', err);
|
||||||
|
return statePartArg.getState();
|
||||||
|
}
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Registry Actions
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
export const fetchRegistryTokensAction = registriesStatePart.createAction(
|
||||||
|
async (statePartArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_GetRegistryTokens
|
||||||
|
>('/typedrequest', 'getRegistryTokens');
|
||||||
|
const response = await typedRequest.fire({ identity: context.identity! });
|
||||||
|
return { ...statePartArg.getState(), tokens: response.tokens };
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to fetch registry tokens:', err);
|
||||||
|
return statePartArg.getState();
|
||||||
|
}
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
export const createRegistryTokenAction = registriesStatePart.createAction<{
|
||||||
|
token: interfaces.data.ICreateTokenRequest;
|
||||||
|
}>(async (statePartArg, dataArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_CreateRegistryToken
|
||||||
|
>('/typedrequest', 'createRegistryToken');
|
||||||
|
await typedRequest.fire({
|
||||||
|
identity: context.identity!,
|
||||||
|
token: dataArg.token,
|
||||||
|
});
|
||||||
|
// Re-fetch tokens
|
||||||
|
const listReq = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_GetRegistryTokens
|
||||||
|
>('/typedrequest', 'getRegistryTokens');
|
||||||
|
const listResp = await listReq.fire({ identity: context.identity! });
|
||||||
|
return { ...statePartArg.getState(), tokens: listResp.tokens };
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to create registry token:', err);
|
||||||
|
return statePartArg.getState();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
export const deleteRegistryTokenAction = registriesStatePart.createAction<{
|
||||||
|
tokenId: string;
|
||||||
|
}>(async (statePartArg, dataArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_DeleteRegistryToken
|
||||||
|
>('/typedrequest', 'deleteRegistryToken');
|
||||||
|
await typedRequest.fire({
|
||||||
|
identity: context.identity!,
|
||||||
|
tokenId: dataArg.tokenId,
|
||||||
|
});
|
||||||
|
const state = statePartArg.getState();
|
||||||
|
return {
|
||||||
|
...state,
|
||||||
|
tokens: state.tokens.filter((t) => t.id !== dataArg.tokenId),
|
||||||
|
};
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to delete registry token:', err);
|
||||||
|
return statePartArg.getState();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Backups Actions
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
export const fetchBackupsAction = backupsStatePart.createAction(async (statePartArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_GetBackups
|
||||||
|
>('/typedrequest', 'getBackups');
|
||||||
|
const response = await typedRequest.fire({ identity: context.identity! });
|
||||||
|
return { ...statePartArg.getState(), backups: response.backups };
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to fetch backups:', err);
|
||||||
|
return statePartArg.getState();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
export const deleteBackupAction = backupsStatePart.createAction<{ backupId: number }>(
|
||||||
|
async (statePartArg, dataArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_DeleteBackup
|
||||||
|
>('/typedrequest', 'deleteBackup');
|
||||||
|
await typedRequest.fire({
|
||||||
|
identity: context.identity!,
|
||||||
|
backupId: dataArg.backupId,
|
||||||
|
});
|
||||||
|
const state = statePartArg.getState();
|
||||||
|
return {
|
||||||
|
...state,
|
||||||
|
backups: state.backups.filter((b) => b.id !== dataArg.backupId),
|
||||||
|
};
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to delete backup:', err);
|
||||||
|
return statePartArg.getState();
|
||||||
|
}
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
export const fetchSchedulesAction = backupsStatePart.createAction(async (statePartArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_GetBackupSchedules
|
||||||
|
>('/typedrequest', 'getBackupSchedules');
|
||||||
|
const response = await typedRequest.fire({ identity: context.identity! });
|
||||||
|
return { ...statePartArg.getState(), schedules: response.schedules };
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to fetch schedules:', err);
|
||||||
|
return statePartArg.getState();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
export const createScheduleAction = backupsStatePart.createAction<{
|
||||||
|
config: interfaces.data.IBackupScheduleCreate;
|
||||||
|
}>(async (statePartArg, dataArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_CreateBackupSchedule
|
||||||
|
>('/typedrequest', 'createBackupSchedule');
|
||||||
|
await typedRequest.fire({
|
||||||
|
identity: context.identity!,
|
||||||
|
scheduleConfig: dataArg.config,
|
||||||
|
});
|
||||||
|
// Re-fetch schedules
|
||||||
|
const listReq = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_GetBackupSchedules
|
||||||
|
>('/typedrequest', 'getBackupSchedules');
|
||||||
|
const listResp = await listReq.fire({ identity: context.identity! });
|
||||||
|
return { ...statePartArg.getState(), schedules: listResp.schedules };
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to create schedule:', err);
|
||||||
|
return statePartArg.getState();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
export const deleteScheduleAction = backupsStatePart.createAction<{ scheduleId: number }>(
|
||||||
|
async (statePartArg, dataArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_DeleteBackupSchedule
|
||||||
|
>('/typedrequest', 'deleteBackupSchedule');
|
||||||
|
await typedRequest.fire({
|
||||||
|
identity: context.identity!,
|
||||||
|
scheduleId: dataArg.scheduleId,
|
||||||
|
});
|
||||||
|
const state = statePartArg.getState();
|
||||||
|
return {
|
||||||
|
...state,
|
||||||
|
schedules: state.schedules.filter((s) => s.id !== dataArg.scheduleId),
|
||||||
|
};
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to delete schedule:', err);
|
||||||
|
return statePartArg.getState();
|
||||||
|
}
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
export const triggerScheduleAction = backupsStatePart.createAction<{ scheduleId: number }>(
|
||||||
|
async (statePartArg, dataArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_TriggerBackupSchedule
|
||||||
|
>('/typedrequest', 'triggerBackupSchedule');
|
||||||
|
await typedRequest.fire({
|
||||||
|
identity: context.identity!,
|
||||||
|
scheduleId: dataArg.scheduleId,
|
||||||
|
});
|
||||||
|
// Re-fetch backups
|
||||||
|
const backupsReq = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_GetBackups
|
||||||
|
>('/typedrequest', 'getBackups');
|
||||||
|
const backupsResp = await backupsReq.fire({ identity: context.identity! });
|
||||||
|
return { ...statePartArg.getState(), backups: backupsResp.backups };
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to trigger schedule:', err);
|
||||||
|
return statePartArg.getState();
|
||||||
|
}
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Settings Actions
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
export const fetchSettingsAction = settingsStatePart.createAction(async (statePartArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
const [settingsResp, passwordResp] = await Promise.all([
|
||||||
|
new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_GetSettings
|
||||||
|
>('/typedrequest', 'getSettings').fire({ identity: context.identity! }),
|
||||||
|
new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_GetBackupPasswordStatus
|
||||||
|
>('/typedrequest', 'getBackupPasswordStatus').fire({ identity: context.identity! }),
|
||||||
|
]);
|
||||||
|
return {
|
||||||
|
settings: settingsResp.settings,
|
||||||
|
backupPasswordConfigured: passwordResp.status.isConfigured,
|
||||||
|
};
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to fetch settings:', err);
|
||||||
|
return statePartArg.getState();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
export const updateSettingsAction = settingsStatePart.createAction<{
|
||||||
|
settings: Partial<interfaces.data.ISettings>;
|
||||||
|
}>(async (statePartArg, dataArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_UpdateSettings
|
||||||
|
>('/typedrequest', 'updateSettings');
|
||||||
|
const response = await typedRequest.fire({
|
||||||
|
identity: context.identity!,
|
||||||
|
settings: dataArg.settings,
|
||||||
|
});
|
||||||
|
return { ...statePartArg.getState(), settings: response.settings };
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to update settings:', err);
|
||||||
|
return statePartArg.getState();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
export const setBackupPasswordAction = settingsStatePart.createAction<{ password: string }>(
|
||||||
|
async (statePartArg, dataArg) => {
|
||||||
|
const context = getActionContext();
|
||||||
|
try {
|
||||||
|
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
|
||||||
|
interfaces.requests.IReq_SetBackupPassword
|
||||||
|
>('/typedrequest', 'setBackupPassword');
|
||||||
|
await typedRequest.fire({
|
||||||
|
identity: context.identity!,
|
||||||
|
password: dataArg.password,
|
||||||
|
});
|
||||||
|
return { ...statePartArg.getState(), backupPasswordConfigured: true };
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to set backup password:', err);
|
||||||
|
return statePartArg.getState();
|
||||||
|
}
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// UI Actions
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
export const setActiveViewAction = uiStatePart.createAction<{ view: string }>(
|
||||||
|
async (statePartArg, dataArg) => {
|
||||||
|
return { ...statePartArg.getState(), activeView: dataArg.view };
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
export const toggleAutoRefreshAction = uiStatePart.createAction(async (statePartArg) => {
|
||||||
|
const state = statePartArg.getState();
|
||||||
|
return { ...state, autoRefresh: !state.autoRefresh };
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Auto-refresh system
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// Handle of the periodic refresh timer; null whenever auto-refresh is inactive.
let refreshIntervalHandle: ReturnType<typeof setInterval> | null = null;
|
||||||
|
|
||||||
|
const dispatchCombinedRefreshAction = async () => {
|
||||||
|
const loginState = loginStatePart.getState();
|
||||||
|
if (!loginState.isLoggedIn) return;
|
||||||
|
|
||||||
|
try {
|
||||||
|
await systemStatePart.dispatchAction(fetchSystemStatusAction, null);
|
||||||
|
} catch (err) {
|
||||||
|
// Silently fail on auto-refresh
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const startAutoRefresh = () => {
|
||||||
|
const uiState = uiStatePart.getState();
|
||||||
|
const loginState = loginStatePart.getState();
|
||||||
|
|
||||||
|
if (uiState.autoRefresh && loginState.isLoggedIn) {
|
||||||
|
if (refreshIntervalHandle) {
|
||||||
|
clearInterval(refreshIntervalHandle);
|
||||||
|
}
|
||||||
|
refreshIntervalHandle = setInterval(() => {
|
||||||
|
dispatchCombinedRefreshAction();
|
||||||
|
}, uiState.refreshInterval);
|
||||||
|
} else {
|
||||||
|
if (refreshIntervalHandle) {
|
||||||
|
clearInterval(refreshIntervalHandle);
|
||||||
|
refreshIntervalHandle = null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Re-evaluate the auto-refresh timer whenever UI or login state changes,
// and once at startup so the initial state is honoured.
uiStatePart.select((s) => s).subscribe(() => startAutoRefresh());
loginStatePart.select((s) => s).subscribe(() => startAutoRefresh());
startAutoRefresh();
|
||||||
13
ts_web/elements/index.ts
Normal file
13
ts_web/elements/index.ts
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
// Shared utilities
|
||||||
|
export * from './shared/index.js';
|
||||||
|
|
||||||
|
// App shell
|
||||||
|
export * from './ob-app-shell.js';
|
||||||
|
|
||||||
|
// View elements
|
||||||
|
export * from './ob-view-dashboard.js';
|
||||||
|
export * from './ob-view-services.js';
|
||||||
|
export * from './ob-view-network.js';
|
||||||
|
export * from './ob-view-registries.js';
|
||||||
|
export * from './ob-view-tokens.js';
|
||||||
|
export * from './ob-view-settings.js';
|
||||||
207
ts_web/elements/ob-app-shell.ts
Normal file
207
ts_web/elements/ob-app-shell.ts
Normal file
@@ -0,0 +1,207 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import * as appstate from '../appstate.js';
|
||||||
|
import * as interfaces from '../../ts_interfaces/index.js';
|
||||||
|
import {
|
||||||
|
DeesElement,
|
||||||
|
customElement,
|
||||||
|
html,
|
||||||
|
state,
|
||||||
|
css,
|
||||||
|
cssManager,
|
||||||
|
type TemplateResult,
|
||||||
|
} from '@design.estate/dees-element';
|
||||||
|
|
||||||
|
import type { ObViewDashboard } from './ob-view-dashboard.js';
|
||||||
|
import type { ObViewServices } from './ob-view-services.js';
|
||||||
|
import type { ObViewNetwork } from './ob-view-network.js';
|
||||||
|
import type { ObViewRegistries } from './ob-view-registries.js';
|
||||||
|
import type { ObViewTokens } from './ob-view-tokens.js';
|
||||||
|
import type { ObViewSettings } from './ob-view-settings.js';
|
||||||
|
|
||||||
|
/**
 * Top-level application shell: wraps the UI in a login gate
 * (<dees-simple-login>) and, once authenticated, shows the tabbed dashboard
 * (<dees-simple-appdash>) with lazily imported view elements.
 */
@customElement('ob-app-shell')
export class ObAppShell extends DeesElement {
  // Mirrors appstate.loginStatePart; drives login gate vs. dashboard.
  @state()
  accessor loginState: appstate.ILoginState = { identity: null, isLoggedIn: false };

  // Mirrors appstate.uiStatePart (active view + auto-refresh settings).
  @state()
  accessor uiState: appstate.IUiState = {
    activeView: 'dashboard',
    autoRefresh: true,
    refreshInterval: 30000,
  };

  @state()
  accessor loginLoading: boolean = false;

  @state()
  accessor loginError: string = '';

  // View tabs with lazily imported element classes; each `element` holds a
  // pending Promise until resolved in firstUpdated().
  private viewTabs = [
    { name: 'Dashboard', element: (async () => (await import('./ob-view-dashboard.js')).ObViewDashboard)() },
    { name: 'Services', element: (async () => (await import('./ob-view-services.js')).ObViewServices)() },
    { name: 'Network', element: (async () => (await import('./ob-view-network.js')).ObViewNetwork)() },
    { name: 'Registries', element: (async () => (await import('./ob-view-registries.js')).ObViewRegistries)() },
    { name: 'Tokens', element: (async () => (await import('./ob-view-tokens.js')).ObViewTokens)() },
    { name: 'Settings', element: (async () => (await import('./ob-view-settings.js')).ObViewSettings)() },
  ];

  // Same tabs with the import promises awaited; fed to <dees-simple-appdash>.
  private resolvedViewTabs: Array<{ name: string; element: any }> = [];

  constructor() {
    super();
    document.title = 'Onebox';

    // Keep local login state in sync and kick off a status fetch on login.
    const loginSubscription = appstate.loginStatePart
      .select((stateArg) => stateArg)
      .subscribe((loginState) => {
        this.loginState = loginState;
        if (loginState.isLoggedIn) {
          appstate.systemStatePart.dispatchAction(appstate.fetchSystemStatusAction, null);
        }
      });
    this.rxSubscriptions.push(loginSubscription);

    // Keep local UI state in sync and reflect view changes onto the appdash.
    const uiSubscription = appstate.uiStatePart
      .select((stateArg) => stateArg)
      .subscribe((uiState) => {
        this.uiState = uiState;
        this.syncAppdashView(uiState.activeView);
      });
    this.rxSubscriptions.push(uiSubscription);
  }

  public static styles = [
    cssManager.defaultStyles,
    css`
      :host {
        display: block;
        width: 100%;
        height: 100%;
      }
      .maincontainer {
        width: 100%;
        height: 100vh;
      }
    `,
  ];

  public render(): TemplateResult {
    return html`
      <div class="maincontainer">
        <dees-simple-login name="Onebox">
          <dees-simple-appdash
            name="Onebox"
            .viewTabs=${this.resolvedViewTabs}
          >
          </dees-simple-appdash>
        </dees-simple-login>
      </div>
    `;
  }

  /**
   * Runs once after the first render: resolves the lazy view imports, wires
   * up login/appdash event listeners, loads the initial view, and attempts
   * to restore a stored session.
   */
  public async firstUpdated() {
    // Resolve async view tab imports
    this.resolvedViewTabs = await Promise.all(
      this.viewTabs.map(async (tab) => ({
        name: tab.name,
        element: await tab.element,
      })),
    );
    this.requestUpdate();
    await this.updateComplete;

    const simpleLogin = this.shadowRoot!.querySelector('dees-simple-login') as any;
    if (simpleLogin) {
      simpleLogin.addEventListener('login', (e: CustomEvent) => {
        this.login(e.detail.data.username, e.detail.data.password);
      });
    }

    const appDash = this.shadowRoot!.querySelector('dees-simple-appdash') as any;
    if (appDash) {
      // Tab clicks update global UI state; syncAppdashView reacts to it.
      appDash.addEventListener('view-select', (e: CustomEvent) => {
        const viewName = e.detail.view.name.toLowerCase();
        appstate.uiStatePart.dispatchAction(appstate.setActiveViewAction, { view: viewName });
      });
      appDash.addEventListener('logout', async () => {
        await appstate.loginStatePart.dispatchAction(appstate.logoutAction, null);
      });
    }

    // Load the initial view on the appdash now that tabs are resolved
    // (appdash's own firstUpdated already fired when viewTabs was still empty)
    if (appDash && this.resolvedViewTabs.length > 0) {
      const initialView = this.resolvedViewTabs.find(
        (t) => t.name.toLowerCase() === this.uiState.activeView,
      ) || this.resolvedViewTabs[0];
      await appDash.loadView(initialView);
    }

    // Check for stored session (persistent login state)
    const loginState = appstate.loginStatePart.getState();
    if (loginState.identity?.jwt) {
      if (loginState.identity.expiresAt > Date.now()) {
        // Validate token with server before switching to dashboard
        // (server may have restarted with a new JWT secret)
        try {
          const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
            interfaces.requests.IReq_GetSystemStatus
          >('/typedrequest', 'getSystemStatus');
          const response = await typedRequest.fire({ identity: loginState.identity });
          // Token is valid - switch to dashboard
          appstate.systemStatePart.setState({ status: response.status });
          this.loginState = loginState;
          if (simpleLogin) {
            await simpleLogin.switchToSlottedContent();
          }
        } catch (err) {
          // Token rejected by server - clear session
          console.warn('Stored session invalid, returning to login:', err);
          await appstate.loginStatePart.dispatchAction(appstate.logoutAction, null);
        }
      } else {
        // Token already expired locally — clear the session without a round-trip.
        await appstate.loginStatePart.dispatchAction(appstate.logoutAction, null);
      }
    }
  }

  /**
   * Performs the login round-trip and reflects the outcome on the login
   * form (pending/success/error status, with a reset after failures).
   */
  private async login(username: string, password: string) {
    const domtools = await this.domtoolsPromise;
    const simpleLogin = this.shadowRoot!.querySelector('dees-simple-login') as any;
    const form = simpleLogin?.shadowRoot?.querySelector('dees-form') as any;

    if (form) {
      form.setStatus('pending', 'Logging in...');
    }

    const newState = await appstate.loginStatePart.dispatchAction(appstate.loginAction, {
      username,
      password,
    });

    if (newState.identity) {
      if (form) {
        form.setStatus('success', 'Logged in!');
      }
      if (simpleLogin) {
        await simpleLogin.switchToSlottedContent();
      }
      await appstate.systemStatePart.dispatchAction(appstate.fetchSystemStatusAction, null);
    } else {
      if (form) {
        form.setStatus('error', 'Login failed!');
        await domtools.convenience.smartdelay.delayFor(2000);
        form.reset();
      }
    }
  }

  // Tells the appdash to show the tab matching the given view name.
  // No-op until the lazy view tabs have been resolved.
  private syncAppdashView(viewName: string): void {
    const appDash = this.shadowRoot?.querySelector('dees-simple-appdash') as any;
    if (!appDash || this.resolvedViewTabs.length === 0) return;
    const targetTab = this.resolvedViewTabs.find((t) => t.name.toLowerCase() === viewName);
    if (!targetTab) return;
    // Use appdash's own loadView method for proper view management
    appDash.loadView(targetTab);
  }
}
|
||||||
164
ts_web/elements/ob-view-dashboard.ts
Normal file
164
ts_web/elements/ob-view-dashboard.ts
Normal file
@@ -0,0 +1,164 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import * as shared from './shared/index.js';
|
||||||
|
import * as appstate from '../appstate.js';
|
||||||
|
import {
|
||||||
|
DeesElement,
|
||||||
|
customElement,
|
||||||
|
html,
|
||||||
|
state,
|
||||||
|
css,
|
||||||
|
cssManager,
|
||||||
|
type TemplateResult,
|
||||||
|
} from '@design.estate/dees-element';
|
||||||
|
|
||||||
|
/**
 * Dashboard view: aggregates system, services, and network state into the
 * <sz-dashboard-view> catalog component and routes its quick actions back
 * into the UI state.
 */
@customElement('ob-view-dashboard')
export class ObViewDashboard extends DeesElement {
  // Mirror of appstate.systemStatePart.
  @state()
  accessor systemState: appstate.ISystemState = { status: null };

  // Mirror of appstate.servicesStatePart.
  @state()
  accessor servicesState: appstate.IServicesState = {
    services: [],
    currentService: null,
    currentServiceLogs: [],
    currentServiceStats: null,
    platformServices: [],
    currentPlatformService: null,
  };

  // Mirror of appstate.networkStatePart.
  @state()
  accessor networkState: appstate.INetworkState = {
    targets: [],
    stats: null,
    trafficStats: null,
    dnsRecords: [],
    domains: [],
    certificates: [],
  };

  constructor() {
    super();

    // Subscribe to the three state parts this view renders from.
    const systemSub = appstate.systemStatePart
      .select((s) => s)
      .subscribe((newState) => {
        this.systemState = newState;
      });
    this.rxSubscriptions.push(systemSub);

    const servicesSub = appstate.servicesStatePart
      .select((s) => s)
      .subscribe((newState) => {
        this.servicesState = newState;
      });
    this.rxSubscriptions.push(servicesSub);

    const networkSub = appstate.networkStatePart
      .select((s) => s)
      .subscribe((newState) => {
        this.networkState = newState;
      });
    this.rxSubscriptions.push(networkSub);
  }

  public static styles = [
    cssManager.defaultStyles,
    shared.viewHostCss,
    css``,
  ];

  // Fetch everything the dashboard displays each time it is attached.
  async connectedCallback() {
    super.connectedCallback();
    await Promise.all([
      appstate.systemStatePart.dispatchAction(appstate.fetchSystemStatusAction, null),
      appstate.servicesStatePart.dispatchAction(appstate.fetchServicesAction, null),
      appstate.servicesStatePart.dispatchAction(appstate.fetchPlatformServicesAction, null),
      appstate.networkStatePart.dispatchAction(appstate.fetchNetworkStatsAction, null),
      appstate.networkStatePart.dispatchAction(appstate.fetchCertificatesAction, null),
    ]);
  }

  public render(): TemplateResult {
    const status = this.systemState.status;
    const services = this.servicesState.services;
    const platformServices = this.servicesState.platformServices;
    const networkStats = this.networkState.stats;
    const certificates = this.networkState.certificates;

    const runningServices = services.filter((s) => s.status === 'running').length;
    const stoppedServices = services.filter((s) => s.status === 'stopped').length;

    const validCerts = certificates.filter((c) => c.isValid).length;
    // "Expiring" means valid but within 30 days of its expiry timestamp.
    const expiringCerts = certificates.filter(
      (c) => c.isValid && c.expiresAt && c.expiresAt - Date.now() < 30 * 24 * 60 * 60 * 1000,
    ).length;
    const expiredCerts = certificates.filter((c) => !c.isValid).length;

    return html`
      <ob-sectionheading>Dashboard</ob-sectionheading>
      <sz-dashboard-view
        .data=${{
          cluster: {
            totalServices: services.length,
            running: runningServices,
            stopped: stoppedServices,
            dockerStatus: status?.docker?.running ? 'running' : 'stopped',
          },
          resourceUsage: {
            cpu: status?.docker?.cpuUsage || 0,
            memoryUsed: status?.docker?.memoryUsage || 0,
            memoryTotal: status?.docker?.memoryTotal || 0,
            // Network throughput and top consumers are not wired up yet.
            networkIn: 0,
            networkOut: 0,
            topConsumers: [],
          },
          platformServices: platformServices.map((ps) => ({
            name: ps.displayName,
            status: ps.status === 'running' ? 'running' : 'stopped',
            running: ps.status === 'running',
          })),
          // Traffic metrics are placeholders — no backend source yet.
          traffic: {
            requests: 0,
            errors: 0,
            errorPercent: 0,
            avgResponse: 0,
            reqPerMin: 0,
            status2xx: 0,
            status3xx: 0,
            status4xx: 0,
            status5xx: 0,
          },
          proxy: {
            httpPort: networkStats?.proxy?.httpPort || 80,
            httpsPort: networkStats?.proxy?.httpsPort || 443,
            httpActive: networkStats?.proxy?.running || false,
            httpsActive: networkStats?.proxy?.running || false,
            routeCount: networkStats?.proxy?.routes || 0,
          },
          certificates: {
            valid: validCerts,
            expiring: expiringCerts,
            expired: expiredCerts,
          },
          // NOTE(review): hard-coded true — presumably until real checks exist; confirm.
          dnsConfigured: true,
          acmeConfigured: true,
          quickActions: [
            { label: 'Deploy Service', icon: 'lucide:Plus', primary: true },
            { label: 'Add Domain', icon: 'lucide:Globe' },
            { label: 'View Logs', icon: 'lucide:FileText' },
          ],
        }}
        @action-click=${(e: CustomEvent) => this.handleQuickAction(e)}
      ></sz-dashboard-view>
    `;
  }

  // Routes dashboard quick actions to the corresponding view.
  // 'View Logs' currently has no handler.
  private handleQuickAction(e: CustomEvent) {
    const action = e.detail?.action || e.detail?.label;
    if (action === 'Deploy Service') {
      appstate.uiStatePart.dispatchAction(appstate.setActiveViewAction, { view: 'services' });
    } else if (action === 'Add Domain') {
      appstate.uiStatePart.dispatchAction(appstate.setActiveViewAction, { view: 'network' });
    }
  }
}
|
||||||
197
ts_web/elements/ob-view-network.ts
Normal file
197
ts_web/elements/ob-view-network.ts
Normal file
@@ -0,0 +1,197 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import * as shared from './shared/index.js';
|
||||||
|
import * as appstate from '../appstate.js';
|
||||||
|
import {
|
||||||
|
DeesElement,
|
||||||
|
customElement,
|
||||||
|
html,
|
||||||
|
state,
|
||||||
|
css,
|
||||||
|
cssManager,
|
||||||
|
type TemplateResult,
|
||||||
|
} from '@design.estate/dees-element';
|
||||||
|
|
||||||
|
/**
 * Network view: proxy targets, DNS records, domains, and a per-domain
 * detail page, switched via the internal `currentTab` state.
 */
@customElement('ob-view-network')
export class ObViewNetwork extends DeesElement {
  // Mirror of appstate.networkStatePart.
  @state()
  accessor networkState: appstate.INetworkState = {
    targets: [],
    stats: null,
    trafficStats: null,
    dnsRecords: [],
    domains: [],
    certificates: [],
  };

  // Which sub-view is shown; 'domain-detail' additionally uses selectedDomain.
  @state()
  accessor currentTab: 'proxy' | 'dns' | 'domains' | 'domain-detail' = 'proxy';

  // Domain name currently shown in the detail sub-view.
  @state()
  accessor selectedDomain: string = '';

  constructor() {
    super();

    const networkSub = appstate.networkStatePart
      .select((s) => s)
      .subscribe((newState) => {
        this.networkState = newState;
      });
    this.rxSubscriptions.push(networkSub);
  }

  public static styles = [
    cssManager.defaultStyles,
    shared.viewHostCss,
    css``,
  ];

  // Fetch all network-related data each time the view is attached.
  async connectedCallback() {
    super.connectedCallback();
    await Promise.all([
      appstate.networkStatePart.dispatchAction(appstate.fetchNetworkTargetsAction, null),
      appstate.networkStatePart.dispatchAction(appstate.fetchNetworkStatsAction, null),
      appstate.networkStatePart.dispatchAction(appstate.fetchTrafficStatsAction, null),
      appstate.networkStatePart.dispatchAction(appstate.fetchDnsRecordsAction, null),
      appstate.networkStatePart.dispatchAction(appstate.fetchDomainsAction, null),
      appstate.networkStatePart.dispatchAction(appstate.fetchCertificatesAction, null),
    ]);
  }

  public render(): TemplateResult {
    switch (this.currentTab) {
      case 'dns':
        return this.renderDnsView();
      case 'domains':
        return this.renderDomainsView();
      case 'domain-detail':
        return this.renderDomainDetailView();
      default:
        return this.renderProxyView();
    }
  }

  // Proxy overview: status counters plus the list of reverse-proxy targets.
  private renderProxyView(): TemplateResult {
    const stats = this.networkState.stats;
    return html`
      <ob-sectionheading>Network</ob-sectionheading>
      <sz-network-proxy-view
        .proxyStatus=${stats?.proxy?.running ? 'running' : 'stopped'}
        .routeCount=${String(stats?.proxy?.routes || 0)}
        .certificateCount=${String(stats?.proxy?.certificates || 0)}
        .targetCount=${String(this.networkState.targets.length)}
        .targets=${this.networkState.targets.map((t) => ({
          type: t.type,
          name: t.name,
          domain: t.domain,
          target: `${t.targetHost}:${t.targetPort}`,
          status: t.status,
        }))}
        .logs=${[]}
        @refresh=${() => {
          appstate.networkStatePart.dispatchAction(appstate.fetchNetworkTargetsAction, null);
          appstate.networkStatePart.dispatchAction(appstate.fetchNetworkStatsAction, null);
        }}
      ></sz-network-proxy-view>
    `;
  }

  // DNS record list. Deletion is not implemented yet — it only logs.
  private renderDnsView(): TemplateResult {
    return html`
      <ob-sectionheading>DNS Records</ob-sectionheading>
      <sz-network-dns-view
        .records=${this.networkState.dnsRecords}
        @sync=${() => {
          appstate.networkStatePart.dispatchAction(appstate.syncDnsAction, null);
        }}
        @delete=${(e: CustomEvent) => {
          console.log('Delete DNS record:', e.detail);
        }}
      ></sz-network-dns-view>
    `;
  }

  // Domains list, annotated with a derived certificate status per domain.
  private renderDomainsView(): TemplateResult {
    const certs = this.networkState.certificates;
    return html`
      <ob-sectionheading>Domains</ob-sectionheading>
      <sz-network-domains-view
        .domains=${this.networkState.domains.map((d) => {
          const cert = certs.find((c) => c.certDomain === d.domain);
          // pending = no certificate found; expiring = valid but < 30 days left.
          let certStatus: 'valid' | 'expiring' | 'expired' | 'pending' = 'pending';
          if (cert) {
            if (!cert.isValid) certStatus = 'expired';
            else if (cert.expiresAt && cert.expiresAt - Date.now() < 30 * 24 * 60 * 60 * 1000)
              certStatus = 'expiring';
            else certStatus = 'valid';
          }
          return {
            domain: d.domain,
            // NOTE(review): provider is hard-coded — confirm once multiple providers exist.
            provider: 'cloudflare',
            serviceCount: d.services?.length || 0,
            certificateStatus: certStatus,
          };
        })}
        @sync=${() => {
          appstate.networkStatePart.dispatchAction(appstate.fetchDomainsAction, null);
        }}
        @view=${(e: CustomEvent) => {
          this.selectedDomain = e.detail.domain || e.detail;
          this.currentTab = 'domain-detail';
        }}
      ></sz-network-domains-view>
    `;
  }

  // Detail page for the selected domain: domain info, certificate, DNS records.
  private renderDomainDetailView(): TemplateResult {
    const domainDetail = this.networkState.domains.find(
      (d) => d.domain === this.selectedDomain,
    );
    const cert = this.networkState.certificates.find(
      (c) => c.certDomain === this.selectedDomain,
    );

    return html`
      <ob-sectionheading>Domain Details</ob-sectionheading>
      <sz-domain-detail-view
        .domain=${domainDetail
          ? {
              id: this.selectedDomain,
              name: this.selectedDomain,
              status: 'active',
              verified: true,
              createdAt: '',
            }
          : null}
        .certificate=${cert
          ? {
              id: cert.domainId,
              domain: cert.certDomain,
              issuer: 'Let\'s Encrypt',
              validFrom: cert.issuedAt ? new Date(cert.issuedAt).toISOString() : '',
              validUntil: cert.expiresAt ? new Date(cert.expiresAt).toISOString() : '',
              daysRemaining: cert.expiresAt
                ? Math.floor((cert.expiresAt - Date.now()) / (24 * 60 * 60 * 1000))
                : 0,
              status: cert.isValid ? 'valid' : 'expired',
              autoRenew: true,
            }
          : null}
        .dnsRecords=${this.networkState.dnsRecords
          .filter((r) => r.domain?.includes(this.selectedDomain))
          .map((r) => ({
            id: r.id || '',
            type: r.type,
            name: r.domain,
            value: r.value,
            ttl: 3600,
          }))}
        @renew-certificate=${() => {
          appstate.networkStatePart.dispatchAction(appstate.renewCertificateAction, {
            domain: this.selectedDomain,
          });
        }}
      ></sz-domain-detail-view>
    `;
  }
}
|
||||||
84
ts_web/elements/ob-view-registries.ts
Normal file
84
ts_web/elements/ob-view-registries.ts
Normal file
@@ -0,0 +1,84 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import * as shared from './shared/index.js';
|
||||||
|
import * as appstate from '../appstate.js';
|
||||||
|
import {
|
||||||
|
DeesElement,
|
||||||
|
customElement,
|
||||||
|
html,
|
||||||
|
state,
|
||||||
|
css,
|
||||||
|
cssManager,
|
||||||
|
type TemplateResult,
|
||||||
|
} from '@design.estate/dees-element';
|
||||||
|
|
||||||
|
/**
 * Registries view: shows the built-in Onebox registry and (stubbed)
 * external registries, switched via the internal `currentTab` state.
 */
@customElement('ob-view-registries')
export class ObViewRegistries extends DeesElement {
  // Mirror of appstate.registriesStatePart.
  @state()
  accessor registriesState: appstate.IRegistriesState = {
    tokens: [],
    registryStatus: null,
  };

  @state()
  accessor currentTab: 'onebox' | 'external' = 'onebox';

  constructor() {
    super();

    const registriesSub = appstate.registriesStatePart
      .select((s) => s)
      .subscribe((newState) => {
        this.registriesState = newState;
      });
    this.rxSubscriptions.push(registriesSub);
  }

  public static styles = [
    cssManager.defaultStyles,
    shared.viewHostCss,
    css``,
  ];

  async connectedCallback() {
    super.connectedCallback();
    await appstate.registriesStatePart.dispatchAction(
      appstate.fetchRegistryTokensAction,
      null,
    );
  }

  public render(): TemplateResult {
    switch (this.currentTab) {
      case 'external':
        return this.renderExternalView();
      default:
        return this.renderOneboxView();
    }
  }

  // Built-in registry card.
  // NOTE(review): status and URL are hard-coded here, not read from
  // registriesState.registryStatus — confirm whether that is intentional.
  private renderOneboxView(): TemplateResult {
    return html`
      <ob-sectionheading>Registries</ob-sectionheading>
      <sz-registry-advertisement
        .status=${'running'}
        .registryUrl=${'localhost:5000'}
        @manage-tokens=${() => {
          // tokens are managed via the tokens view
          appstate.uiStatePart.dispatchAction(appstate.setActiveViewAction, { view: 'tokens' });
        }}
      ></sz-registry-advertisement>
    `;
  }

  // External registries list — currently empty; adding only logs the event.
  private renderExternalView(): TemplateResult {
    return html`
      <ob-sectionheading>External Registries</ob-sectionheading>
      <sz-registry-external-view
        .registries=${[]}
        @add=${(e: CustomEvent) => {
          console.log('Add external registry:', e.detail);
        }}
      ></sz-registry-external-view>
    `;
  }
}
|
||||||
321
ts_web/elements/ob-view-services.ts
Normal file
321
ts_web/elements/ob-view-services.ts
Normal file
@@ -0,0 +1,321 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import * as shared from './shared/index.js';
|
||||||
|
import * as appstate from '../appstate.js';
|
||||||
|
import * as interfaces from '../../ts_interfaces/index.js';
|
||||||
|
import {
|
||||||
|
DeesElement,
|
||||||
|
customElement,
|
||||||
|
html,
|
||||||
|
state,
|
||||||
|
css,
|
||||||
|
cssManager,
|
||||||
|
type TemplateResult,
|
||||||
|
} from '@design.estate/dees-element';
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Data transformation helpers
|
||||||
|
// Maps backend data shapes to @serve.zone/catalog component interfaces
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
function formatBytes(bytes: number): string {
|
||||||
|
if (!bytes || bytes === 0) return '0 B';
|
||||||
|
const units = ['B', 'KB', 'MB', 'GB', 'TB'];
|
||||||
|
const k = 1024;
|
||||||
|
const i = Math.floor(Math.log(bytes) / Math.log(k));
|
||||||
|
const value = bytes / Math.pow(k, i);
|
||||||
|
return `${value.toFixed(1)} ${units[i]}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseImageString(image: string): { repository: string; tag: string } {
|
||||||
|
const lastColon = image.lastIndexOf(':');
|
||||||
|
const lastSlash = image.lastIndexOf('/');
|
||||||
|
if (lastColon > lastSlash && lastColon > 0) {
|
||||||
|
return {
|
||||||
|
repository: image.substring(0, lastColon),
|
||||||
|
tag: image.substring(lastColon + 1),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return { repository: image, tag: 'latest' };
|
||||||
|
}
|
||||||
|
|
||||||
|
function mapStatus(status: string): 'running' | 'stopped' | 'starting' | 'error' {
|
||||||
|
switch (status) {
|
||||||
|
case 'running': return 'running';
|
||||||
|
case 'starting': return 'starting';
|
||||||
|
case 'failed': return 'error';
|
||||||
|
case 'stopped':
|
||||||
|
case 'stopping':
|
||||||
|
default: return 'stopped';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Maps a backend IService record onto the flat shape expected by the
 * service-detail catalog component. Missing timestamps render as '-'.
 */
function toServiceDetail(service: interfaces.data.IService) {
  const parsed = parseImageString(service.image);
  return {
    name: service.name,
    status: mapStatus(service.status),
    image: service.image,
    port: service.port,
    domain: service.domain || null,
    containerId: service.containerID || '',
    created: service.createdAt ? new Date(service.createdAt).toLocaleString() : '-',
    updated: service.updatedAt ? new Date(service.updatedAt).toLocaleString() : '-',
    // Built-in Onebox registry wins; otherwise the configured registry or Docker Hub.
    registry: service.useOneboxRegistry ? 'Onebox Registry' : (service.registry || 'Docker Hub'),
    // Explicit registry fields take precedence over what was parsed from the image string.
    repository: service.registryRepository || parsed.repository,
    tag: service.registryImageTag || parsed.tag,
  };
}
|
||||||
|
|
||||||
|
function toServiceStats(stats: interfaces.data.IContainerStats) {
|
||||||
|
return {
|
||||||
|
cpu: stats.cpuPercent,
|
||||||
|
memory: formatBytes(stats.memoryUsed),
|
||||||
|
memoryLimit: formatBytes(stats.memoryLimit),
|
||||||
|
networkIn: formatBytes(stats.networkRx),
|
||||||
|
networkOut: formatBytes(stats.networkTx),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseLogs(logs: any): Array<{ timestamp: string; message: string }> {
|
||||||
|
if (Array.isArray(logs)) {
|
||||||
|
return logs.map((entry: any) => ({
|
||||||
|
timestamp: entry.timestamp ? String(entry.timestamp) : '',
|
||||||
|
message: entry.message || String(entry),
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
if (typeof logs === 'string' && logs.trim()) {
|
||||||
|
return logs.split('\n').filter((line: string) => line.trim()).map((line: string) => {
|
||||||
|
const match = line.match(/^(\d{4}-\d{2}-\d{2}T[\d:.]+Z?)\s+(.*)/);
|
||||||
|
if (match) {
|
||||||
|
return { timestamp: match[1], message: match[2] };
|
||||||
|
}
|
||||||
|
return { timestamp: '', message: line };
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
|
||||||
|
// Placeholder stats shown while no live container stats are available.
const defaultStats = { cpu: 0, memory: '0 B', memoryLimit: '0 B', networkIn: '0 B', networkOut: '0 B' };
|
||||||
|
|
||||||
|
/**
 * Top-level "Services" view. Routes between list / create / detail / backups /
 * platform-detail sub-views via the local `currentView` state and delegates the
 * actual rendering to the `sz-*` presentation components, wiring their events
 * to appstate actions.
 */
@customElement('ob-view-services')
export class ObViewServices extends DeesElement {
  // Local mirror of the global services state, kept in sync by the
  // subscription set up in the constructor.
  @state()
  accessor servicesState: appstate.IServicesState = {
    services: [],
    currentService: null,
    currentServiceLogs: [],
    currentServiceStats: null,
    platformServices: [],
    currentPlatformService: null,
  };

  // Local mirror of the global backups state (schedules + completed backups).
  @state()
  accessor backupsState: appstate.IBackupsState = {
    backups: [],
    schedules: [],
  };

  // Which sub-view is currently shown; render() switches on this.
  @state()
  accessor currentView: 'list' | 'create' | 'detail' | 'backups' | 'platform-detail' = 'list';

  // Name of the service selected in the list; used by detail fetches and actions.
  @state()
  accessor selectedServiceName: string = '';

  // Type key of the selected platform service (for the platform-detail view).
  @state()
  accessor selectedPlatformType: string = '';

  constructor() {
    super();

    // Subscribe to both state parts; subscriptions are registered on
    // rxSubscriptions so the base element can tear them down on disconnect.
    const servicesSub = appstate.servicesStatePart
      .select((s) => s)
      .subscribe((newState) => {
        this.servicesState = newState;
      });
    this.rxSubscriptions.push(servicesSub);

    const backupsSub = appstate.backupsStatePart
      .select((s) => s)
      .subscribe((newState) => {
        this.backupsState = newState;
      });
    this.rxSubscriptions.push(backupsSub);
  }

  public static styles = [
    cssManager.defaultStyles,
    shared.viewHostCss,
    css``,
  ];

  async connectedCallback() {
    super.connectedCallback();
    // Load user services and platform services in parallel on mount.
    await Promise.all([
      appstate.servicesStatePart.dispatchAction(appstate.fetchServicesAction, null),
      appstate.servicesStatePart.dispatchAction(appstate.fetchPlatformServicesAction, null),
    ]);
  }

  public render(): TemplateResult {
    switch (this.currentView) {
      case 'create':
        return this.renderCreateView();
      case 'detail':
        return this.renderDetailView();
      case 'backups':
        return this.renderBackupsView();
      case 'platform-detail':
        return this.renderPlatformDetailView();
      default:
        return this.renderListView();
    }
  }

  // List of all services; clicking one kicks off the detail/logs/stats fetches
  // and switches to the detail view.
  private renderListView(): TemplateResult {
    const mappedServices = this.servicesState.services.map((s) => ({
      name: s.name,
      image: s.image,
      domain: s.domain || null,
      status: mapStatus(s.status),
    }));
    return html`
      <ob-sectionheading>Services</ob-sectionheading>
      <sz-services-list-view
        .services=${mappedServices}
        @service-click=${(e: CustomEvent) => {
          // Event detail shape varies; accept either a bare name or a service object.
          this.selectedServiceName = e.detail.name || e.detail.service?.name;
          // Fire the three detail fetches without awaiting; the subscriptions
          // update the view as results arrive.
          appstate.servicesStatePart.dispatchAction(appstate.fetchServiceAction, {
            name: this.selectedServiceName,
          });
          appstate.servicesStatePart.dispatchAction(appstate.fetchServiceLogsAction, {
            name: this.selectedServiceName,
          });
          appstate.servicesStatePart.dispatchAction(appstate.fetchServiceStatsAction, {
            name: this.selectedServiceName,
          });
          this.currentView = 'detail';
        }}
        @service-action=${(e: CustomEvent) => this.handleServiceAction(e)}
      ></sz-services-list-view>
    `;
  }

  // Service creation form; dispatches createServiceAction then returns to the list.
  private renderCreateView(): TemplateResult {
    return html`
      <ob-sectionheading>Create Service</ob-sectionheading>
      <sz-service-create-view
        .registries=${[]}
        @create-service=${async (e: CustomEvent) => {
          await appstate.servicesStatePart.dispatchAction(appstate.createServiceAction, {
            config: e.detail,
          });
          this.currentView = 'list';
        }}
        @cancel=${() => {
          this.currentView = 'list';
        }}
      ></sz-service-create-view>
    `;
  }

  // Detail view for the currently selected service: transformed service record,
  // parsed logs, and formatted stats (falling back to zeroed defaults).
  private renderDetailView(): TemplateResult {
    const service = this.servicesState.currentService;
    const transformedService = service ? toServiceDetail(service) : null;
    const transformedStats = this.servicesState.currentServiceStats
      ? toServiceStats(this.servicesState.currentServiceStats)
      : defaultStats;
    const transformedLogs = parseLogs(this.servicesState.currentServiceLogs);

    return html`
      <ob-sectionheading>Service Details</ob-sectionheading>
      <sz-service-detail-view
        .service=${transformedService}
        .logs=${transformedLogs}
        .stats=${transformedStats}
        @back=${() => {
          this.currentView = 'list';
        }}
        @service-action=${(e: CustomEvent) => this.handleServiceAction(e)}
      ></sz-service-detail-view>
    `;
  }

  // Backup schedules and completed backups; create/run/delete wired to backup actions.
  private renderBackupsView(): TemplateResult {
    return html`
      <ob-sectionheading>Backups</ob-sectionheading>
      <sz-services-backups-view
        .schedules=${this.backupsState.schedules}
        .backups=${this.backupsState.backups}
        @create-schedule=${(e: CustomEvent) => {
          appstate.backupsStatePart.dispatchAction(appstate.createScheduleAction, {
            config: e.detail,
          });
        }}
        @run-now=${(e: CustomEvent) => {
          appstate.backupsStatePart.dispatchAction(appstate.triggerScheduleAction, {
            scheduleId: e.detail.scheduleId,
          });
        }}
        @delete-backup=${(e: CustomEvent) => {
          appstate.backupsStatePart.dispatchAction(appstate.deleteBackupAction, {
            backupId: e.detail.backupId,
          });
        }}
      ></sz-services-backups-view>
    `;
  }

  // Platform-service detail. The platform record is adapted to the shape the
  // detail component expects; version/host/port/config are hard-coded
  // placeholders — presumably not yet provided by the backend (TODO confirm).
  private renderPlatformDetailView(): TemplateResult {
    const platformService = this.servicesState.platformServices.find(
      (ps) => ps.type === this.selectedPlatformType,
    );
    return html`
      <ob-sectionheading>Platform Service</ob-sectionheading>
      <sz-platform-service-detail-view
        .service=${platformService
          ? {
              id: platformService.type,
              name: platformService.displayName,
              type: platformService.type,
              status: platformService.status,
              version: '',
              host: 'localhost',
              port: 0,
              config: {},
            }
          : null}
        .logs=${[]}
        @start=${() => {
          appstate.servicesStatePart.dispatchAction(appstate.startPlatformServiceAction, {
            serviceType: this.selectedPlatformType as any,
          });
        }}
        @stop=${() => {
          appstate.servicesStatePart.dispatchAction(appstate.stopPlatformServiceAction, {
            serviceType: this.selectedPlatformType as any,
          });
        }}
      ></sz-platform-service-detail-view>
    `;
  }

  // Shared handler for start/stop/restart/delete actions emitted by both the
  // list and the detail view. Deleting returns to the list.
  private async handleServiceAction(e: CustomEvent) {
    const action = e.detail.action;
    // Resolve the target name from whichever field the emitting component set.
    const name = e.detail.service?.name || e.detail.name || this.selectedServiceName;
    switch (action) {
      case 'start':
        await appstate.servicesStatePart.dispatchAction(appstate.startServiceAction, { name });
        break;
      case 'stop':
        await appstate.servicesStatePart.dispatchAction(appstate.stopServiceAction, { name });
        break;
      case 'restart':
        await appstate.servicesStatePart.dispatchAction(appstate.restartServiceAction, { name });
        break;
      case 'delete':
        await appstate.servicesStatePart.dispatchAction(appstate.deleteServiceAction, { name });
        this.currentView = 'list';
        break;
    }
  }
}
|
||||||
93
ts_web/elements/ob-view-settings.ts
Normal file
93
ts_web/elements/ob-view-settings.ts
Normal file
@@ -0,0 +1,93 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import * as shared from './shared/index.js';
|
||||||
|
import * as appstate from '../appstate.js';
|
||||||
|
import {
|
||||||
|
DeesElement,
|
||||||
|
customElement,
|
||||||
|
html,
|
||||||
|
state,
|
||||||
|
css,
|
||||||
|
cssManager,
|
||||||
|
type TemplateResult,
|
||||||
|
} from '@design.estate/dees-element';
|
||||||
|
|
||||||
|
/**
 * "Settings" view. Mirrors the global settings and login state, fetches
 * settings on mount, and wires the `sz-settings-view` component's events to
 * the settings appstate actions.
 */
@customElement('ob-view-settings')
export class ObViewSettings extends DeesElement {
  // Local mirror of the settings state part.
  @state()
  accessor settingsState: appstate.ISettingsState = {
    settings: null,
    backupPasswordConfigured: false,
  };

  // Local mirror of the login state part (used only to show the current user).
  @state()
  accessor loginState: appstate.ILoginState = {
    identity: null,
    isLoggedIn: false,
  };

  constructor() {
    super();

    // Keep local state in sync; subscriptions are released via rxSubscriptions.
    const settingsSub = appstate.settingsStatePart
      .select((s) => s)
      .subscribe((newState) => {
        this.settingsState = newState;
      });
    this.rxSubscriptions.push(settingsSub);

    const loginSub = appstate.loginStatePart
      .select((s) => s)
      .subscribe((newState) => {
        this.loginState = newState;
      });
    this.rxSubscriptions.push(loginSub);
  }

  public static styles = [
    cssManager.defaultStyles,
    shared.viewHostCss,
    css``,
  ];

  async connectedCallback() {
    super.connectedCallback();
    // Load current settings on mount.
    await appstate.settingsStatePart.dispatchAction(appstate.fetchSettingsAction, null);
  }

  public render(): TemplateResult {
    return html`
      <ob-sectionheading>Settings</ob-sectionheading>
      <sz-settings-view
        .settings=${this.settingsState.settings || {
          darkMode: true,
          cloudflareToken: '',
          cloudflareZoneId: '',
          autoRenewCerts: false,
          renewalThreshold: 30,
          acmeEmail: '',
          httpPort: 80,
          httpsPort: 443,
          forceHttps: false,
        }}
        .currentUser=${this.loginState.identity?.username || 'admin'}
        @setting-change=${(e: CustomEvent) => {
          // Single-field update: patch just the changed key.
          const { key, value } = e.detail;
          appstate.settingsStatePart.dispatchAction(appstate.updateSettingsAction, {
            settings: { [key]: value },
          });
        }}
        @save=${(e: CustomEvent) => {
          // Bulk save: the detail carries the whole settings object.
          appstate.settingsStatePart.dispatchAction(appstate.updateSettingsAction, {
            settings: e.detail,
          });
        }}
        @change-password=${(e: CustomEvent) => {
          // NOTE(review): password change is not implemented yet — only logged.
          console.log('Change password requested:', e.detail);
        }}
        @reset=${() => {
          // Discard unsaved edits by re-fetching from the server.
          appstate.settingsStatePart.dispatchAction(appstate.fetchSettingsAction, null);
        }}
      ></sz-settings-view>
    `;
  }
}
|
||||||
86
ts_web/elements/ob-view-tokens.ts
Normal file
86
ts_web/elements/ob-view-tokens.ts
Normal file
@@ -0,0 +1,86 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import * as shared from './shared/index.js';
|
||||||
|
import * as appstate from '../appstate.js';
|
||||||
|
import {
|
||||||
|
DeesElement,
|
||||||
|
customElement,
|
||||||
|
html,
|
||||||
|
state,
|
||||||
|
css,
|
||||||
|
cssManager,
|
||||||
|
type TemplateResult,
|
||||||
|
} from '@design.estate/dees-element';
|
||||||
|
|
||||||
|
/**
 * "Tokens" view. Shows registry access tokens split into global and per-service
 * CI tokens, and wires create/delete events to the registries appstate actions.
 */
@customElement('ob-view-tokens')
export class ObViewTokens extends DeesElement {
  // Local mirror of the registries state (tokens + registry status).
  @state()
  accessor registriesState: appstate.IRegistriesState = {
    tokens: [],
    registryStatus: null,
  };

  constructor() {
    super();

    // Keep local state in sync; the subscription is released via rxSubscriptions.
    const registriesSub = appstate.registriesStatePart
      .select((s) => s)
      .subscribe((newState) => {
        this.registriesState = newState;
      });
    this.rxSubscriptions.push(registriesSub);
  }

  public static styles = [
    cssManager.defaultStyles,
    shared.viewHostCss,
    css``,
  ];

  async connectedCallback() {
    super.connectedCallback();
    // Load the token list on mount.
    await appstate.registriesStatePart.dispatchAction(
      appstate.fetchRegistryTokensAction,
      null,
    );
  }

  public render(): TemplateResult {
    // Partition tokens by kind for the two lists in the view component.
    const globalTokens = this.registriesState.tokens.filter((t) => t.type === 'global');
    const ciTokens = this.registriesState.tokens.filter((t) => t.type === 'ci');

    return html`
      <ob-sectionheading>Tokens</ob-sectionheading>
      <sz-tokens-view
        .globalTokens=${globalTokens.map((t) => ({
          id: t.id,
          name: t.name,
          type: 'global' as const,
          createdAt: t.createdAt,
          lastUsed: t.lastUsed,
        }))}
        .ciTokens=${ciTokens.map((t) => ({
          id: t.id,
          name: t.name,
          type: 'ci' as const,
          service: t.service,
          createdAt: t.createdAt,
          lastUsed: t.lastUsed,
        }))}
        @create=${(e: CustomEvent) => {
          // NOTE(review): token name is a generated placeholder, and permissions
          // are hard-coded to pull-only — confirm this matches intended UX.
          appstate.registriesStatePart.dispatchAction(appstate.createRegistryTokenAction, {
            token: {
              name: `new-${e.detail.type}-token`,
              type: e.detail.type,
              permissions: ['pull'],
            },
          });
        }}
        @delete=${(e: CustomEvent) => {
          // Event detail shape varies; accept either `id` or `tokenId`.
          appstate.registriesStatePart.dispatchAction(appstate.deleteRegistryTokenAction, {
            tokenId: e.detail.id || e.detail.tokenId,
          });
        }}
      ></sz-tokens-view>
    `;
  }
}
|
||||||
10
ts_web/elements/shared/css.ts
Normal file
10
ts_web/elements/shared/css.ts
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
import { css } from '@design.estate/dees-element';
|
||||||
|
|
||||||
|
// Shared host styling for top-level view elements: a centered block column
// capped at 1280px with uniform padding. Imported via the shared barrel and
// added to each view's static `styles` array.
export const viewHostCss = css`
  :host {
    display: block;
    margin: auto;
    max-width: 1280px;
    padding: 16px 16px;
  }
`;
|
||||||
2
ts_web/elements/shared/index.ts
Normal file
2
ts_web/elements/shared/index.ts
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
export * from './css.js';
|
||||||
|
export * from './ob-sectionheading.js';
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user