Compare commits

..

30 Commits

Author SHA1 Message Date
jkunz 7fe63541b3 fix: align delegate routing settings UI
Release / build-and-release (push) Successful in 2m44s
2026-05-08 19:32:40 +00:00
jkunz 201602b733 fix: use compiled-safe password hashing
Release / build-and-release (push) Successful in 2m34s
2026-05-08 16:36:58 +00:00
jkunz cc6a81012c fix: restore onebox daemon startup
Release / build-and-release (push) Successful in 2m28s
2026-05-08 16:23:45 +00:00
jkunz fba143d918 fix: update onebox installer credentials output
Release / build-and-release (push) Successful in 2m32s
2026-05-08 16:12:22 +00:00
jkunz b0f9d71a18 fix: update onebox runtime dependencies
Release / build-and-release (push) Successful in 2m33s
Bump Onebox to 1.24.3 with current API/runtime dependencies, registry routing fixes, safer initial admin handling, and cleaner shutdown of Docker-backed resources.
2026-05-08 15:39:02 +00:00
jkunz 61f72a4b7a docs: refresh readme and legal info 2026-05-07 20:22:12 +00:00
jkunz c04be7117e feat: expose dcrouter gateway settings 2026-04-29 15:57:10 +00:00
jkunz 7ee740695f feat: add dcrouter external gateway sync 2026-04-29 15:24:25 +00:00
jkunz 1f3705fa25 chore: remove committed dist_serve artifacts 2026-04-29 15:19:28 +00:00
jkunz 90ca53356d fix: restore platform backup data 2026-04-29 14:11:00 +00:00
jkunz 69b528a499 fix: replace stopped platform containers 2026-04-29 07:39:42 +00:00
jkunz 63c6fb4b6a fix: use absolute platform data paths 2026-04-29 07:34:15 +00:00
jkunz 35f83d7c2d fix: isolate platform service data dirs 2026-04-29 02:05:53 +00:00
jkunz c451d71a97 feat: add appstore install CLI 2026-04-29 01:59:09 +00:00
jkunz 2b51178016 fix: clean up registry shutdown 2026-04-29 01:29:53 +00:00
jkunz 5cb6895a14 fix: clean up SmartProxy lifecycle 2026-04-28 21:59:00 +00:00
jkunz c5d9158078 feat: replace onebox ingress with SmartProxy 2026-04-28 21:30:48 +00:00
jkunz 0f5ce708d9 fix: require app template environment values 2026-04-28 15:07:13 +00:00
jkunz 3da7e431c2 refactor: complete opsserver migration 2026-04-28 14:35:26 +00:00
jkunz 49c1830168 feat: resolve app template env placeholders 2026-04-28 14:28:01 +00:00
jkunz 061ce7c3f2 feat: add secret settings manager and migration for legacy settings
- Implemented SecretSettingsManager to handle secret settings with encryption.
- Added functionality to migrate legacy plaintext settings into encrypted storage.
- Introduced methods for setting, getting, and clearing secret settings.
- Created tests for verifying the migration and canonicalization of secret settings.
- Updated app state to handle service updates via socket communication.
- Added interface for push service updates to manage service state changes.
2026-04-19 01:47:06 +00:00
jkunz 618d4d674f Add tests for authentication and security features
- Implement unit tests for password handling in `auth_test.ts`, covering bcrypt and legacy password hashes.
- Create a fake database for user management to facilitate testing of the `AdminHandler`.
- Validate JWT-based identity verification against database records.
- Introduce tests for credential encryption and registry management in `security_test.ts`.
- Ensure registry passwords are securely stored and can be decrypted correctly, including legacy support.
- Add utility functions for password hashing and verification in `auth.ts`.
2026-04-19 01:30:54 +00:00
jkunz 0c9eb0653d v1.24.2
Release / build-and-release (push) Successful in 3m37s
2026-03-24 20:17:30 +00:00
jkunz ed6a35eb86 fix(deps): bump runtime and build tool dependencies 2026-03-24 20:17:30 +00:00
jkunz 242677404b v1.24.1
Release / build-and-release (push) Failing after 24s
2026-03-24 20:08:25 +00:00
jkunz 8c6159c596 fix(repo): migrate smart build config to .smartconfig.json and tidy repository metadata 2026-03-24 20:08:25 +00:00
jkunz c210507951 v1.24.0
Release / build-and-release (push) Successful in 3m6s
2026-03-24 19:54:56 +00:00
jkunz 0799efadae feat(backup): add containerarchive-backed backup storage, restore, download, and pruning support 2026-03-24 19:54:56 +00:00
jkunz 22a7e76645 v1.23.0
Release / build-and-release (push) Successful in 3m24s
2026-03-21 19:36:25 +00:00
jkunz 22f34e7de5 feat(appstore): add remote app store templates with service upgrades and Redis/MariaDB platform support 2026-03-21 19:36:25 +00:00
99 changed files with 7573 additions and 42566 deletions
+27
View File
@@ -1,3 +1,30 @@
.nogit/
# artifacts
coverage/
public/
# installs
node_modules/
# caches
.yarn/
.cache/
.rpt2_cache
# builds
dist/
dist_*/
# rust
rust/target/
dist_rust/
# AI
.claude/
.serena/
#------# custom
# Deno # Deno
.deno/ .deno/
deno.lock deno.lock
+25 -3
View File
@@ -7,7 +7,12 @@
"outputMode": "base64ts", "outputMode": "base64ts",
"bundler": "esbuild", "bundler": "esbuild",
"production": true, "production": true,
"includeFiles": [{"from": "./html/index.html", "to": "index.html"}] "includeFiles": [
{
"from": "./html/index.html",
"to": "index.html"
}
]
} }
] ]
}, },
@@ -40,7 +45,12 @@
"bundler": "esbuild", "bundler": "esbuild",
"production": true, "production": true,
"watchPatterns": ["./ts_web/**/*", "./html/**/*"], "watchPatterns": ["./ts_web/**/*", "./html/**/*"],
"includeFiles": [{"from": "./html/index.html", "to": "index.html"}] "includeFiles": [
{
"from": "./html/index.html",
"to": "index.html"
}
]
} }
], ],
"watchers": [ "watchers": [
@@ -53,5 +63,17 @@
"runOnStart": true "runOnStart": true
} }
] ]
} },
"@git.zone/cli": {
"projectType": "denoSaaS",
"module": {
"githost": "code.foss.global",
"gitscope": "serve.zone",
"gitrepo": "onebox",
"description": "Self-hosted container platform with automatic SSL and DNS - a mini Heroku for single servers",
"npmPackagename": "@serve.zone/onebox",
"license": "MIT"
}
},
"@ship.zone/szci": {}
} }
+159 -37
View File
@@ -1,6 +1,74 @@
# Changelog # Changelog
## 2026-05-08 - 1.24.7 - fix(web-ui)
align Delegate Routing settings with the Dees catalog control and theme conventions
- replace raw Delegate Routing inputs and save button with `dees-input-text` and `dees-button`
- style the Delegate Routing card with explicit `cssManager.bdTheme(...)` colors
## 2026-05-08 - 1.24.6 - fix(auth)
avoid bcrypt worker crashes in compiled binaries during login and password creation
- replace bcrypt password hashing with a Web Crypto PBKDF2 hash format
- remove legacy password-hash fallbacks; existing deployments need their admin user hash updated
## 2026-05-08 - 1.24.5 - fix(opsserver)
start the OpsServer with typedserver custom routes registered through the UtilityWebsiteServer hook
- fixes daemon startup with the current typedserver lifecycle
- cap SmartProxy readiness waiting at 10 seconds during daemon startup
## 2026-05-08 - 1.24.4 - fix(installer)
avoid documenting a hardcoded initial admin password for fresh installs
- update installer output to point operators to the service logs or `ONEBOX_ADMIN_PASSWORD` for initial credentials
## 2026-05-08 - 1.24.3 - fix(runtime)
upgrade runtime dependencies and harden registry/shutdown behavior
- update Deno, API, Docker, Cloudflare, SmartACME, SmartRegistry, SmartStorage, TaskBuffer, catalog, and build-tool dependencies
- expose the embedded OCI registry through OpsServer `/v2` routes with the configured token realm
- avoid creating a hardcoded default admin password and close Docker/log receiver resources during shutdown
## 2026-03-24 - 1.24.2 - fix(deps)
bump runtime and build tool dependencies
- update @design.estate/dees-catalog to ^3.49.0
- update development tooling packages @git.zone/tsbundle, @git.zone/tsdeno, and @git.zone/tswatch
## 2026-03-24 - 1.24.1 - fix(repo)
migrate smart build config to .smartconfig.json and tidy repository metadata
- Rename npmextra.json to .smartconfig.json and extend it with CLI project metadata for the repository.
- Mark the package as private and add an empty pnpm overrides block in package.json.
- Expand .gitignore to cover common build artifacts, caches, install directories, and local tooling folders.
- Reformat changelog and README files for cleaner spacing and Markdown table alignment without changing documented behavior.
## 2026-03-24 - 1.24.0 - feat(backup)
add containerarchive-backed backup storage, restore, download, and pruning support
- add database support for archive snapshot IDs and stored size tracking for backups
- initialize and close the backup archive during onebox lifecycle startup and shutdown
- allow backup download and restore flows to work with archive snapshots as well as legacy file-based backups
- schedule daily archive pruning based on the most generous configured retention policy
- replace smarts3 with smartstorage for registry-backed S3-compatible storage
## 2026-03-21 - 1.23.0 - feat(appstore)
add remote app store templates with service upgrades and Redis/MariaDB platform support
- introduces an App Store manager, API handlers, shared request types, and web UI flow for browsing remote templates and deploying services from template metadata
- tracks app template id and version on services, adds upgrade discovery and migration-based service upgrades, and includes a database migration for template version columns
- adds Redis and MariaDB platform service providers with provisioning plus backup and restore support, and exposes their requirements through service creation and app template config
## 2026-03-18 - 1.22.2 - fix(web-ui) ## 2026-03-18 - 1.22.2 - fix(web-ui)
stabilize app store service creation flow and add Ghost sqlite defaults stabilize app store service creation flow and add Ghost sqlite defaults
- Defers App Store navigation to the services view to avoid destroying the current view during the deploy event handler. - Defers App Store navigation to the services view to avoid destroying the current view during the deploy event handler.
@@ -9,10 +77,11 @@ stabilize app store service creation flow and add Ghost sqlite defaults
- Removes obsolete Gitea CI and npm publish workflow definitions. - Removes obsolete Gitea CI and npm publish workflow definitions.
## 2026-03-18 - 1.22.1 - fix(repo) ## 2026-03-18 - 1.22.1 - fix(repo)
no changes to commit no changes to commit
## 2026-03-18 - 1.22.0 - feat(web-appstore) ## 2026-03-18 - 1.22.0 - feat(web-appstore)
add an App Store view for quick service deployment from curated templates add an App Store view for quick service deployment from curated templates
- adds a new App Store tab to the web UI with curated Docker app templates - adds a new App Store tab to the web UI with curated Docker app templates
@@ -21,6 +90,7 @@ add an App Store view for quick service deployment from curated templates
- updates @serve.zone/catalog to ^2.8.0 to support the new app store view - updates @serve.zone/catalog to ^2.8.0 to support the new app store view
## 2026-03-18 - 1.21.0 - feat(opsserver) ## 2026-03-18 - 1.21.0 - feat(opsserver)
add container workspace API and backend execution environment for services add container workspace API and backend execution environment for services
- introduces typed workspace handlers for reading, writing, listing, creating, removing, and executing commands inside service containers - introduces typed workspace handlers for reading, writing, listing, creating, removing, and executing commands inside service containers
@@ -28,6 +98,7 @@ add container workspace API and backend execution environment for services
- extends Docker exec lookup to resolve Swarm service container IDs when a direct container ID is unavailable - extends Docker exec lookup to resolve Swarm service container IDs when a direct container ID is unavailable
## 2026-03-17 - 1.20.0 - feat(ops-dashboard) ## 2026-03-17 - 1.20.0 - feat(ops-dashboard)
stream user service logs to the ops dashboard and resolve service containers for Docker log streaming stream user service logs to the ops dashboard and resolve service containers for Docker log streaming
- add typed socket support for pushing live user service log entries to the web app - add typed socket support for pushing live user service log entries to the web app
@@ -37,58 +108,61 @@ stream user service logs to the ops dashboard and resolve service containers for
- bump @serve.zone/catalog to ^2.7.0 - bump @serve.zone/catalog to ^2.7.0
## 2026-03-17 - 1.19.12 - fix(repo) ## 2026-03-17 - 1.19.12 - fix(repo)
no changes to commit
no changes to commit
## 2026-03-17 - 1.19.11 - fix(repo) ## 2026-03-17 - 1.19.11 - fix(repo)
no changes to commit
no changes to commit
## 2026-03-17 - 1.19.10 - fix(repo) ## 2026-03-17 - 1.19.10 - fix(repo)
no changes to commit
no changes to commit
## 2026-03-17 - 1.19.9 - fix(repo) ## 2026-03-17 - 1.19.9 - fix(repo)
no changes to commit
no changes to commit
## 2026-03-17 - 1.19.8 - fix(repo) ## 2026-03-17 - 1.19.8 - fix(repo)
no changes to commit
no changes to commit
## 2026-03-17 - 1.19.7 - fix(repo) ## 2026-03-17 - 1.19.7 - fix(repo)
no changes to commit
no changes to commit
## 2026-03-17 - 1.19.6 - fix(repository) ## 2026-03-17 - 1.19.6 - fix(repository)
no changes to commit
no changes to commit
## 2026-03-17 - 1.19.5 - fix(repo) ## 2026-03-17 - 1.19.5 - fix(repo)
no changes to commit
no changes to commit
## 2026-03-17 - 1.19.4 - fix(repository) ## 2026-03-17 - 1.19.4 - fix(repository)
no changes to commit
no changes to commit
## 2026-03-16 - 1.19.3 - fix(repo) ## 2026-03-16 - 1.19.3 - fix(repo)
no changes to commit no changes to commit
## 2026-03-16 - 1.19.2 - fix(docs) ## 2026-03-16 - 1.19.2 - fix(docs)
remove outdated UI screenshot assets from project documentation remove outdated UI screenshot assets from project documentation
- Deletes multiple PNG screenshots that documented previous dashboard, service form, and hello-world states. - Deletes multiple PNG screenshots that documented previous dashboard, service form, and hello-world states.
- Reduces repository clutter by removing obsolete image assets no longer needed in docs. - Reduces repository clutter by removing obsolete image assets no longer needed in docs.
## 2026-03-16 - 1.19.1 - fix(dashboard) ## 2026-03-16 - 1.19.1 - fix(dashboard)
add updated dashboard screenshots for refresh and resource usage states add updated dashboard screenshots for refresh and resource usage states
- Adds new dashboard screenshots covering post-refresh, resource usage, and populated data views. - Adds new dashboard screenshots covering post-refresh, resource usage, and populated data views.
- Updates visual assets to document current dashboard behavior and UI states. - Updates visual assets to document current dashboard behavior and UI states.
## 2026-03-16 - 1.19.1 - fix(dashboard) ## 2026-03-16 - 1.19.1 - fix(dashboard)
add aggregated resource usage stats to the dashboard add aggregated resource usage stats to the dashboard
- Aggregate CPU, memory, and network stats across all running user and platform service containers in getSystemStatus - Aggregate CPU, memory, and network stats across all running user and platform service containers in getSystemStatus
@@ -97,6 +171,7 @@ add aggregated resource usage stats to the dashboard
- Wire dashboard resource usage card to display real aggregated data from the backend - Wire dashboard resource usage card to display real aggregated data from the backend
## 2026-03-16 - 1.19.0 - feat(opsserver,web) ## 2026-03-16 - 1.19.0 - feat(opsserver,web)
add real-time platform service log streaming to the dashboard add real-time platform service log streaming to the dashboard
- stream running platform service container logs from the ops server to connected dashboard clients via TypedSocket - stream running platform service container logs from the ops server to connected dashboard clients via TypedSocket
@@ -105,6 +180,7 @@ add real-time platform service log streaming to the dashboard
- add the typedsocket dependency and update the catalog package for dashboard support - add the typedsocket dependency and update the catalog package for dashboard support
## 2026-03-16 - 1.18.5 - fix(platform-services) ## 2026-03-16 - 1.18.5 - fix(platform-services)
fix platform service detail view navigation and log display fix platform service detail view navigation and log display
- Add back button to platform service detail view for returning to services list - Add back button to platform service detail view for returning to services list
@@ -113,23 +189,25 @@ fix platform service detail view navigation and log display
- Clear previous stats/logs state before fetching new platform service data - Clear previous stats/logs state before fetching new platform service data
## 2026-03-16 - 1.18.4 - fix(repo) ## 2026-03-16 - 1.18.4 - fix(repo)
no changes to commit no changes to commit
## 2026-03-16 - 1.18.3 - fix(deps) ## 2026-03-16 - 1.18.3 - fix(deps)
bump @serve.zone/catalog to ^2.6.1 bump @serve.zone/catalog to ^2.6.1
- Updates the @serve.zone/catalog runtime dependency from ^2.6.0 to ^2.6.1. - Updates the @serve.zone/catalog runtime dependency from ^2.6.0 to ^2.6.1.
## 2026-03-16 - 1.18.2 - fix(repo) ## 2026-03-16 - 1.18.2 - fix(repo)
no changes to commit
no changes to commit
## 2026-03-16 - 1.18.1 - fix(repo) ## 2026-03-16 - 1.18.1 - fix(repo)
no changes to commit no changes to commit
## 2026-03-16 - 1.18.0 - feat(platform-services) ## 2026-03-16 - 1.18.0 - feat(platform-services)
add platform service log retrieval and display in the services UI add platform service log retrieval and display in the services UI
- add typed request support in the ops server to fetch Docker logs for platform service containers - add typed request support in the ops server to fetch Docker logs for platform service containers
@@ -137,18 +215,21 @@ add platform service log retrieval and display in the services UI
- render platform service logs in the services detail view and add sidebar icons for main navigation tabs - render platform service logs in the services detail view and add sidebar icons for main navigation tabs
## 2026-03-16 - 1.17.4 - fix(docs) ## 2026-03-16 - 1.17.4 - fix(docs)
add hello world running screenshot for documentation add hello world running screenshot for documentation
- Adds a new PNG asset showing the application in a running hello world state. - Adds a new PNG asset showing the application in a running hello world state.
- Supports project documentation or README usage without changing runtime behavior. - Supports project documentation or README usage without changing runtime behavior.
## 2026-03-16 - 1.17.3 - fix(mongodb) ## 2026-03-16 - 1.17.3 - fix(mongodb)
downgrade the MongoDB service image to 4.4 and use the legacy mongo shell for container operations downgrade the MongoDB service image to 4.4 and use the legacy mongo shell for container operations
- changes the default MongoDB container image from mongo:7 to mongo:4.4 - changes the default MongoDB container image from mongo:7 to mongo:4.4
- replaces mongosh with mongo for health checks, provisioning, and deprovisioning inside the container - replaces mongosh with mongo for health checks, provisioning, and deprovisioning inside the container
## 2026-03-16 - 1.17.2 - fix(platform-services) ## 2026-03-16 - 1.17.2 - fix(platform-services)
provision ClickHouse, MinIO, and MongoDB resources via docker exec instead of host port access provision ClickHouse, MinIO, and MongoDB resources via docker exec instead of host port access
- switch ClickHouse provisioning and teardown to in-container client commands to avoid host port mapping issues - switch ClickHouse provisioning and teardown to in-container client commands to avoid host port mapping issues
@@ -156,10 +237,11 @@ provision ClickHouse, MinIO, and MongoDB resources via docker exec instead of ho
- run MongoDB provisioning and deprovisioning through mongosh inside the container and improve docker exec failure reporting - run MongoDB provisioning and deprovisioning through mongosh inside the container and improve docker exec failure reporting
## 2026-03-16 - 1.17.1 - fix(repo) ## 2026-03-16 - 1.17.1 - fix(repo)
no changes to commit no changes to commit
## 2026-03-16 - 1.17.0 - feat(web/services) ## 2026-03-16 - 1.17.0 - feat(web/services)
add deploy service action to the services view add deploy service action to the services view
- Adds a prominent "Deploy Service" button to the services page header. - Adds a prominent "Deploy Service" button to the services page header.
@@ -167,6 +249,7 @@ add deploy service action to the services view
- Includes a new service creation form screenshot asset for the updated interface. - Includes a new service creation form screenshot asset for the updated interface.
## 2026-03-16 - 1.16.0 - feat(services) ## 2026-03-16 - 1.16.0 - feat(services)
add platform service navigation and stats in the services UI add platform service navigation and stats in the services UI
- add platform service stats state and fetch action - add platform service stats state and fetch action
@@ -176,24 +259,28 @@ add platform service navigation and stats in the services UI
- bump @serve.zone/catalog to ^2.6.0 for the new platform service UI components - bump @serve.zone/catalog to ^2.6.0 for the new platform service UI components
## 2026-03-16 - 1.15.3 - fix(install) ## 2026-03-16 - 1.15.3 - fix(install)
refresh systemd service configuration before restarting previously running installations refresh systemd service configuration before restarting previously running installations
- Re-enable the systemd service during updates so unit file changes are applied before restart - Re-enable the systemd service during updates so unit file changes are applied before restart
- Add a log message indicating the service configuration is being refreshed - Add a log message indicating the service configuration is being refreshed
## 2026-03-16 - 1.15.2 - fix(systemd) ## 2026-03-16 - 1.15.2 - fix(systemd)
set HOME and DENO_DIR for the systemd service environment set HOME and DENO_DIR for the systemd service environment
- Adds HOME=/root to the generated onebox systemd unit - Adds HOME=/root to the generated onebox systemd unit
- Adds DENO_DIR=/root/.cache/deno so Deno cache paths are available when running as a service - Adds DENO_DIR=/root/.cache/deno so Deno cache paths are available when running as a service
## 2026-03-16 - 1.15.1 - fix(systemd) ## 2026-03-16 - 1.15.1 - fix(systemd)
move Docker installation and swarm initialization to systemd enable flow move Docker installation and swarm initialization to systemd enable flow
- Ensures Docker is installed before writing and enabling the systemd unit that depends on docker.service. - Ensures Docker is installed before writing and enabling the systemd unit that depends on docker.service.
- Removes Docker auto-installation from Onebox initialization so setup happens in the service management path. - Removes Docker auto-installation from Onebox initialization so setup happens in the service management path.
## 2026-03-16 - 1.15.0 - feat(systemd) ## 2026-03-16 - 1.15.0 - feat(systemd)
replace smartdaemon-based service management with native systemd commands replace smartdaemon-based service management with native systemd commands
- adds a dedicated OneboxSystemd manager for enabling, disabling, starting, stopping, checking status, and following logs - adds a dedicated OneboxSystemd manager for enabling, disabling, starting, stopping, checking status, and following logs
@@ -201,28 +288,30 @@ replace smartdaemon-based service management with native systemd commands
- removes the smartdaemon dependency and related service management code - removes the smartdaemon dependency and related service management code
## 2026-03-16 - 1.14.10 - fix(services) ## 2026-03-16 - 1.14.10 - fix(services)
stop auto-update monitoring during shutdown stop auto-update monitoring during shutdown
- Track the auto-update polling interval in the services manager - Track the auto-update polling interval in the services manager
- Clear the auto-update interval when Onebox shuts down to prevent background checks after shutdown - Clear the auto-update interval when Onebox shuts down to prevent background checks after shutdown
## 2026-03-16 - 1.14.9 - fix(repo) ## 2026-03-16 - 1.14.9 - fix(repo)
no changes to commit
no changes to commit
## 2026-03-16 - 1.14.8 - fix(repo) ## 2026-03-16 - 1.14.8 - fix(repo)
no changes to commit
no changes to commit
## 2026-03-16 - 1.14.7 - fix(repo) ## 2026-03-16 - 1.14.7 - fix(repo)
no changes to commit
no changes to commit
## 2026-03-16 - 1.14.6 - fix(project) ## 2026-03-16 - 1.14.6 - fix(project)
no changes to commit no changes to commit
## 2026-03-16 - 1.14.5 - fix(onebox) ## 2026-03-16 - 1.14.5 - fix(onebox)
move Docker auto-install and swarm initialization into Onebox startup flow move Docker auto-install and swarm initialization into Onebox startup flow
- removes Docker setup from daemon service installation - removes Docker setup from daemon service installation
@@ -230,22 +319,23 @@ move Docker auto-install and swarm initialization into Onebox startup flow
- preserves automatic Docker Swarm initialization on fresh servers - preserves automatic Docker Swarm initialization on fresh servers
## 2026-03-16 - 1.14.4 - fix(repo) ## 2026-03-16 - 1.14.4 - fix(repo)
no changes to commit
no changes to commit
## 2026-03-16 - 1.14.3 - fix(repo) ## 2026-03-16 - 1.14.3 - fix(repo)
no changes to commit
no changes to commit
## 2026-03-16 - 1.14.2 - fix(repo) ## 2026-03-16 - 1.14.2 - fix(repo)
no changes to commit
no changes to commit
## 2026-03-16 - 1.14.1 - fix(repo) ## 2026-03-16 - 1.14.1 - fix(repo)
no changes to commit no changes to commit
## 2026-03-16 - 1.14.0 - feat(daemon) ## 2026-03-16 - 1.14.0 - feat(daemon)
auto-install Docker and initialize Swarm during daemon service setup auto-install Docker and initialize Swarm during daemon service setup
- Adds a Docker availability check before installing the Onebox daemon service - Adds a Docker availability check before installing the Onebox daemon service
@@ -253,75 +343,83 @@ auto-install Docker and initialize Swarm during daemon service setup
- Attempts to initialize Docker Swarm after installation and handles already-initialized environments gracefully - Attempts to initialize Docker Swarm after installation and handles already-initialized environments gracefully
## 2026-03-16 - 1.13.17 - fix(ci) ## 2026-03-16 - 1.13.17 - fix(ci)
remove forced container image pulling from Gitea workflow jobs remove forced container image pulling from Gitea workflow jobs
- Drops the `--pull always` container option from CI, npm publish, and release workflows. - Drops the `--pull always` container option from CI, npm publish, and release workflows.
- Keeps workflow container images unchanged while avoiding forced pulls on every job run. - Keeps workflow container images unchanged while avoiding forced pulls on every job run.
## 2026-03-16 - 1.13.16 - fix(ci) ## 2026-03-16 - 1.13.16 - fix(ci)
refresh workflow container images on every run and bump @apiclient.xyz/docker to ^5.1.1 refresh workflow container images on every run and bump @apiclient.xyz/docker to ^5.1.1
- add --pull always to CI, release, and npm publish workflow containers to avoid stale images - add --pull always to CI, release, and npm publish workflow containers to avoid stale images
- update @apiclient.xyz/docker from ^5.1.0 to ^5.1.1 in deno.json - update @apiclient.xyz/docker from ^5.1.0 to ^5.1.1 in deno.json
## 2026-03-15 - 1.13.15 - fix(repo) ## 2026-03-15 - 1.13.15 - fix(repo)
no changes to commit
no changes to commit
## 2026-03-15 - 1.13.14 - fix(repo) ## 2026-03-15 - 1.13.14 - fix(repo)
no changes to commit
no changes to commit
## 2026-03-15 - 1.13.13 - fix(repo) ## 2026-03-15 - 1.13.13 - fix(repo)
no changes to commit no changes to commit
## 2026-03-15 - 1.13.12 - fix(ci) ## 2026-03-15 - 1.13.12 - fix(ci)
run pnpm install with --ignore-scripts in CI and release workflows run pnpm install with --ignore-scripts in CI and release workflows
- Update CI workflow dependency installation steps to skip lifecycle scripts during builds. - Update CI workflow dependency installation steps to skip lifecycle scripts during builds.
- Apply the same install change to the release workflow for consistent automation behavior. - Apply the same install change to the release workflow for consistent automation behavior.
## 2026-03-15 - 1.13.11 - fix(project) ## 2026-03-15 - 1.13.11 - fix(project)
no changes to commit no changes to commit
## 2026-03-15 - 1.13.10 - fix(deps) ## 2026-03-15 - 1.13.10 - fix(deps)
bump @git.zone/tsdeno to ^1.2.0 bump @git.zone/tsdeno to ^1.2.0
- Updates the tsdeno development dependency from ^1.1.1 to ^1.2.0. - Updates the tsdeno development dependency from ^1.1.1 to ^1.2.0.
## 2026-03-15 - 1.13.9 - fix(repo) ## 2026-03-15 - 1.13.9 - fix(repo)
no changes to commit
no changes to commit
## 2026-03-15 - 1.13.8 - fix(repo) ## 2026-03-15 - 1.13.8 - fix(repo)
no changes to commit
no changes to commit
## 2026-03-15 - 1.13.7 - fix(repo) ## 2026-03-15 - 1.13.7 - fix(repo)
no changes to commit no changes to commit
## 2026-03-15 - 1.13.6 - fix(ci) ## 2026-03-15 - 1.13.6 - fix(ci)
correct workflow container image registry path correct workflow container image registry path
- Update Gitea CI, release, and npm publish workflows to use the corrected ht-docker-node image path - Update Gitea CI, release, and npm publish workflows to use the corrected ht-docker-node image path
- Align all workflow container references from hosttoday to host.today to prevent pipeline image resolution issues - Align all workflow container references from hosttoday to host.today to prevent pipeline image resolution issues
## 2026-03-15 - 1.13.5 - fix(workflows) ## 2026-03-15 - 1.13.5 - fix(workflows)
switch Gitea workflow containers from ht-docker-dbase to ht-docker-node switch Gitea workflow containers from ht-docker-dbase to ht-docker-node
- Updates the CI, release, and npm publish workflows to use the Node-focused container image consistently. - Updates the CI, release, and npm publish workflows to use the Node-focused container image consistently.
- Aligns workflow runtime images with the project's Node and Deno build and publish steps. - Aligns workflow runtime images with the project's Node and Deno build and publish steps.
## 2026-03-15 - 1.13.4 - fix(ci) ## 2026-03-15 - 1.13.4 - fix(ci)
run workflows in the shared build container and enable corepack for pnpm installs run workflows in the shared build container and enable corepack for pnpm installs
- adds the ht-docker-dbase container image to CI, release, and npm publish workflows - adds the ht-docker-dbase container image to CI, release, and npm publish workflows
- enables corepack before pnpm install in build and release jobs to ensure package manager availability - enables corepack before pnpm install in build and release jobs to ensure package manager availability
## 2026-03-15 - 1.13.3 - fix(build) ## 2026-03-15 - 1.13.3 - fix(build)
replace custom Deno compile scripts with tsdeno-based binary builds in CI and release workflows replace custom Deno compile scripts with tsdeno-based binary builds in CI and release workflows
- adds @git.zone/tsdeno as a dev dependency and configures compile targets in npmextra.json - adds @git.zone/tsdeno as a dev dependency and configures compile targets in npmextra.json
@@ -329,18 +427,21 @@ replace custom Deno compile scripts with tsdeno-based binary builds in CI and re
- removes the legacy scripts/compile-all.sh script and points the compile task to tsdeno compile - removes the legacy scripts/compile-all.sh script and points the compile task to tsdeno compile
## 2026-03-15 - 1.13.2 - fix(scripts) ## 2026-03-15 - 1.13.2 - fix(scripts)
install production dependencies before compiling binaries and exclude local node_modules from builds install production dependencies before compiling binaries and exclude local node_modules from builds
- Adds a dependency installation step using the application entrypoint before cross-platform compilation - Adds a dependency installation step using the application entrypoint before cross-platform compilation
- Updates all deno compile targets to use --node-modules-dir=none to avoid bundling local node_modules - Updates all deno compile targets to use --node-modules-dir=none to avoid bundling local node_modules
## 2026-03-15 - 1.13.1 - fix(deno) ## 2026-03-15 - 1.13.1 - fix(deno)
remove nodeModulesDir from Deno configuration remove nodeModulesDir from Deno configuration
- Drops the explicit nodeModulesDir setting from deno.json. - Drops the explicit nodeModulesDir setting from deno.json.
- Keeps the package version unchanged at 1.13.0 while simplifying runtime configuration. - Keeps the package version unchanged at 1.13.0 while simplifying runtime configuration.
## 2026-03-15 - 1.13.0 - feat(install) ## 2026-03-15 - 1.13.0 - feat(install)
improve installer with version selection, service restart handling, and upgrade documentation improve installer with version selection, service restart handling, and upgrade documentation
- Adds installer command-line options for help, specific version selection, and custom install directory. - Adds installer command-line options for help, specific version selection, and custom install directory.
@@ -348,12 +449,14 @@ improve installer with version selection, service restart handling, and upgrade
- Preserves Onebox data directories, stops and restarts the systemd service during updates, and refreshes installation instructions in the README including upgrade usage. - Preserves Onebox data directories, stops and restarts the systemd service during updates, and refreshes installation instructions in the README including upgrade usage.
## 2026-03-15 - 1.12.1 - fix(package.json) ## 2026-03-15 - 1.12.1 - fix(package.json)
update package metadata update package metadata
- Single metadata-only file changed (+1, -1) - Single metadata-only file changed (+1, -1)
- No source code or runtime behavior modified; safe patch release - No source code or runtime behavior modified; safe patch release
## 2026-03-15 - 1.12.0 - feat(cli,release) ## 2026-03-15 - 1.12.0 - feat(cli,release)
add self-upgrade command and automate CI, release, and npm publishing workflows add self-upgrade command and automate CI, release, and npm publishing workflows
- adds a new `onebox upgrade` CLI command that checks the latest release and reinstalls the current binary via the installer script - adds a new `onebox upgrade` CLI command that checks the latest release and reinstalls the current binary via the installer script
@@ -361,6 +464,7 @@ add self-upgrade command and automate CI, release, and npm publishing workflows
- adds a reusable release template describing installation options, supported platforms, and checksum availability - adds a reusable release template describing installation options, supported platforms, and checksum availability
## 2026-03-03 - 1.11.0 - feat(services) ## 2026-03-03 - 1.11.0 - feat(services)
map backend service data to UI components, add stats & logs parsing, fetch service stats, and fix logs request param map backend service data to UI components, add stats & logs parsing, fetch service stats, and fix logs request param
- Fix: rename service logs request property from 'lines' to 'tail' when calling typedRequest - Fix: rename service logs request property from 'lines' to 'tail' when calling typedRequest
@@ -370,21 +474,24 @@ map backend service data to UI components, add stats & logs parsing, fetch servi
- Parse and normalize logs into timestamp/message pairs for the detail view - Parse and normalize logs into timestamp/message pairs for the detail view
## 2026-03-02 - 1.10.3 - fix(bin) ## 2026-03-02 - 1.10.3 - fix(bin)
make bin/onebox-wrapper.js executable make bin/onebox-wrapper.js executable
- Metadata-only change: file mode updated for bin/onebox-wrapper.js to include the executable bit - Metadata-only change: file mode updated for bin/onebox-wrapper.js to include the executable bit
- No source or behavior changes to the code - No source or behavior changes to the code
## 2026-03-02 - 1.10.2 - fix(build) ## 2026-03-02 - 1.10.2 - fix(build)
update build/watch configuration, switch to esbuild bundler and tswatch, and bump catalog and tooling dependencies update build/watch configuration, switch to esbuild bundler and tswatch, and bump catalog and tooling dependencies
- Switch watch script to 'tswatch' (replaced previous concurrently command invoking deno + tswatch). - Switch watch script to 'tswatch' (replaced previous concurrently command invoking deno + tswatch).
- npmextra.json: set bundler to 'esbuild', enable production mode, include html/index.html in the bundle, and extend watchPatterns to include ./html/**/*. - npmextra.json: set bundler to 'esbuild', enable production mode, include html/index.html in the bundle, and extend watchPatterns to include ./html/\*_/_.
- Backend watcher: expanded watch globs and changed command to include --unstable-ffi and runtime flags (--ephemeral --monitor); restart and debounce kept. - Backend watcher: expanded watch globs and changed command to include --unstable-ffi and runtime flags (--ephemeral --monitor); restart and debounce kept.
- Bump runtime deps: @design.estate/dees-catalog -> ^3.43.3, @serve.zone/catalog -> ^2.5.0. - Bump runtime deps: @design.estate/dees-catalog -> ^3.43.3, @serve.zone/catalog -> ^2.5.0.
- Bump devDependencies: @git.zone/tsbundle -> ^2.9.0, @git.zone/tswatch -> ^3.2.0. - Bump devDependencies: @git.zone/tsbundle -> ^2.9.0, @git.zone/tswatch -> ^3.2.0.
## 2026-02-24 - 1.10.1 - fix(package.json) ## 2026-02-24 - 1.10.1 - fix(package.json)
update package metadata update package metadata
- Single metadata-only file changed (+1 -1) - Single metadata-only file changed (+1 -1)
@@ -392,6 +499,7 @@ update package metadata
- Current package version is 1.10.0; recommend patch bump to 1.10.1 - Current package version is 1.10.0; recommend patch bump to 1.10.1
## 2026-02-24 - 1.10.0 - feat(opsserver) ## 2026-02-24 - 1.10.0 - feat(opsserver)
introduce OpsServer (TypedRequest API) and new lightweight web UI; replace legacy Angular UI and add typed interfaces introduce OpsServer (TypedRequest API) and new lightweight web UI; replace legacy Angular UI and add typed interfaces
- Add OpsServer (ts/opsserver) with TypedRequest handlers for admin, services, platform, dns, domains, registry, network, backups, schedules, settings and logs. - Add OpsServer (ts/opsserver) with TypedRequest handlers for admin, services, platform, dns, domains, registry, network, backups, schedules, settings and logs.
@@ -404,21 +512,24 @@ introduce OpsServer (TypedRequest API) and new lightweight web UI; replace legac
- Note: This adds many new endpoints and internal API changes (TypedRequest-based); consumers of the old UI/HTTP endpoints should migrate to the new OpsServer TypedRequest API and web components. - Note: This adds many new endpoints and internal API changes (TypedRequest-based); consumers of the old UI/HTTP endpoints should migrate to the new OpsServer TypedRequest API and web components.
## 2025-12-03 - 1.9.2 - fix(ui) ## 2025-12-03 - 1.9.2 - fix(ui)
Add VS Code configs for the UI workspace and normalize dark theme CSS variables Add VS Code configs for the UI workspace and normalize dark theme CSS variables
- Add VS Code workspace files under ui/.vscode: - Add VS Code workspace files under ui/.vscode:
- - extensions.json: recommend the Angular language support extension - - extensions.json: recommend the Angular language support extension
- - launch.json: Chrome launch configurations for 'ng serve' and 'ng test' (preLaunchTask hooks) - - launch.json: Chrome launch configurations for 'ng serve' and 'ng test' (preLaunchTask hooks)
- - tasks.json: npm 'start' and 'test' tasks with a background TypeScript problem matcher to improve dev workflow - - tasks.json: npm 'start' and 'test' tasks with a background TypeScript problem matcher to improve dev workflow
- Update ui/src/styles.css dark theme variables to use neutral black/gray HSL values for background, foreground, cards, popovers, accents, borders, inputs and ring to improve contrast and consistency - Update ui/src/styles.css dark theme variables to use neutral black/gray HSL values for background, foreground, cards, popovers, accents, borders, inputs and ring to improve contrast and consistency
## 2025-11-27 - 1.9.1 - fix(ui) ## 2025-11-27 - 1.9.1 - fix(ui)
Correct import success toast and add VS Code launch/tasks recommendations for the UI Correct import success toast and add VS Code launch/tasks recommendations for the UI
- Fix backup import success toast in backups-tab.component to reference response.data.service.name (previously response.data.serviceName), preventing incorrect service name display. - Fix backup import success toast in backups-tab.component to reference response.data.service.name (previously response.data.serviceName), preventing incorrect service name display.
- Add VS Code workspace settings for the UI: extensions recommendation, launch configurations for 'ng serve' and 'ng test', and npm tasks for start/test to simplify local development and debugging. - Add VS Code workspace settings for the UI: extensions recommendation, launch configurations for 'ng serve' and 'ng test', and npm tasks for start/test to simplify local development and debugging.
## 2025-11-27 - 1.9.0 - feat(backups) ## 2025-11-27 - 1.9.0 - feat(backups)
Add backup import API and improve backup download/import flow in UI Add backup import API and improve backup download/import flow in UI
- Backend: add /api/backups/import endpoint to accept multipart file uploads or JSON with a URL and import backups (saves temp file, validates .tar.enc, calls backupManager.restoreBackup in import mode). - Backend: add /api/backups/import endpoint to accept multipart file uploads or JSON with a URL and import backups (saves temp file, validates .tar.enc, calls backupManager.restoreBackup in import mode).
@@ -428,6 +539,7 @@ Add backup import API and improve backup download/import flow in UI
- Dev: add VS Code launch, tasks and recommended extensions for the ui workspace to simplify local development. - Dev: add VS Code launch, tasks and recommended extensions for the ui workspace to simplify local development.
## 2025-11-27 - 1.8.0 - feat(backup) ## 2025-11-27 - 1.8.0 - feat(backup)
Add backup scheduling system with GFS retention, API and UI integration Add backup scheduling system with GFS retention, API and UI integration
- Introduce backup scheduling subsystem (BackupScheduler) and integrate it into Onebox lifecycle (init & shutdown) - Introduce backup scheduling subsystem (BackupScheduler) and integrate it into Onebox lifecycle (init & shutdown)
@@ -440,6 +552,7 @@ Add backup scheduling system with GFS retention, API and UI integration
- Type and repository updates across codebase to support schedule-aware backups, schedule CRUD, and retention enforcement - Type and repository updates across codebase to support schedule-aware backups, schedule CRUD, and retention enforcement
## 2025-11-27 - 1.7.0 - feat(backup) ## 2025-11-27 - 1.7.0 - feat(backup)
Add backup system: BackupManager, DB schema, API endpoints and UI support Add backup system: BackupManager, DB schema, API endpoints and UI support
Introduce a complete service backup/restore subsystem with encrypted archives, database records and REST endpoints. Implements BackupManager with export/import for service config, platform resources (MongoDB, MinIO, ClickHouse), and Docker images; adds BackupRepository and migrations for backups table and include_image_in_backup; integrates backup flows into the HTTP API and the UI client; exposes backup password management and restore modes (restore/import/clone). Wire BackupManager into Onebox initialization. Introduce a complete service backup/restore subsystem with encrypted archives, database records and REST endpoints. Implements BackupManager with export/import for service config, platform resources (MongoDB, MinIO, ClickHouse), and Docker images; adds BackupRepository and migrations for backups table and include_image_in_backup; integrates backup flows into the HTTP API and the UI client; exposes backup password management and restore modes (restore/import/clone). Wire BackupManager into Onebox initialization.
@@ -452,6 +565,7 @@ Introduce a complete service backup/restore subsystem with encrypted archives, d
- Integrate BackupManager into Onebox core (initialized in Onebox constructor) and wire HTTP handlers to use the new manager; add DB repository export/import glue so backups are stored and referenced by ID. - Integrate BackupManager into Onebox core (initialized in Onebox constructor) and wire HTTP handlers to use the new manager; add DB repository export/import glue so backups are stored and referenced by ID.
## 2025-11-27 - 1.6.0 - feat(ui.dashboard) ## 2025-11-27 - 1.6.0 - feat(ui.dashboard)
Add Resource Usage card to dashboard and make dashboard cards full-height; add VSCode launch/tasks/config Add Resource Usage card to dashboard and make dashboard cards full-height; add VSCode launch/tasks/config
- Introduce ResourceUsageCardComponent and include it as a full-width row in the dashboard layout. - Introduce ResourceUsageCardComponent and include it as a full-width row in the dashboard layout.
@@ -460,6 +574,7 @@ Add Resource Usage card to dashboard and make dashboard cards full-height; add V
- Add VSCode workspace configuration: recommended Angular extension, launch configurations for ng serve/ng test, and npm tasks to run/start the UI in development. - Add VSCode workspace configuration: recommended Angular extension, launch configurations for ng serve/ng test, and npm tasks to run/start the UI in development.
## 2025-11-27 - 1.5.0 - feat(network) ## 2025-11-27 - 1.5.0 - feat(network)
Add traffic stats endpoint and dashboard UI; enhance platform services and certificate health reporting Add traffic stats endpoint and dashboard UI; enhance platform services and certificate health reporting
- Add /api/network/traffic-stats GET endpoint to the HTTP API with an optional minutes query parameter (validated, 1-60). - Add /api/network/traffic-stats GET endpoint to the HTTP API with an optional minutes query parameter (validated, 1-60).
@@ -471,26 +586,29 @@ Add traffic stats endpoint and dashboard UI; enhance platform services and certi
- Add VSCode workspace launch/tasks recommendations for the UI development environment. - Add VSCode workspace launch/tasks recommendations for the UI development environment.
## 2025-11-26 - 1.4.0 - feat(platform-services) ## 2025-11-26 - 1.4.0 - feat(platform-services)
Add ClickHouse platform service support and improve related healthchecks and tooling Add ClickHouse platform service support and improve related healthchecks and tooling
- Add ClickHouse as a first-class platform service: register provider, provision/cleanup support and env var injection - Add ClickHouse as a first-class platform service: register provider, provision/cleanup support and env var injection
- Expose ClickHouse endpoints in the HTTP API routing (list/get/start/stop/stats) and map default port (8123) - Expose ClickHouse endpoints in the HTTP API routing (list/get/start/stop/stats) and map default port (8123)
- Enable services to request ClickHouse as a platform requirement (enableClickHouse / platformRequirements) during deploy/provision flows - Enable services to request ClickHouse as a platform requirement (enableClickHouse / platformRequirements) during deploy/provision flows
- Fix ClickHouse container health check to use absolute wget path (/usr/bin/wget) for more reliable in-container checks - Fix ClickHouse container health check to use absolute wget path (/usr/bin/wget) for more reliable in-container checks
- Add VS Code workspace launch/tasks/extensions configs for the UI (ui/.vscode/*) to improve local dev experience - Add VS Code workspace launch/tasks/extensions configs for the UI (ui/.vscode/\*) to improve local dev experience
## 2025-11-26 - 1.3.0 - feat(platform-services) ## 2025-11-26 - 1.3.0 - feat(platform-services)
Add ClickHouse platform service support (provider, types, provisioning, UI and port mappings) Add ClickHouse platform service support (provider, types, provisioning, UI and port mappings)
- Introduce ClickHouse as a first-class platform service: added ClickHouseProvider and registered it in PlatformServicesManager - Introduce ClickHouse as a first-class platform service: added ClickHouseProvider and registered it in PlatformServicesManager
- Support provisioning ClickHouse resources for user services and storing encrypted credentials in platform_resources - Support provisioning ClickHouse resources for user services and storing encrypted credentials in platform_resources
- Add ClickHouse to core types (TPlatformServiceType, IPlatformRequirements, IServiceDeployOptions) and service DB handling so services can request ClickHouse - Add ClickHouse to core types (TPlatformServiceType, IPlatformRequirements, IServiceDeployOptions) and service DB handling so services can request ClickHouse
- Inject ClickHouse-related environment variables into deployed services (CLICKHOUSE_* mappings) when provisioning resources - Inject ClickHouse-related environment variables into deployed services (CLICKHOUSE\_\* mappings) when provisioning resources
- Expose ClickHouse default port (8123) in platform port mappings / network targets - Expose ClickHouse default port (8123) in platform port mappings / network targets
- UI: add checkbox and description for enabling ClickHouse during service creation; form now submits enableClickHouse - UI: add checkbox and description for enabling ClickHouse during service creation; form now submits enableClickHouse
- Add VS Code recommendations and launch/tasks for the UI development workflow - Add VS Code recommendations and launch/tasks for the UI development workflow
## 2025-11-26 - 1.2.1 - fix(platform-services/minio) ## 2025-11-26 - 1.2.1 - fix(platform-services/minio)
Improve MinIO provider: reuse existing data and credentials, use host-bound port for provisioning, and safer provisioning/deprovisioning Improve MinIO provider: reuse existing data and credentials, use host-bound port for provisioning, and safer provisioning/deprovisioning
- MinIO provider now detects existing data directory and will reuse stored admin credentials when available instead of regenerating them. - MinIO provider now detects existing data directory and will reuse stored admin credentials when available instead of regenerating them.
@@ -501,15 +619,17 @@ Improve MinIO provider: reuse existing data and credentials, use host-bound port
- Added VSCode workspace files (extensions, launch, tasks) for the ui project to improve developer experience. - Added VSCode workspace files (extensions, launch, tasks) for the ui project to improve developer experience.
## 2025-11-26 - 1.2.0 - feat(ui) ## 2025-11-26 - 1.2.0 - feat(ui)
Sync UI tab state with URL and update routes/links Sync UI tab state with URL and update routes/links
- Add VSCode workspace recommendations, launch and tasks configs for the UI (ui/.vscode/*) - Add VSCode workspace recommendations, launch and tasks configs for the UI (ui/.vscode/\*)
- Update Angular routes to support tab URL segments and default redirects for services, network and registries - Update Angular routes to support tab URL segments and default redirects for services, network and registries
- Change service detail route to use explicit 'detail/:name' path and update links accordingly - Change service detail route to use explicit 'detail/:name' path and update links accordingly
- Make ServicesList, Registries and Network components read tab from route params and navigate on tab changes; add ngOnDestroy to unsubscribe - Make ServicesList, Registries and Network components read tab from route params and navigate on tab changes; add ngOnDestroy to unsubscribe
- Update Domain detail template link to point to the new services detail route - Update Domain detail template link to point to the new services detail route
## 2025-11-26 - 1.1.0 - feat(platform-services) ## 2025-11-26 - 1.1.0 - feat(platform-services)
Add platform service log streaming, improve health checks and provisioning robustness Add platform service log streaming, improve health checks and provisioning robustness
- Add WebSocket log streaming support for platform services (backend + UI) to stream MinIO/MongoDB/Caddy logs in real time - Add WebSocket log streaming support for platform services (backend + UI) to stream MinIO/MongoDB/Caddy logs in real time
@@ -529,6 +649,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased] ## [Unreleased]
### Added ### Added
- Initial project structure - Initial project structure
- Core architecture classes - Core architecture classes
- Docker container management - Docker container management
@@ -547,4 +668,5 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [1.0.0] - TBD ## [1.0.0] - TBD
### Added ### Added
- First stable release - First stable release
+17 -16
View File
@@ -1,6 +1,6 @@
{ {
"name": "@serve.zone/onebox", "name": "@serve.zone/onebox",
"version": "1.22.2", "version": "1.24.7",
"exports": "./mod.ts", "exports": "./mod.ts",
"tasks": { "tasks": {
"test": "deno test --allow-all test/", "test": "deno test --allow-all test/",
@@ -9,24 +9,25 @@
"dev": "pnpm run watch" "dev": "pnpm run watch"
}, },
"imports": { "imports": {
"@std/path": "jsr:@std/path@^1.1.2", "@std/path": "jsr:@std/path@^1.1.4",
"@std/fs": "jsr:@std/fs@^1.0.19", "@std/fs": "jsr:@std/fs@^1.0.23",
"@std/http": "jsr:@std/http@^1.0.21", "@std/http": "jsr:@std/http@^1.1.0",
"@std/assert": "jsr:@std/assert@^1.0.15", "@std/assert": "jsr:@std/assert@^1.0.19",
"@std/encoding": "jsr:@std/encoding@^1.0.10", "@std/encoding": "jsr:@std/encoding@^1.0.10",
"@db/sqlite": "jsr:@db/sqlite@0.12.0", "@db/sqlite": "jsr:@db/sqlite@0.13.0",
"@apiclient.xyz/docker": "npm:@apiclient.xyz/docker@^5.1.1", "@apiclient.xyz/docker": "npm:@apiclient.xyz/docker@^5.1.4",
"@apiclient.xyz/cloudflare": "npm:@apiclient.xyz/cloudflare@6.4.3", "@apiclient.xyz/cloudflare": "npm:@apiclient.xyz/cloudflare@7.1.0",
"@push.rocks/smartacme": "npm:@push.rocks/smartacme@^8.0.0", "@push.rocks/smartacme": "npm:@push.rocks/smartacme@^9.5.0",
"@push.rocks/smartregistry": "npm:@push.rocks/smartregistry@^2.2.0", "@push.rocks/smartregistry": "npm:@push.rocks/smartregistry@^2.9.2",
"@push.rocks/smarts3": "npm:@push.rocks/smarts3@^5.1.0", "@push.rocks/smartstorage": "npm:@push.rocks/smartstorage@^6.5.1",
"@push.rocks/taskbuffer": "npm:@push.rocks/taskbuffer@^3.1.0", "@push.rocks/taskbuffer": "npm:@push.rocks/taskbuffer@^8.0.2",
"@api.global/typedrequest-interfaces": "npm:@api.global/typedrequest-interfaces@^3.0.19", "@api.global/typedrequest-interfaces": "npm:@api.global/typedrequest-interfaces@^3.0.19",
"@api.global/typedrequest": "npm:@api.global/typedrequest@^3.2.6", "@api.global/typedrequest": "npm:@api.global/typedrequest@^3.3.1",
"@api.global/typedserver": "npm:@api.global/typedserver@^8.3.1", "@api.global/typedserver": "npm:@api.global/typedserver@^8.4.6",
"@push.rocks/smartguard": "npm:@push.rocks/smartguard@^3.1.0", "@push.rocks/smartguard": "npm:@push.rocks/smartguard@^3.1.0",
"@push.rocks/smartjwt": "npm:@push.rocks/smartjwt@^2.2.1", "@push.rocks/smartjwt": "npm:@push.rocks/smartjwt@^2.2.2",
"@api.global/typedsocket": "npm:@api.global/typedsocket@^4.1.2" "@api.global/typedsocket": "npm:@api.global/typedsocket@^4.1.3",
"@serve.zone/containerarchive": "npm:@serve.zone/containerarchive@^0.1.3"
}, },
"compilerOptions": { "compilerOptions": {
"lib": [ "lib": [
-36196
View File
File diff suppressed because one or more lines are too long
-33
View File
@@ -1,33 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta
name="viewport"
content="user-scalable=0, initial-scale=1, maximum-scale=1, minimum-scale=1, width=device-width, height=device-height"
/>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<meta name="theme-color" content="#000000" />
<title>Onebox</title>
<link rel="preconnect" href="https://assetbroker.lossless.one/" crossorigin>
<link rel="stylesheet" href="https://assetbroker.lossless.one/fonts/fonts.css">
<style>
html {
-ms-text-size-adjust: 100%;
-webkit-text-size-adjust: 100%;
}
body {
position: relative;
background: #000;
margin: 0px;
}
</style>
</head>
<body>
<noscript>
<p style="color: #fff; text-align: center; margin-top: 100px;">
JavaScript is required to run the Onebox dashboard.
</p>
</noscript>
</body>
<script defer type="module" src="/bundle.js"></script>
</html>
+1 -1
View File
@@ -305,6 +305,6 @@ else
echo " onebox service add myapp --image nginx:latest --domain app.example.com" echo " onebox service add myapp --image nginx:latest --domain app.example.com"
echo "" echo ""
echo " Web UI: http://localhost:3000" echo " Web UI: http://localhost:3000"
echo " Default credentials: admin / admin" echo " Initial admin credentials are written to the service logs unless ONEBOX_ADMIN_PASSWORD is set."
fi fi
echo "" echo ""
+13 -9
View File
@@ -1,6 +1,6 @@
{ {
"name": "@serve.zone/onebox", "name": "@serve.zone/onebox",
"version": "1.22.2", "version": "1.24.7",
"description": "Self-hosted container platform with automatic SSL and DNS - a mini Heroku for single servers", "description": "Self-hosted container platform with automatic SSL and DNS - a mini Heroku for single servers",
"main": "mod.ts", "main": "mod.ts",
"type": "module", "type": "module",
@@ -26,7 +26,7 @@
"paas", "paas",
"deployment" "deployment"
], ],
"author": "Lossless GmbH", "author": "Task Venture Capital GmbH",
"license": "MIT", "license": "MIT",
"repository": { "repository": {
"type": "git", "type": "git",
@@ -55,14 +55,18 @@
"packageManager": "pnpm@10.18.1+sha512.77a884a165cbba2d8d1c19e3b4880eee6d2fcabd0d879121e282196b80042351d5eb3ca0935fa599da1dc51265cc68816ad2bddd2a2de5ea9fdf92adbec7cd34", "packageManager": "pnpm@10.18.1+sha512.77a884a165cbba2d8d1c19e3b4880eee6d2fcabd0d879121e282196b80042351d5eb3ca0935fa599da1dc51265cc68816ad2bddd2a2de5ea9fdf92adbec7cd34",
"dependencies": { "dependencies": {
"@api.global/typedrequest-interfaces": "^3.0.19", "@api.global/typedrequest-interfaces": "^3.0.19",
"@api.global/typedsocket": "^4.1.2", "@api.global/typedsocket": "^4.1.3",
"@design.estate/dees-catalog": "^3.43.3", "@design.estate/dees-catalog": "^3.81.0",
"@design.estate/dees-element": "^2.1.6", "@design.estate/dees-element": "^2.2.4",
"@serve.zone/catalog": "^2.8.0" "@serve.zone/catalog": "^2.12.4"
}, },
"devDependencies": { "devDependencies": {
"@git.zone/tsbundle": "^2.9.0", "@git.zone/tsbundle": "^2.10.1",
"@git.zone/tsdeno": "^1.2.0", "@git.zone/tsdeno": "^1.3.1",
"@git.zone/tswatch": "^3.2.0" "@git.zone/tswatch": "^3.3.3"
},
"private": true,
"pnpm": {
"overrides": {}
} }
} }
+966 -1106
View File
File diff suppressed because it is too large Load Diff
+25 -16
View File
@@ -3,6 +3,7 @@
## SSL Certificate Storage (November 2025) ## SSL Certificate Storage (November 2025)
SSL certificates are now stored directly in the SQLite database as PEM content instead of file paths: SSL certificates are now stored directly in the SQLite database as PEM content instead of file paths:
- `ISslCertificate` and `ICertificate` interfaces use `certPem`, `keyPem`, `fullchainPem` properties - `ISslCertificate` and `ICertificate` interfaces use `certPem`, `keyPem`, `fullchainPem` properties
- Database migration 8 converted the `certificates` table schema - Database migration 8 converted the `certificates` table schema
- No filesystem storage for certificates - everything in DB - No filesystem storage for certificates - everything in DB
@@ -16,6 +17,7 @@ SSL certificates are now stored directly in the SQLite database as PEM content i
The database layer has been refactored into a repository pattern: The database layer has been refactored into a repository pattern:
**Directory Structure:** **Directory Structure:**
``` ```
ts/database/ ts/database/
├── index.ts # Main OneboxDatabase class (composes repositories, handles migrations) ├── index.ts # Main OneboxDatabase class (composes repositories, handles migrations)
@@ -32,52 +34,59 @@ ts/database/
``` ```
**Import paths:** **Import paths:**
- Main: `import { OneboxDatabase } from './database/index.ts'` - Main: `import { OneboxDatabase } from './database/index.ts'`
- Legacy (deprecated): `import { OneboxDatabase } from './classes/database.ts'` (re-exports from new location) - Legacy (deprecated): `import { OneboxDatabase } from './classes/database.ts'` (re-exports from new location)
**API Compatibility:** **API Compatibility:**
- The `OneboxDatabase` class maintains the same public API - The `OneboxDatabase` class maintains the same public API
- All methods delegate to the appropriate repository - All methods delegate to the appropriate repository
- No breaking changes for existing code - No breaking changes for existing code
## Current Migration Version: 8 ## Current Migration Version: 15
Migration 8 converted certificate storage from file paths to PEM content. Migration 15 renames the core reverse proxy platform service from `caddy` to `smartproxy`.
## Reverse Proxy (November 2025 - Caddy Docker Service) ## Reverse Proxy (April 2026 - SmartProxy Docker Service)
The reverse proxy uses **Caddy** running as a Docker Swarm service for production-grade reverse proxying with native SNI support, HTTP/2, HTTP/3, and WebSocket handling. The reverse proxy uses **SmartProxy** running as a Docker Swarm service for production-grade reverse proxying with TLS termination and WebSocket handling.
**Architecture:** **Architecture:**
- Caddy runs as Docker Swarm service (`onebox-caddy`) on the overlay network
- No binary download required - uses `caddy:2-alpine` Docker image - SmartProxy runs as Docker Swarm service (`onebox-smartproxy`) on the overlay network
- Configuration pushed dynamically via Caddy Admin API (port 2019) - No host binary download required - uses `code.foss.global/host.today/ht-docker-smartproxy:latest`
- Routes are pushed dynamically via the SmartProxy admin API (host port 2019)
- Automatic HTTPS disabled - certificates managed externally via SmartACME - Automatic HTTPS disabled - certificates managed externally via SmartACME
- Zero-downtime configuration updates - Zero-downtime configuration updates
- Services reached by Docker service name (e.g., `onebox-hello-world:80`) - Services reached by Docker service name (e.g., `onebox-hello-world:80`)
**Key files:** **Key files:**
- `ts/classes/caddy.ts` - CaddyManager class for Docker service and Admin API
- `ts/classes/reverseproxy.ts` - Delegates to CaddyManager - `ts/classes/smartproxy.ts` - SmartProxyManager class for Docker service and Admin API
- `ts/classes/reverseproxy.ts` - Delegates to SmartProxyManager
**Certificate workflow:** **Certificate workflow:**
1. `CertRequirementManager` creates requirements for domains 1. `CertRequirementManager` creates requirements for domains
2. Daemon processes requirements via `certmanager.ts` 2. Daemon processes requirements via `certmanager.ts`
3. Certificates stored in database (PEM content) 3. Certificates stored in database (PEM content)
4. `reverseProxy.addCertificate()` passes PEM content to Caddy via `load_pem` (inline in config) 4. `reverseProxy.addCertificate()` passes PEM content to SmartProxy route config
5. Caddy serves TLS with the loaded certificates (no volume mounts needed) 5. SmartProxy serves TLS with the loaded certificates (no volume mounts needed)
**Docker Service Configuration:** **Docker Service Configuration:**
- Service name: `onebox-caddy`
- Image: `caddy:2-alpine` - Service name: `onebox-smartproxy`
- Image: `code.foss.global/host.today/ht-docker-smartproxy:latest`
- Network: `onebox-network` (overlay, attachable) - Network: `onebox-network` (overlay, attachable)
- Startup: Writes initial config with `admin.listen: 0.0.0.0:2019` for host access - Startup: SmartProxy daemon admin API listens on container port 3000, published on host port 2019
**Port Mapping:** **Port Mapping:**
- Dev mode: HTTP on 8080, HTTPS on 8443, Admin on 2019 - Dev mode: HTTP on 8080, HTTPS on 8443, Admin on 2019
- Production: HTTP on 80, HTTPS on 443, Admin on 2019 - Production: HTTP on 80, HTTPS on 443, Admin on 2019
- All ports use `PublishMode: 'host'` for direct binding - All ports use `PublishMode: 'host'` for direct binding
**Log Receiver:** **Log Receiver:**
- Caddy sends access logs to `tcp/172.17.0.1:9999` (Docker bridge gateway)
- `CaddyLogReceiver` on host receives and processes logs - `ProxyLogReceiver` remains the host-side access-log stream endpoint for proxy log integrations
+181 -518
View File
@@ -1,590 +1,253 @@
# @serve.zone/onebox # @serve.zone/onebox
> 🚀 Self-hosted Docker Swarm platform with Caddy reverse proxy, automatic SSL, and real-time WebSocket updates Onebox is a self-hosted application platform for a single server. It combines Docker, SmartProxy routing, a typed web control plane, app templates, platform services, and containerarchive-powered backups into one Deno-distributed binary.
**Onebox** transforms any Linux server into a powerful container hosting platform. Deploy Docker Swarm services with automatic HTTPS, DNS configuration, and Caddy reverse proxy running as a Docker service - all managed through a beautiful Angular web interface with real-time updates.
## Issue Reporting and Security ## Issue Reporting and Security
For reporting bugs, issues, or security vulnerabilities, please visit [community.foss.global/](https://community.foss.global/). This is the central community hub for all issue reporting. Developers who sign and comply with our contribution agreement and go through identification can also get a [code.foss.global/](https://code.foss.global/) account to submit Pull Requests directly. For reporting bugs, issues, or security vulnerabilities, please visit [community.foss.global/](https://community.foss.global/). This is the central community hub for all issue reporting. Developers who sign and comply with our contribution agreement and go through identification can also get a [code.foss.global/](https://code.foss.global/) account to submit Pull Requests directly.
## What Makes Onebox Different? 🎯 ## What Onebox Does
- **Caddy Reverse Proxy in Docker** - Production-grade HTTP/HTTPS proxy running as a Swarm service with native service discovery, HTTP/2, HTTP/3, and bidirectional WebSocket proxying Onebox turns a Linux host into a small PaaS that can run your own containers and curated app templates without a separate control plane. It is designed for the "one good server" use case: one machine, one local Docker runtime, one web dashboard, one operational surface.
- **Docker Swarm First** - All workloads (including the reverse proxy!) run as Swarm services on the overlay network for seamless service-to-service communication
- **Real-time Everything** - WebSocket-powered live updates for service status, logs, and metrics across all connected clients
- **Single Executable** - Compiles to a standalone binary - just run it, no dependencies
- **Private Registry Included** - Built-in Docker registry with token-based auth and auto-deploy on push
- **Zero Config SSL** - Automatic Let's Encrypt certificates with inline `load_pem` (no volume mounts needed)
- **Cloudflare Integration** - Automatic DNS record management and zone synchronization
- **Modern Stack** - Deno runtime + SQLite database + Angular 19 UI
## Features ✨ - Deploys Docker workloads from external images or Onebox App Store templates.
- Uses the local Docker socket and creates the `onebox-network` network automatically.
- Runs workloads as Docker Swarm services when Swarm is active, otherwise as standalone containers.
- Starts a SmartProxy-backed reverse proxy for HTTP/S routing and WebSocket traffic.
- Serves the web UI and TypedRequest/TypedSocket API through `OpsServer` on port `3000` by default.
- Stores platform state in SQLite.
- Can provision app dependencies through local platform providers: MongoDB, MinIO/S3, ClickHouse, MariaDB, and Redis.
- Tracks domains, Cloudflare DNS records, ACME certificates, service logs, metrics, backup schedules, and app template metadata.
- Can sync routes and import certificates from an external `dcrouter` gateway when configured.
### Core Platform ## Architecture
- 🐳 **Docker Swarm Management** - Deploy, scale, and orchestrate services with Swarm mode
- 🌐 **Caddy Reverse Proxy** - Production-grade proxy running as Docker service with SNI, HTTP/2, HTTP/3
- 🔒 **Automatic SSL Certificates** - Let's Encrypt integration with hot-reload and renewal monitoring
- ☁️ **Cloudflare DNS Integration** - Automatic DNS record creation and zone synchronization
- 📦 **Built-in Registry** - Private Docker registry with per-service tokens and auto-update
- 🔄 **Real-time WebSocket Updates** - Live service status, logs, and system events
### Monitoring & Management ```text
- 📊 **Metrics Collection** - Historical CPU, memory, and network stats (every 60s) browser / CLI
- 📝 **Centralized Logging** - Container logs with streaming and retention policies |
- 🎨 **Angular Web UI** - Modern, responsive interface with real-time updates v
- 👥 **Multi-user Support** - Role-based access control (admin/user) OpsServer :3000
- 💾 **SQLite Database** - Embedded, zero-configuration storage - bundled web UI
- TypedRequest handlers
- TypedSocket dashboard events
|
v
Onebox coordinator
- SQLite repositories
- Docker manager
- SmartProxy route manager
- DNS and SSL managers
- platform service providers
- app store manager
- backup manager and scheduler
|
v
Docker host
- onebox-network
- SmartProxy
- user services
- optional platform services
```
### Developer Experience `Onebox` is the central class. It initializes the database, Docker, SmartProxy, DNS, SSL, platform services, App Store, backup subsystem, optional external gateway integration, and the web/API server.
- 🚀 **Auto-update on Push** - Push to registry and services update automatically
- 🔐 **Private Registry Support** - Use Docker Hub, Gitea, or custom registries
- 🔄 **Systemd Integration** - Run as a daemon with auto-restart
- 🎛️ **Full CLI & API** - Manage everything from terminal or HTTP API
## Quick Start 🏁 ## Installation
### Installation Install the released binary:
```bash ```bash
# One-line install (recommended)
curl -sSL https://code.foss.global/serve.zone/onebox/raw/branch/main/install.sh | sudo bash curl -sSL https://code.foss.global/serve.zone/onebox/raw/branch/main/install.sh | sudo bash
# Install a specific version
curl -sSL https://code.foss.global/serve.zone/onebox/raw/branch/main/install.sh | sudo bash -s -- --version v1.11.0
# Or install from npm
pnpm install -g @serve.zone/onebox
``` ```
### First Run For published wrapper builds, install with pnpm:
```bash
pnpm add --global @serve.zone/onebox
```
This repository currently marks the package as private; use the install script or a released wrapper package when available.
The package wrapper downloads the platform-specific binary during postinstall. Current release assets are named for Linux, macOS, and Windows on x64/ARM64 where available.
## Quick Start
Run a foreground development instance:
```bash ```bash
# Start the server in development mode
onebox server --ephemeral onebox server --ephemeral
# In another terminal, deploy your first service
onebox service add myapp \
--image nginx:latest \
--domain app.example.com \
--port 80
``` ```
### Access the Web UI Open the dashboard:
Open `http://localhost:3000` in your browser. ```text
http://localhost:3000
```
**Default credentials:** Default bootstrap credentials are created when no admin user exists:
- Username: `admin`
- Password: `admin`
⚠️ **Change the default password immediately after first login!** ```text
username: admin
password: admin
```
### Production Setup Change the default password immediately after first login.
Deploy a simple service:
```bash ```bash
# Install as systemd service onebox service add web --image nginx:latest --domain web.example.com --port 80
sudo onebox daemon install
# Start the daemon
sudo onebox daemon start
# View logs
sudo onebox daemon logs
``` ```
## Architecture 🏗️ For production, install and run the systemd service:
Onebox is built with modern technologies for performance and developer experience:
```
┌─────────────────────────────────────────────────┐
│ Angular 19 Web UI │
│ (Real-time WebSocket Updates) │
└─────────────────┬───────────────────────────────┘
│ HTTP/WS
┌─────────────────▼───────────────────────────────┐
│ Deno HTTP Server (Port 3000) │
│ REST API + WebSocket Broadcast │
└─────────────────┬───────────────────────────────┘
┌─────────────────▼───────────────────────────────┐
│ Docker Swarm │
│ ┌──────────────────────────────┐ │
│ │ onebox-network (overlay) │ │
│ ├──────────────────────────────┤ │
│ │ onebox-caddy (Caddy proxy) │ │
│ │ HTTP (80) + HTTPS (443) │ │
│ │ Admin API → config updates │ │
│ ├──────────────────────────────┤ │
│ │ Your Services │ │
│ │ (reachable by service name) │ │
│ └──────────────────────────────┘ │
└─────┬───────────────────────────────────────────┘
├──► SSL Certificate Manager (Let's Encrypt)
├──► Cloudflare DNS Manager
├──► Built-in Docker Registry
└──► SQLite Database
```
### Core Components
| Component | Description |
|-----------|-------------|
| **Deno Runtime** | Modern TypeScript with built-in security |
| **Caddy Reverse Proxy** | Docker Swarm service with HTTP/2, HTTP/3, SNI, and WebSocket support |
| **Docker Swarm** | Container orchestration (all workloads run as services) |
| **SQLite Database** | Configuration, metrics, and user data |
| **WebSocket Server** | Real-time bidirectional communication |
| **Let's Encrypt** | Automatic SSL certificate management |
| **Cloudflare API** | DNS record automation |
## CLI Reference 📖
### Service Management
```bash ```bash
# Deploy a service sudo onebox systemd enable
onebox service add <name> --image <image> --domain <domain> [--port <port>] [--env KEY=VALUE] sudo onebox systemd start
sudo onebox systemd logs
# Deploy with Onebox Registry (auto-update on push)
onebox service add myapp --use-onebox-registry --domain myapp.example.com
# List services
onebox service list
# Control services
onebox service start <name>
onebox service stop <name>
onebox service restart <name>
# Remove service
onebox service remove <name>
# View logs
onebox service logs <name>
``` ```
### Server Management The systemd unit runs `onebox systemd start-daemon` with `/var/lib/onebox` as its working directory. From source or foreground runs, the default SQLite path is `./.nogit/onebox.db` relative to the current working directory.
## CLI Reference
```bash ```bash
# Start server (development) onebox <command> [options]
onebox server --ephemeral # Runs in foreground with monitoring
# Start server (production)
onebox daemon install # Install systemd service
onebox daemon start # Start daemon
onebox daemon stop # Stop daemon
onebox daemon logs # View logs
``` ```
### Registry Management Core commands:
| Command | Purpose |
| --- | --- |
| `server [--ephemeral] [--port <port>] [--monitor]` | Start the web/API server in the foreground. |
| `service add <name> --image <image> [--domain <domain>] [--port <port>] [--env KEY=VALUE]` | Deploy a workload. |
| `service list` | List known services. |
| `service start <name>` | Start a stopped service. |
| `service stop <name>` | Stop a running service. |
| `service restart <name>` | Restart a service. |
| `service remove <name>` | Remove a service and its route. |
| `service logs <name>` | Print Docker logs for a service. |
| `appstore list` | List remote app templates. |
| `appstore config <app-id> [--version <version>]` | Print app metadata and version config. |
| `appstore install <app-id> --name <name> [--domain <domain>] [--version <version>] [--env KEY=VALUE]` | Install an app template. |
| `registry add --url <url> --username <user> --password <pass>` | Store external registry credentials. |
| `registry remove --url <url>` | Remove registry credentials. |
| `registry list` | List configured registries. |
| `dns add <domain>` | Add a DNS record through the configured DNS manager. |
| `dns sync` | Sync Cloudflare domains into Onebox. |
| `ssl renew [domain]` | Renew one certificate or expiring certificates. |
| `ssl list` | List stored certificates. |
| `ssl force-renew <domain>` | Force certificate renewal for a domain. |
| `proxy reload` | Reload routes and certificates into SmartProxy. |
| `proxy test` | Check reverse proxy state. |
| `proxy status` | Print route/certificate counts and ports. |
| `systemd enable` | Install and enable the systemd unit. |
| `systemd disable` | Stop, disable, and remove the systemd unit. |
| `systemd start` | Start Onebox through systemd. |
| `systemd stop` | Stop Onebox through systemd. |
| `systemd status` | Show service status. |
| `systemd logs` | Follow `journalctl` logs. |
| `config show` | Show stored settings with secret values masked. |
| `config set <key> <value>` | Store a setting or supported secret setting. |
| `status` | Print JSON system status. |
| `upgrade` | Install the latest released binary. Requires root. |
The legacy `nginx` command name is still accepted as an alias for `proxy`, but SmartProxy is the active proxy backend.
## Configuration Notes
Useful settings include:
| Setting | Purpose |
| --- | --- |
| `serverIP` | IP address used for DNS records. |
| `cloudflareToken` | Cloudflare API token. `cloudflareAPIKey` is accepted as a legacy alias. |
| `cloudflareZoneId` | Cloudflare zone identifier. |
| `acmeEmail` | ACME account email for certificate issuance. |
| `httpPort` | OpsServer/web UI port. Defaults to `3000`. |
| `metricsInterval` | Metrics collection interval in milliseconds. |
| `backupPassword` | Secret passphrase for encrypted backup repositories. |
| `dcrouterGatewayUrl` | Optional external dcrouter API endpoint. |
| `dcrouterGatewayApiToken` | Optional external dcrouter API token. |
| `dcrouterWorkHosterId` | Optional work hoster identity used for route ownership. |
| `dcrouterTargetHost` | Optional target host advertised to dcrouter. |
| `dcrouterTargetPort` | Optional target port advertised to dcrouter. |
Example:
```bash ```bash
# Add external registry credentials onebox config set serverIP 203.0.113.10
onebox registry add --url registry.example.com --username user --password pass onebox config set acmeEmail ops@example.com
onebox config set cloudflareToken cf-token
# List registries onebox config set cloudflareZoneId zone-id
onebox registry list
# Remove registry
onebox registry remove <url>
``` ```
### DNS Management ## App Store
The App Store manager fetches catalog data from `serve.zone/appstore-apptemplates` and caches it briefly. Templates can declare platform requirements, so installing an app can automatically provision MongoDB, S3-compatible storage, ClickHouse, Redis, or MariaDB resources and inject the resulting credentials as environment variables.
```bash ```bash
# Add DNS record (requires Cloudflare config) onebox appstore list
onebox dns add <domain> onebox appstore config cloudly
onebox appstore install cloudly --name cloudly --domain cloudly.example.com --env SERVEZONE_ADMINACCOUNT=admin:change-me
# List DNS records
onebox dns list
# Sync from Cloudflare
onebox dns sync
# Remove DNS record
onebox dns remove <domain>
``` ```
### SSL Management ## Backups
Backups are built around `@serve.zone/containerarchive`. Onebox exports service configuration, platform resource metadata, supported platform data, and optionally Docker images into a content-addressed archive repository. The code also keeps compatibility paths for older `.tar.enc` backup flows.
Backup and schedule operations are primarily exposed through the OpsServer/web UI handlers.
## Development
Requirements:
- Deno for the application runtime.
- pnpm for package scripts.
- Docker for any runtime path that initializes Onebox fully.
Common tasks:
```bash ```bash
# Renew expiring certificates
onebox ssl renew
# Force renew specific domain
onebox ssl force-renew <domain>
# List certificates
onebox ssl list
```
### Configuration
```bash
# Show all settings
onebox config show
# Set configuration value
onebox config set <key> <value>
# Example: Configure Cloudflare
onebox config set cloudflareAPIKey your-api-key
onebox config set cloudflareEmail your@email.com
onebox config set cloudflareZoneID your-zone-id
```
### System Status
```bash
# Get full system status
onebox status
```
### Upgrade
```bash
# Upgrade to the latest version (requires root)
sudo onebox upgrade
```
## Configuration 🔧
### System Requirements
- **Linux** (x64 or ARM64)
- **Docker** installed and running
- **Docker Swarm** initialized (`docker swarm init`)
- **Root/sudo access** for ports 80/443
- **(Optional) Cloudflare account** for DNS automation
### Data Locations
| Data | Location |
|------|----------|
| **Database** | `./onebox.db` (or custom path) |
| **SSL Certificates** | Managed by CertManager |
| **Registry Data** | `./.nogit/registry-data` |
### Environment Variables
```bash
# Database location
ONEBOX_DB_PATH=/path/to/onebox.db
# HTTP server port (default: 3000)
ONEBOX_HTTP_PORT=3000
# Enable debug logging
ONEBOX_DEBUG=true
```
## Development 💻
### Setup
```bash
# Clone repository
git clone https://code.foss.global/serve.zone/onebox
cd onebox
# Start development server (auto-restart on changes)
pnpm run watch pnpm run watch
``` pnpm build
### Tasks
```bash
# Development server (auto-restart on changes)
deno task dev
# Run tests
deno task test deno task test
# Watch mode for tests
deno task test:watch deno task test:watch
# Compile binaries for all platforms
deno task compile deno task compile
``` ```
### Project Structure Source map:
``` | Path | Purpose |
onebox/ | --- | --- |
├── ts/ | `mod.ts` | Deno entry point. |
│ ├── classes/ # Core implementations | `ts/cli.ts` | CLI router and command help. |
│ │ ├── onebox.ts # Main coordinator | `ts/classes/onebox.ts` | Main coordinator. |
│ │ ├── reverseproxy.ts # Reverse proxy orchestration | `ts/classes/docker.ts` | Docker client, networks, containers, and Swarm services. |
│ │ ├── caddy.ts # Caddy Docker service management | `ts/classes/reverseproxy.ts` | SmartProxy route and certificate bridge. |
│ │ ├── docker.ts # Docker Swarm API | `ts/classes/platform-services/` | Local platform service providers. |
│ │ ├── httpserver.ts # REST API + WebSocket | `ts/classes/appstore.ts` | Remote App Store catalog and upgrade logic. |
│ │ ├── services.ts # Service orchestration | `ts/classes/backup-manager.ts` | Backup and restore orchestration. |
│ │ ├── certmanager.ts # SSL certificate management | `ts/opsserver/` | Web UI server and TypedRequest handlers. |
│ │ ├── cert-requirement-manager.ts # Certificate requirements | `ts/database/` | SQLite repositories and migrations. |
│ │ ├── ssl.ts # SSL utilities | `ts_web/` | Dashboard source. |
│ │ ├── registry.ts # Built-in Docker registry
│ │ ├── registries.ts # External registry management
│ │ ├── dns.ts # DNS record management
│ │ ├── cloudflare-sync.ts # Cloudflare zone sync
│ │ ├── daemon.ts # Systemd daemon management
│ │ └── apiclient.ts # API client utilities
│ ├── database/ # Database layer (repository pattern)
│ │ ├── index.ts # Main OneboxDatabase class
│ │ ├── base.repository.ts # Base repository class
│ │ └── repositories/ # Domain-specific repositories
│ │ ├── service.repository.ts
│ │ ├── certificate.repository.ts
│ │ ├── auth.repository.ts
│ │ ├── metrics.repository.ts
│ │ └── ...
│ ├── cli.ts # CLI router
│ ├── types.ts # TypeScript interfaces
│ ├── logging.ts # Logging utilities
│ └── plugins.ts # Dependency imports
├── ui/ # Angular 19 web interface
├── test/ # Test files
├── mod.ts # Main entry point
└── deno.json # Deno configuration
```
### API Endpoints
The HTTP server exposes a comprehensive REST API:
#### Authentication
| Method | Endpoint | Description |
|--------|----------|-------------|
| `POST` | `/api/auth/login` | User authentication (returns token) |
#### Services
| Method | Endpoint | Description |
|--------|----------|-------------|
| `GET` | `/api/services` | List all services |
| `POST` | `/api/services` | Create/deploy service |
| `GET` | `/api/services/:name` | Get service details |
| `PUT` | `/api/services/:name` | Update service |
| `DELETE` | `/api/services/:name` | Delete service |
| `POST` | `/api/services/:name/start` | Start service |
| `POST` | `/api/services/:name/stop` | Stop service |
| `POST` | `/api/services/:name/restart` | Restart service |
| `GET` | `/api/services/:name/logs` | Get service logs |
| `WS` | `/api/services/:name/logs/stream` | Stream logs via WebSocket |
#### SSL Certificates
| Method | Endpoint | Description |
|--------|----------|-------------|
| `GET` | `/api/ssl/list` | List all certificates |
| `GET` | `/api/ssl/:domain` | Get certificate details |
| `POST` | `/api/ssl/obtain` | Request new certificate |
| `POST` | `/api/ssl/:domain/renew` | Force renew certificate |
#### Domains
| Method | Endpoint | Description |
|--------|----------|-------------|
| `GET` | `/api/domains` | List all domains |
| `GET` | `/api/domains/:domain` | Get domain details |
| `POST` | `/api/domains/sync` | Sync domains from Cloudflare |
#### DNS Records
| Method | Endpoint | Description |
|--------|----------|-------------|
| `GET` | `/api/dns` | List DNS records |
| `POST` | `/api/dns` | Create DNS record |
| `DELETE` | `/api/dns/:domain` | Delete DNS record |
| `POST` | `/api/dns/sync` | Sync DNS from Cloudflare |
#### Registry
| Method | Endpoint | Description |
|--------|----------|-------------|
| `GET` | `/api/registry/tags/:service` | Get registry tags for service |
| `GET` | `/api/registry/tokens` | List registry tokens |
| `POST` | `/api/registry/tokens` | Create registry token |
| `DELETE` | `/api/registry/tokens/:id` | Delete registry token |
#### System
| Method | Endpoint | Description |
|--------|----------|-------------|
| `GET` | `/api/status` | System status |
| `GET` | `/api/settings` | Get settings |
| `PUT` | `/api/settings` | Update settings |
| `WS` | `/api/ws` | WebSocket for real-time updates |
### WebSocket Messages
Real-time updates are broadcast via WebSocket:
```typescript
// Service lifecycle updates
{
type: 'service_update',
action: 'created' | 'updated' | 'deleted' | 'started' | 'stopped',
service: { id, name, status, ... }
}
// Service status changes
{
type: 'service_status',
service: { id, name, status, ... }
}
// System status updates
{
type: 'system_status',
status: { docker, reverseProxy, services, ... }
}
```
## Advanced Usage 🚀
### Using the Built-in Registry
```bash
# Deploy a service with Onebox Registry
onebox service add myapp \
--use-onebox-registry \
--domain myapp.example.com \
--auto-update-on-push
# Get the registry token for pushing images
# (Token is automatically created and stored in database)
# Push your image
docker tag myimage:latest localhost:4000/myapp:latest
docker push localhost:4000/myapp:latest
# Service automatically updates! 🎉
```
### Registry Token Management
```bash
# Create a CI/CD token via API
curl -X POST http://localhost:3000/api/registry/tokens \
-H "Authorization: Bearer $TOKEN" \
-H "Content-Type: application/json" \
-d '{"name": "github-actions", "type": "ci", "scope": ["myapp"], "expiresIn": "90d"}'
# Use token for docker login
docker login localhost:4000 -u ci -p <token>
```
### Cloudflare DNS Integration
```bash
# Configure Cloudflare (one-time setup)
onebox config set cloudflareAPIKey your-api-key
onebox config set cloudflareEmail your@email.com
onebox config set cloudflareZoneID your-zone-id
# Deploy with automatic DNS
onebox service add myapp \
--image nginx:latest \
--domain myapp.example.com
# DNS record is automatically created!
# Sync all domains from Cloudflare
onebox dns sync
```
### SSL Certificate Management
SSL certificates are automatically obtained and renewed:
- ✅ Certificates are requested when a service with a domain is deployed
- ✅ Renewal happens automatically 30 days before expiry
- ✅ Certificates are hot-reloaded without downtime
- ✅ Force renewal: `onebox ssl force-renew <domain>`
### Monitoring and Metrics
Metrics are collected every 60 seconds (configurable):
```bash
# Set metrics interval (milliseconds)
onebox config set metricsInterval 30000
# View in web UI or query database directly
sqlite3 onebox.db "SELECT * FROM metrics WHERE service_id = 1 ORDER BY timestamp DESC LIMIT 10"
```
## Troubleshooting 🔧
### Docker Swarm Not Initialized
```bash
# Initialize Docker Swarm
docker swarm init
# Verify swarm mode
docker info | grep "Swarm: active"
```
### Port Already in Use
```bash
# Check what's using port 80/443
sudo lsof -i :80
sudo lsof -i :443
# Kill the process or change Onebox ports
onebox config set httpPort 8080
```
### SSL Certificate Issues
```bash
# Check certificate status
onebox ssl list
# Verify DNS is pointing to your server
dig +short yourdomain.com
# Force certificate renewal
onebox ssl force-renew yourdomain.com
```
### WebSocket Connection Issues
- ✅ Ensure firewall allows WebSocket connections
- ✅ Check browser console for connection errors
- ✅ Verify `/api/ws` endpoint is accessible
### Service Not Starting
```bash
# Check Docker logs
docker service logs <service-name>
# Check Onebox logs
onebox daemon logs
# Verify image exists
docker images | grep <image-name>
```
## License and Legal Information ## License and Legal Information
This repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. This repository contains open-source code licensed under the MIT License. A copy of the license can be found in the [license](./license) file.
**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file. **Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.
### Trademarks ### Trademarks
This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH. This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH or third parties, and are not included within the scope of the MIT license granted herein.
### Issue Reporting and Security Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines or the guidelines of the respective third-party owners, and any usage must be approved in writing. Third-party trademarks used herein are the property of their respective owners and used only in a descriptive manner, e.g. for an implementation of an API or similar.
For reporting bugs, issues, or security vulnerabilities, please visit [community.foss.global/](https://community.foss.global/). This is the central community hub for all issue reporting. Developers who sign and comply with our contribution agreement and go through identification can also get a [code.foss.global/](https://code.foss.global/) account to submit Pull Requests directly.
### Company Information ### Company Information
Task Venture Capital GmbH Task Venture Capital GmbH\
Registered at District court Bremen HRB 35230 HB, Germany Registered at District Court Bremen HRB 35230 HB, Germany
For any legal inquiries or if you require further information, please contact us via email at hello@task.vc. For any legal inquiries or further information, please contact us via email at hello@task.vc.
By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works. By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.
+100
View File
@@ -0,0 +1,100 @@
import { assert, assertEquals, fail } from '@std/assert';
import * as plugins from '../ts/plugins.ts';
import type { IUser as IDatabaseUser } from '../ts/types.ts';
import { AdminHandler } from '../ts/opsserver/handlers/admin.handler.ts';
import {
hashPassword,
isPbkdf2Hash,
verifyPassword,
} from '../ts/utils/auth.ts';
class FakeDatabase {
  // In-memory stand-in for the user table of the real Onebox database.
  constructor(private users: Map<string, IDatabaseUser>) {}

  // Look up a user by name; null mirrors the real repository's "not found" result.
  getUserByUsername(username: string): IDatabaseUser | null {
    const user = this.users.get(username);
    return user === undefined ? null : user;
  }

  // Replace the stored password hash and bump updatedAt; unknown users are ignored.
  updateUserPassword(username: string, passwordHash: string): void {
    const existing = this.users.get(username);
    if (existing === undefined) {
      return;
    }
    const updated: IDatabaseUser = {
      ...existing,
      passwordHash,
      updatedAt: Date.now(),
    };
    this.users.set(username, updated);
  }
}
// Build an initialized AdminHandler wired to a fake database seeded with `users`.
async function createAdminHandler(users: IDatabaseUser[]): Promise<AdminHandler> {
  // Index the seed users by username for the fake database lookups.
  const byUsername = new Map<string, IDatabaseUser>();
  for (const user of users) {
    byUsername.set(user.username, user);
  }
  // Minimal ops-server shim: only the pieces AdminHandler actually touches.
  const opsServerStub = {
    typedrouter: new plugins.typedrequest.TypedRouter(),
    oneboxRef: {
      database: new FakeDatabase(byUsername),
    },
  };
  const handler = new AdminHandler(opsServerStub as any);
  await handler.initialize();
  return handler;
}
Deno.test('password helpers support PBKDF2 password hashes', async () => {
  const password = 'correct horse battery staple';
  const passwordHash = await hashPassword(password);

  // The generated hash must be recognized as PBKDF2 and verify round-trip.
  assert(isPbkdf2Hash(passwordHash));
  assert(await verifyPassword(password, passwordHash));

  // Wrong passwords and legacy base64 "hashes" must both be rejected.
  const wrongPasswordAccepted = await verifyPassword('wrong password', passwordHash);
  assert(!wrongPasswordAccepted);
  const legacyBase64Accepted = await verifyPassword(password, btoa(password));
  assert(!legacyBase64Accepted);
});
Deno.test('verified identity is derived from the signed JWT and database, not client fields', async () => {
  const adminHandler = await createAdminHandler([
    {
      id: 1,
      username: 'alice',
      passwordHash: await hashPassword('password123'),
      role: 'user',
      createdAt: Date.now(),
      updatedAt: Date.now(),
    },
  ]);

  const expiresAt = Date.now() + 60_000;
  // Sign a JWT for the real, non-admin user "alice".
  const jwt = await adminHandler.smartjwtInstance.createJWT({
    userId: '1',
    username: 'alice',
    role: 'user',
    status: 'loggedIn',
    expiresAt,
  });

  // Client-supplied identity fields are attacker-controlled; only the JWT payload
  // and the database record may drive the verified identity.
  const verifiedIdentity = await adminHandler.getVerifiedIdentity({
    jwt,
    userId: '999',
    username: 'mallory',
    role: 'admin',
    expiresAt: 0,
  });
  assertEquals(verifiedIdentity.userId, '1');
  assertEquals(verifiedIdentity.username, 'alice');
  assertEquals(verifiedIdentity.role, 'user');
  assertEquals(verifiedIdentity.expiresAt, expiresAt);

  // BUG FIX: the original called fail() inside the try block, but fail() throws an
  // AssertionError that the bare catch swallowed and recorded as a rejection, so the
  // test passed even when getVerifiedAdminIdentity did NOT reject. Keep only the
  // awaited call inside the try and assert the outcome afterwards.
  let rejected = false;
  try {
    await adminHandler.getVerifiedAdminIdentity(verifiedIdentity);
  } catch {
    rejected = true;
  }
  assert(rejected, 'Expected admin-only identity verification to reject non-admin users');
});
+213
View File
@@ -0,0 +1,213 @@
import { assert, assertEquals } from '@std/assert';
import { ExternalGatewayManager } from '../ts/classes/external-gateway.ts';
import type { IDomain, IService, ISslCertificate } from '../ts/types.ts';
class FakeDatabase {
  // Plain in-memory stores mirroring the database tables these tests exercise.
  public settings = new Map<string, string>();
  public secretSettings = new Map<string, string>();
  public domains: IDomain[] = [];
  public certificates = new Map<string, ISslCertificate>();
  // Auto-increment id handed out by createDomain, like a SQLite rowid.
  private nextDomainId = 1;

  getSetting(key: string): string | null {
    const value = this.settings.get(key);
    return value === undefined ? null : value;
  }

  setSetting(key: string, value: string): void {
    this.settings.set(key, value);
  }

  // Secrets are async in the real database, so keep the Promise contract here.
  async getSecretSetting(key: string): Promise<string | null> {
    const value = this.secretSettings.get(key);
    return value === undefined ? null : value;
  }

  getDomainByName(domain: string): IDomain | null {
    for (const entry of this.domains) {
      if (entry.domain === domain) {
        return entry;
      }
    }
    return null;
  }

  createDomain(domain: Omit<IDomain, 'id'>): IDomain {
    const id = this.nextDomainId;
    this.nextDomainId += 1;
    const createdDomain = { ...domain, id };
    this.domains.push(createdDomain);
    return createdDomain;
  }

  // Shallow-merge updates onto an existing domain; unknown ids are ignored.
  updateDomain(id: number, updates: Partial<IDomain>): void {
    const index = this.domains.findIndex((entry) => entry.id === id);
    if (index !== -1) {
      this.domains[index] = { ...this.domains[index], ...updates };
    }
  }

  getDomainsByProvider(provider: NonNullable<IDomain['dnsProvider']>): IDomain[] {
    return this.domains.filter((entry) => entry.dnsProvider === provider);
  }

  getSSLCertificate(domain: string): ISslCertificate | null {
    return this.certificates.get(domain) ?? null;
  }

  // Shallow-merge updates onto a stored certificate; unknown domains are ignored.
  updateSSLCertificate(domain: string, updates: Partial<ISslCertificate>): void {
    const existing = this.certificates.get(domain);
    if (existing !== undefined) {
      this.certificates.set(domain, { ...existing, ...updates });
    }
  }

  async createSSLCertificate(cert: Omit<ISslCertificate, 'id'>): Promise<ISslCertificate> {
    const storedCert = { ...cert, id: this.certificates.size + 1 };
    this.certificates.set(cert.domain, storedCert);
    return storedCert;
  }
}
/**
 * Builds a minimal Onebox stub for dcrouter gateway tests: a pre-seeded
 * FakeDatabase plus a reverse-proxy double that counts certificate reloads.
 */
const makeOneboxRef = () => {
  const database = new FakeDatabase();
  database.settings.set('dcrouterGatewayUrl', 'https://edge.example.com');
  database.settings.set('dcrouterWorkHosterId', 'onebox-1');
  database.secretSettings.set('dcrouterGatewayApiToken', 'dcr-token');
  let certificateReloads = 0;
  const reverseProxy = {
    reloadCertificates: async () => {
      certificateReloads += 1;
    },
    // Exposed so tests can assert how often certificates were reloaded.
    get reloadCount() {
      return certificateReloads;
    },
  };
  return { database, reverseProxy };
};
// Verifies that syncDomains() pulls the WorkHoster domain list from dcrouter,
// creates matching Onebox domain rows, and flags rows for domains the remote
// no longer reports as obsolete (rather than deleting them).
Deno.test('ExternalGatewayManager syncs dcrouter domains into Onebox domains', async () => {
  const oneboxRef = makeOneboxRef();
  // Pre-seed a dcrouter-provided domain that the remote no longer lists.
  oneboxRef.database.domains.push({
    id: 99,
    domain: 'old.example.com',
    dnsProvider: 'dcrouter',
    isObsolete: false,
    defaultWildcard: true,
    createdAt: 1,
    updatedAt: 1,
  });
  const manager = new ExternalGatewayManager(oneboxRef as any);
  // Stub the dcrouter RPC so the test performs no network traffic.
  (manager as any).fireDcRouterRequest = async (method: string) => {
    assertEquals(method, 'getWorkHosterDomains');
    return {
      domains: [
        {
          name: 'example.com',
          capabilities: {
            canCreateSubdomains: true,
            canManageDnsRecords: true,
            canIssueCertificates: true,
            canHostEmail: true,
          },
        },
      ],
    };
  };
  const domains = await manager.syncDomains();
  // Both the freshly synced and the stale domain are returned...
  assertEquals(domains.length, 2);
  assertEquals(oneboxRef.database.getDomainByName('example.com')?.dnsProvider, 'dcrouter');
  assertEquals(oneboxRef.database.getDomainByName('example.com')?.defaultWildcard, true);
  // ...but the stale one is marked obsolete.
  assertEquals(oneboxRef.database.getDomainByName('old.example.com')?.isObsolete, true);
});

// Verifies the WorkHoster route payload produced for a running service:
// ownership tuple, match rule, upstream target (host IP + HTTP port from
// settings), and TLS termination mode.
Deno.test('ExternalGatewayManager syncs service routes to dcrouter WorkHoster API', async () => {
  const oneboxRef = makeOneboxRef();
  oneboxRef.database.settings.set('serverIP', '203.0.113.10');
  oneboxRef.database.settings.set('httpPort', '8080');
  const service: IService = {
    id: 1,
    name: 'hello',
    image: 'nginx:latest',
    envVars: {},
    port: 3000,
    domain: 'hello.example.com',
    status: 'running',
    createdAt: 1,
    updatedAt: 1,
  };
  const requests: Array<{ method: string; requestData: Record<string, unknown> }> = [];
  const manager = new ExternalGatewayManager(oneboxRef as any);
  // Capture every RPC; report no exportable certificate so the route-sync
  // path is exercised on its own.
  (manager as any).fireDcRouterRequest = async (method: string, requestData: Record<string, unknown>) => {
    requests.push({ method, requestData });
    if (method === 'exportCertificate') {
      return { success: false };
    }
    return { success: true, action: 'created', routeId: 'route-1' };
  };
  await manager.syncServiceRoute(service);
  const syncRequest = requests.find((request) => request.method === 'syncWorkAppRoute')!;
  const route = syncRequest.requestData.route as any;
  const ownership = syncRequest.requestData.ownership as any;
  assertEquals(ownership, {
    workHosterType: 'onebox',
    workHosterId: 'onebox-1',
    workAppId: 'hello',
    hostname: 'hello.example.com',
  });
  assertEquals(route.match, { ports: [443], domains: ['hello.example.com'] });
  // The route target points at this host's public IP and configured HTTP port.
  assertEquals(route.action.targets, [{ host: '203.0.113.10', port: 8080 }]);
  assertEquals(route.action.tls, { mode: 'terminate', certificate: 'auto' });
  assertEquals(syncRequest.requestData.enabled, true);
});

// Verifies that deleting a service route issues a syncWorkAppRoute call with
// the delete flag set and the service hostname in the ownership tuple.
Deno.test('ExternalGatewayManager deletes service routes through dcrouter WorkHoster API', async () => {
  const oneboxRef = makeOneboxRef();
  const manager = new ExternalGatewayManager(oneboxRef as any);
  let deleteRequest: Record<string, unknown> | null = null;
  (manager as any).fireDcRouterRequest = async (method: string, requestData: Record<string, unknown>) => {
    assertEquals(method, 'syncWorkAppRoute');
    deleteRequest = requestData;
    return { success: true, action: 'deleted', routeId: 'route-1' };
  };
  await manager.deleteServiceRoute({
    id: 1,
    name: 'hello',
    domain: 'hello.example.com',
  });
  assert(deleteRequest);
  // Re-typed locally: TS cannot narrow a variable assigned inside a closure.
  const capturedDeleteRequest = deleteRequest as Record<string, unknown>;
  assertEquals(capturedDeleteRequest.delete, true);
  assertEquals((capturedDeleteRequest.ownership as any).hostname, 'hello.example.com');
});

// Verifies that a certificate exported by dcrouter is persisted as an Onebox
// SSL certificate (issuer 'dcrouter') and the reverse proxy reloads exactly once.
Deno.test('ExternalGatewayManager imports exported dcrouter certificates into Onebox', async () => {
  const oneboxRef = makeOneboxRef();
  const manager = new ExternalGatewayManager(oneboxRef as any);
  (manager as any).fireDcRouterRequest = async (method: string, requestData: Record<string, unknown>) => {
    assertEquals(method, 'exportCertificate');
    assertEquals(requestData.domain, 'hello.example.com');
    return {
      success: true,
      cert: {
        id: 'cert-1',
        domainName: 'hello.example.com',
        created: 1,
        validUntil: 2,
        privateKey: '-----BEGIN PRIVATE KEY-----\nfake\n-----END PRIVATE KEY-----',
        publicKey: '-----BEGIN CERTIFICATE-----\nfake\n-----END CERTIFICATE-----',
        csr: '',
      },
    };
  };
  const imported = await manager.importCertificateForDomain('hello.example.com');
  assert(imported);
  assertEquals(oneboxRef.database.getSSLCertificate('hello.example.com')?.issuer, 'dcrouter');
  assertEquals(oneboxRef.reverseProxy.reloadCount, 1);
});
+73
View File
@@ -0,0 +1,73 @@
import { assert, assertEquals } from '@std/assert';
import { SecretSettingsManager } from '../ts/database/secret-settings.ts';
/**
 * In-memory auth-repository double exposing separate plain and secret
 * setting stores; maps are public so tests can inspect raw storage.
 */
class FakeAuthRepository {
  public settings = new Map<string, string>();
  public secretSettings = new Map<string, string>();

  getSetting(key: string): string | null {
    const value = this.settings.get(key);
    return value !== undefined ? value : null;
  }

  setSetting(key: string, value: string): void {
    this.settings.set(key, value);
  }

  deleteSetting(key: string): void {
    this.settings.delete(key);
  }

  getSecretSetting(key: string): string | null {
    const value = this.secretSettings.get(key);
    return value !== undefined ? value : null;
  }

  setSecretSetting(key: string, value: string): void {
    this.secretSettings.set(key, value);
  }

  deleteSecretSetting(key: string): void {
    this.secretSettings.delete(key);
  }
}
// A legacy plaintext alias is read once, migrated into encrypted secret
// storage under the canonical key, and purged from plain settings.
Deno.test('secret settings migrate legacy plaintext aliases into encrypted storage', async () => {
  const authRepo = new FakeAuthRepository();
  authRepo.setSetting('cloudflareAPIKey', 'cf-secret-token');
  const secretSettings = new SecretSettingsManager(authRepo as any);
  const token = await secretSettings.get('cloudflareToken');
  assertEquals(token, 'cf-secret-token');
  // Plaintext copies are removed under both the legacy and canonical keys.
  assertEquals(authRepo.getSetting('cloudflareAPIKey'), null);
  assertEquals(authRepo.getSetting('cloudflareToken'), null);
  // Migrated value is stored in the versioned encrypted format.
  const storedSecret = authRepo.getSecretSetting('cloudflareToken');
  assert(storedSecret?.startsWith('enc:v1:'));
});

// Writing through an alias stores under the canonical key only, and
// clear() removes the secret entirely.
Deno.test('secret settings canonicalize aliases and clear old secret entries', async () => {
  const authRepo = new FakeAuthRepository();
  const secretSettings = new SecretSettingsManager(authRepo as any);
  await secretSettings.set('backup_encryption_password', 'backup-passphrase');
  assertEquals(await secretSettings.get('backupPassword'), 'backup-passphrase');
  assert(authRepo.getSecretSetting('backupPassword')?.startsWith('enc:v1:'));
  // The alias key itself must hold nothing after canonicalization.
  assertEquals(authRepo.getSecretSetting('backup_encryption_password'), null);
  secretSettings.clear('backupPassword');
  assertEquals(await secretSettings.get('backupPassword'), null);
});

// The dcrouter gateway token migrates from its legacy plain setting name
// ('externalGatewayApiToken') into encrypted storage under the canonical key.
Deno.test('secret settings treat dcrouter gateway token as encrypted secret', async () => {
  const authRepo = new FakeAuthRepository();
  authRepo.setSetting('externalGatewayApiToken', 'dcr-secret-token');
  const secretSettings = new SecretSettingsManager(authRepo as any);
  const token = await secretSettings.get('dcrouterGatewayApiToken');
  assertEquals(token, 'dcr-secret-token');
  assertEquals(authRepo.getSetting('externalGatewayApiToken'), null);
  assert(authRepo.getSecretSetting('dcrouterGatewayApiToken')?.startsWith('enc:v1:'));
});
+61
View File
@@ -0,0 +1,61 @@
import { assert, assertEquals } from '@std/assert';
import type { IRegistry } from '../ts/types.ts';
import { credentialEncryption } from '../ts/classes/encryption.ts';
import { OneboxRegistriesManager } from '../ts/classes/registries.ts';
/** Map-backed registry store double, keyed by registry URL. */
class FakeRegistryDatabase {
  private registries = new Map<string, IRegistry>();

  getRegistryByURL(url: string): IRegistry | null {
    const found = this.registries.get(url);
    return found !== undefined ? found : null;
  }

  async createRegistry(registry: Omit<IRegistry, 'id'>): Promise<IRegistry> {
    // Sequential ids derived from current size, matching insertion order.
    const saved: IRegistry = {
      id: this.registries.size + 1,
      ...registry,
    };
    this.registries.set(saved.url, saved);
    return saved;
  }

  deleteRegistry(url: string): void {
    this.registries.delete(url);
  }

  getAllRegistries(): IRegistry[] {
    return [...this.registries.values()];
  }
}
// Smoke test: the shared encryption helper initializes on first use and
// decrypt(encrypt(x)) returns the original payload.
Deno.test('credential encryption lazily initializes and roundtrips payloads', async () => {
  const encrypted = await credentialEncryption.encrypt({ password: 'super-secret' });
  const decrypted = await credentialEncryption.decrypt<{ password: string }>(encrypted);
  assert(encrypted.length > 0);
  assertEquals(decrypted.password, 'super-secret');
});

// New registries persist an 'enc:v1:'-prefixed password, while
// decryptPassword still falls back to decoding legacy base64-only values.
Deno.test('registry passwords use encrypted storage with legacy decode fallback', async () => {
  const fakeDatabase = new FakeRegistryDatabase();
  const registriesManager = new OneboxRegistriesManager({ database: fakeDatabase } as any);
  // Skip the real `docker login` side effect.
  (registriesManager as any).loginToRegistry = async () => {};
  const registry = await registriesManager.addRegistry(
    'registry.example.com',
    'ci-user',
    'correct horse battery staple',
  );
  assert(registry.passwordEncrypted.startsWith('enc:v1:'));
  assertEquals(
    await (registriesManager as any).decryptPassword(registry.passwordEncrypted),
    'correct horse battery staple',
  );
  // Legacy fallback: values without the enc:v1: prefix are plain base64.
  assertEquals(
    await (registriesManager as any).decryptPassword(btoa('legacy-password')),
    'legacy-password',
  );
});
+1 -1
View File
@@ -3,6 +3,6 @@
*/ */
export const commitinfo = { export const commitinfo = {
name: '@serve.zone/onebox', name: '@serve.zone/onebox',
version: '1.22.2', version: '1.24.2',
description: 'Self-hosted container platform with automatic SSL and DNS - a mini Heroku for single servers' description: 'Self-hosted container platform with automatic SSL and DNS - a mini Heroku for single servers'
} }
-210
View File
@@ -1,210 +0,0 @@
/**
* API Client for communicating with Onebox daemon
*
* Provides methods for CLI commands to interact with running daemon via HTTP API
*/
import type {
IService,
IRegistry,
IDnsRecord,
ISslCertificate,
IServiceDeployOptions,
} from '../types.ts';
import { getErrorMessage } from '../utils/error.ts';
/**
 * HTTP client for the local Onebox daemon API.
 *
 * All calls go through the private `request` helper, which attaches the
 * optional bearer token, enforces a 30-second timeout, and normalizes
 * error responses into thrown Errors.
 */
export class OneboxApiClient {
  /** Base URL of the local daemon, e.g. http://localhost:3000 */
  private baseUrl: string;
  /** Bearer token attached to every request once setToken() is called. */
  private token?: string;

  constructor(port = 3000) {
    this.baseUrl = `http://localhost:${port}`;
  }

  /**
   * Check if daemon is reachable.
   * @returns true when GET /api/status answers 2xx within 5 seconds.
   */
  async isReachable(): Promise<boolean> {
    try {
      const response = await fetch(`${this.baseUrl}/api/status`, {
        signal: AbortSignal.timeout(5000), // fail fast if the daemon is down
      });
      return response.ok;
    } catch {
      return false;
    }
  }

  // ============ Service Operations ============

  /** Deploy a new service; resolves to the created service record. */
  async deployService(config: IServiceDeployOptions): Promise<IService> {
    return await this.request<IService>('POST', '/api/services', config);
  }

  /** Remove a service by name. */
  async removeService(name: string): Promise<void> {
    // Encode the path segment so names with reserved characters round-trip.
    await this.request('DELETE', `/api/services/${encodeURIComponent(name)}`);
  }

  /** Start a stopped service. */
  async startService(name: string): Promise<void> {
    await this.request('POST', `/api/services/${encodeURIComponent(name)}/start`);
  }

  /** Stop a running service. */
  async stopService(name: string): Promise<void> {
    await this.request('POST', `/api/services/${encodeURIComponent(name)}/stop`);
  }

  /** Restart a service. */
  async restartService(name: string): Promise<void> {
    await this.request('POST', `/api/services/${encodeURIComponent(name)}/restart`);
  }

  /** List all services known to the daemon. */
  async listServices(): Promise<IService[]> {
    return await this.request<IService[]>('GET', '/api/services');
  }

  /**
   * Fetch recent log lines for a service.
   * @param limit maximum number of lines to return (default 1000)
   */
  async getServiceLogs(name: string, limit = 1000): Promise<string[]> {
    const result = await this.request<{ logs: string[] }>(
      'GET',
      `/api/services/${encodeURIComponent(name)}/logs?limit=${limit}`
    );
    return result.logs;
  }

  // ============ Registry Operations ============

  /** Register a container registry with credentials. */
  async addRegistry(url: string, username: string, password: string): Promise<void> {
    await this.request('POST', '/api/registries', { url, username, password });
  }

  /** Remove a registry by URL. */
  async removeRegistry(url: string): Promise<void> {
    await this.request('DELETE', `/api/registries/${encodeURIComponent(url)}`);
  }

  /** List configured registries. */
  async listRegistries(): Promise<IRegistry[]> {
    return await this.request<IRegistry[]>('GET', '/api/registries');
  }

  // ============ DNS Operations ============

  /** Add a DNS record for the given domain. */
  async addDnsRecord(domain: string): Promise<void> {
    await this.request('POST', '/api/dns', { domain });
  }

  /** Remove the DNS record for the given domain. */
  async removeDnsRecord(domain: string): Promise<void> {
    await this.request('DELETE', `/api/dns/${encodeURIComponent(domain)}`);
  }

  /** List managed DNS records. */
  async listDnsRecords(): Promise<IDnsRecord[]> {
    return await this.request<IDnsRecord[]>('GET', '/api/dns');
  }

  /** Trigger a DNS synchronization run on the daemon. */
  async syncDns(): Promise<void> {
    await this.request('POST', '/api/dns/sync');
  }

  // ============ SSL Operations ============

  /**
   * Renew the certificate for one domain, or all certificates when no
   * domain is given.
   */
  async renewCertificate(domain?: string): Promise<void> {
    const path = domain
      ? `/api/ssl/renew/${encodeURIComponent(domain)}`
      : '/api/ssl/renew';
    await this.request('POST', path);
  }

  /** List managed SSL certificates. */
  async listCertificates(): Promise<ISslCertificate[]> {
    return await this.request<ISslCertificate[]>('GET', '/api/ssl');
  }

  /** Force renewal of a domain's certificate even if it is still valid. */
  async forceRenewCertificate(domain: string): Promise<void> {
    await this.request('POST', `/api/ssl/renew/${encodeURIComponent(domain)}?force=true`);
  }

  // ============ Nginx Operations ============

  /** Reload the nginx configuration. */
  async reloadNginx(): Promise<void> {
    await this.request('POST', '/api/nginx/reload');
  }

  /** Validate the nginx configuration; returns the test output. */
  async testNginx(): Promise<{ success: boolean; output: string }> {
    return await this.request('POST', '/api/nginx/test');
  }

  /** Fetch the nginx process status. */
  async getNginxStatus(): Promise<{ status: string }> {
    return await this.request('GET', '/api/nginx/status');
  }

  // ============ Config Operations ============

  /** Fetch all daemon settings as a key/value map. */
  async getSettings(): Promise<Record<string, string>> {
    return await this.request<Record<string, string>>('GET', '/api/config');
  }

  /** Set a single daemon setting. */
  async setSetting(key: string, value: string): Promise<void> {
    await this.request('POST', '/api/config', { key, value });
  }

  // ============ System Operations ============

  /** Fetch aggregate daemon status (service counts and uptime). */
  async getStatus(): Promise<{
    services: { total: number; running: number; stopped: number };
    uptime: number;
  }> {
    return await this.request('GET', '/api/status');
  }

  // ============ Helper Methods ============

  /**
   * Make an HTTP request to the daemon.
   *
   * @param method HTTP verb
   * @param path request path starting with '/'
   * @param body optional JSON-serializable payload
   * @throws Error with the server-provided message on non-2xx responses,
   *         or a timeout-specific Error after 30 seconds.
   */
  private async request<T = unknown>(
    method: string,
    path: string,
    body?: unknown
  ): Promise<T> {
    const url = `${this.baseUrl}${path}`;
    const headers: Record<string, string> = {
      'Content-Type': 'application/json',
    };
    if (this.token) {
      headers['Authorization'] = `Bearer ${this.token}`;
    }
    const options: RequestInit = {
      method,
      headers,
      signal: AbortSignal.timeout(30000), // 30 second timeout
    };
    // Compare against undefined (not truthiness) so falsy payloads such as
    // 0, '' or false are still serialized and sent.
    if (body !== undefined) {
      options.body = JSON.stringify(body);
    }
    try {
      const response = await fetch(url, options);
      if (!response.ok) {
        const errorData = await response.json().catch(() => ({ message: response.statusText }));
        throw new Error(errorData.message || `HTTP ${response.status}: ${response.statusText}`);
      }
      // For DELETE and some POST requests, there might be no content
      if (response.status === 204 || response.headers.get('content-length') === '0') {
        return undefined as T;
      }
      return await response.json();
    } catch (error) {
      if (error instanceof Error && error.name === 'TimeoutError') {
        throw new Error('Request timed out. Daemon might be unresponsive.');
      }
      throw error;
    }
  }

  /**
   * Set authentication token used for all subsequent requests.
   */
  setToken(token: string): void {
    this.token = token;
  }
}
+73
View File
@@ -0,0 +1,73 @@
/**
 * App Store type definitions
 */

/** Top-level catalog document fetched from the appstore-apptemplates repo. */
export interface ICatalog {
  schemaVersion: number;
  updatedAt: string; // presumably an ISO timestamp of the last publish — confirm against repo
  apps: ICatalogApp[];
}

/** One catalog entry: the fields needed to render a store listing. */
export interface ICatalogApp {
  id: string;
  name: string;
  description: string;
  category: string;
  iconName?: string;
  iconUrl?: string;
  latestVersion: string;
  tags?: string[];
}

/** Per-app metadata (apps/<id>/app.json): version list and maintainer info. */
export interface IAppMeta {
  id: string;
  name: string;
  description: string;
  category: string;
  iconName?: string;
  latestVersion: string;
  versions: string[];
  maintainer?: string;
  links?: Record<string, string>;
}

/** Deployable configuration of a single app version (versions/<v>/config.json). */
export interface IAppVersionConfig {
  image: string;
  port: number;
  envVars?: Array<{ key: string; value: string; description: string; required?: boolean }>;
  volumes?: string[];
  // Platform services the app expects alongside it.
  platformRequirements?: {
    mongodb?: boolean;
    s3?: boolean;
    clickhouse?: boolean;
    redis?: boolean;
    mariadb?: boolean;
  };
  minOneboxVersion?: string;
}

/** Input handed (as JSON on stdin) to a sandboxed migration script. */
export interface IMigrationContext {
  service: {
    name: string;
    image: string;
    envVars: Record<string, string>;
    port: number;
  };
  fromVersion: string;
  toVersion: string;
}

/**
 * Result printed by a migration script. Omitted optional fields mean
 * "keep the service's current value".
 */
export interface IMigrationResult {
  success: boolean;
  envVars?: Record<string, string>;
  image?: string;
  warnings: string[];
}

/** A deployed service whose app template has a newer catalog version. */
export interface IUpgradeableService {
  serviceName: string;
  appTemplateId: string;
  currentVersion: string;
  latestVersion: string;
  hasMigration: boolean;
}
+335
View File
@@ -0,0 +1,335 @@
/**
* App Store Manager
* Fetches, caches, and serves app templates from the remote appstore-apptemplates repo.
* The remote repo is the single source of truth — no fallback catalog.
*/
import type {
ICatalog,
ICatalogApp,
IAppMeta,
IAppVersionConfig,
IMigrationContext,
IMigrationResult,
IUpgradeableService,
} from './appstore-types.ts';
import { logger } from '../logging.ts';
import { getErrorMessage } from '../utils/error.ts';
import type { Onebox } from './onebox.ts';
import type { IService } from '../types.ts';
/**
 * Fetches the app template catalog from the remote appstore-apptemplates
 * repo, caches it, and drives template-based upgrades of deployed services,
 * including execution of sandboxed migration scripts.
 */
export class AppStoreManager {
  private oneboxRef: Onebox;
  // Catalog cache; refreshed at most once per cacheTtlMs (see getCatalog).
  private catalogCache: ICatalog | null = null;
  private lastFetchTime = 0;
  private readonly repoBaseUrl = 'https://code.foss.global/serve.zone/appstore-apptemplates/raw/branch/main';
  private readonly cacheTtlMs = 5 * 60 * 1000; // 5 minutes

  constructor(oneboxRef: Onebox) {
    this.oneboxRef = oneboxRef;
  }

  /** Warm the catalog cache; failures are logged and retried on next request. */
  async init(): Promise<void> {
    try {
      await this.getCatalog();
      logger.info(`App Store initialized with ${this.catalogCache?.apps.length || 0} templates`);
    } catch (error) {
      logger.warn(`App Store initialization failed: ${getErrorMessage(error)}`);
      logger.warn('App Store will retry on next request');
    }
  }

  /**
   * Get the catalog (cached, refreshes after TTL).
   * On fetch failure, falls back to the stale cache, then to an empty catalog.
   */
  async getCatalog(): Promise<ICatalog> {
    const now = Date.now();
    if (this.catalogCache && (now - this.lastFetchTime) < this.cacheTtlMs) {
      return this.catalogCache;
    }
    try {
      const catalog = await this.fetchJson('catalog.json') as ICatalog;
      if (catalog && catalog.apps && Array.isArray(catalog.apps)) {
        this.catalogCache = catalog;
        this.lastFetchTime = now;
        return catalog;
      }
      throw new Error('Invalid catalog format');
    } catch (error) {
      logger.warn(`Failed to fetch remote catalog: ${getErrorMessage(error)}`);
      // Return cached if available, otherwise return empty catalog
      if (this.catalogCache) {
        return this.catalogCache;
      }
      return { schemaVersion: 1, updatedAt: '', apps: [] };
    }
  }

  /**
   * Get the catalog apps list (convenience method for the API)
   */
  async getApps(): Promise<ICatalogApp[]> {
    const catalog = await this.getCatalog();
    return catalog.apps;
  }

  /**
   * Fetch app metadata (versions list, etc.)
   * @throws Error when the remote app.json cannot be fetched.
   */
  async getAppMeta(appId: string): Promise<IAppMeta> {
    try {
      return await this.fetchJson(`apps/${appId}/app.json`) as IAppMeta;
    } catch (error) {
      throw new Error(`Failed to fetch metadata for app '${appId}': ${getErrorMessage(error)}`);
    }
  }

  /**
   * Fetch full config for an app version
   * @throws Error when the remote config.json cannot be fetched.
   */
  async getAppVersionConfig(appId: string, version: string): Promise<IAppVersionConfig> {
    try {
      return await this.fetchJson(`apps/${appId}/versions/${version}/config.json`) as IAppVersionConfig;
    } catch (error) {
      throw new Error(`Failed to fetch config for ${appId}@${version}: ${getErrorMessage(error)}`);
    }
  }

  /**
   * Compare deployed services against catalog to find those with available upgrades.
   * Services without template provenance (appTemplateId/appTemplateVersion) are skipped.
   */
  async getUpgradeableServices(): Promise<IUpgradeableService[]> {
    const catalog = await this.getCatalog();
    const services = this.oneboxRef.database.getAllServices();
    const upgradeable: IUpgradeableService[] = [];
    for (const service of services) {
      if (!service.appTemplateId || !service.appTemplateVersion) continue;
      const catalogApp = catalog.apps.find(a => a.id === service.appTemplateId);
      if (!catalogApp) continue;
      // NOTE(review): any version mismatch counts as "upgradeable" — there is
      // no semver ordering here, so a service ahead of the catalog would also
      // be listed. Confirm that is acceptable.
      if (catalogApp.latestVersion !== service.appTemplateVersion) {
        // Check if a migration script exists
        const hasMigration = await this.hasMigrationScript(
          service.appTemplateId,
          service.appTemplateVersion,
          catalogApp.latestVersion,
        );
        upgradeable.push({
          serviceName: service.name,
          appTemplateId: service.appTemplateId,
          currentVersion: service.appTemplateVersion,
          latestVersion: catalogApp.latestVersion,
          hasMigration,
        });
      }
    }
    return upgradeable;
  }

  /**
   * Check if a migration script exists for a specific version transition.
   * Implemented as a fetch probe: any fetch failure (including network
   * errors) is treated as "no script".
   */
  async hasMigrationScript(appId: string, fromVersion: string, toVersion: string): Promise<boolean> {
    try {
      const scriptPath = `apps/${appId}/versions/${toVersion}/migrate-from-${fromVersion}.ts`;
      await this.fetchText(scriptPath);
      return true;
    } catch {
      return false;
    }
  }

  /**
   * Execute a migration in a sandboxed Deno child process.
   * The migration context is passed as JSON on stdin; the script's stdout is
   * parsed as an IMigrationResult. When no script exists, falls back to a
   * config-only upgrade (new image, env vars kept).
   */
  async executeMigration(service: IService, fromVersion: string, toVersion: string): Promise<IMigrationResult> {
    const appId = service.appTemplateId;
    if (!appId) {
      throw new Error('Service has no appTemplateId');
    }
    // Fetch the migration script
    const scriptPath = `apps/${appId}/versions/${toVersion}/migrate-from-${fromVersion}.ts`;
    let scriptContent: string;
    try {
      scriptContent = await this.fetchText(scriptPath);
    } catch {
      // No migration script — do a simple config-based upgrade
      logger.info(`No migration script for ${appId} ${fromVersion} -> ${toVersion}, using config-only upgrade`);
      const config = await this.getAppVersionConfig(appId, toVersion);
      return {
        success: true,
        image: config.image,
        envVars: undefined, // Keep existing env vars
        warnings: [],
      };
    }
    // Write to temp file
    const tempFile = `/tmp/onebox-migration-${crypto.randomUUID()}.ts`;
    await Deno.writeTextFile(tempFile, scriptContent);
    try {
      // Prepare context
      const context: IMigrationContext = {
        service: {
          name: service.name,
          image: service.image,
          envVars: service.envVars,
          port: service.port,
        },
        fromVersion,
        toVersion,
      };
      // Execute in sandboxed Deno child process
      // NOTE(review): `--allow-net=none`, `--allow-read=none`, `--allow-write=none`
      // grant access to a resource literally named "none" rather than denying the
      // permission; to deny, omit the flag entirely (or use --deny-*). Confirm
      // the intended sandbox policy.
      const cmd = new Deno.Command('deno', {
        args: ['run', '--allow-env', '--allow-net=none', '--allow-read=none', '--allow-write=none', tempFile],
        stdin: 'piped',
        stdout: 'piped',
        stderr: 'piped',
      });
      const child = cmd.spawn();
      // Write context to stdin
      const writer = child.stdin.getWriter();
      await writer.write(new TextEncoder().encode(JSON.stringify(context)));
      await writer.close();
      // Read result
      const output = await child.output();
      const exitCode = output.code;
      const stdout = new TextDecoder().decode(output.stdout);
      const stderr = new TextDecoder().decode(output.stderr);
      if (exitCode !== 0) {
        logger.error(`Migration script failed (exit ${exitCode}): ${stderr.substring(0, 500)}`);
        return {
          success: false,
          warnings: [`Migration script failed: ${stderr.substring(0, 200)}`],
        };
      }
      // Parse result from stdout
      try {
        const result = JSON.parse(stdout) as IMigrationResult;
        // NOTE(review): the exit code is treated as authoritative — this
        // overrides any `success: false` the script may have printed.
        result.success = true;
        return result;
      } catch {
        logger.error(`Failed to parse migration output: ${stdout.substring(0, 200)}`);
        return {
          success: false,
          warnings: ['Migration script produced invalid output'],
        };
      }
    } finally {
      // Cleanup temp file
      try {
        await Deno.remove(tempFile);
      } catch {
        // Ignore cleanup errors
      }
    }
  }

  /**
   * Apply an upgrade: update image, env vars, recreate container.
   * The service's signature in the database keeps its name; the container is
   * stopped, removed, recreated from the updated record, and started again.
   */
  async applyUpgrade(
    serviceName: string,
    migrationResult: IMigrationResult,
    newVersion: string,
  ): Promise<IService> {
    const service = this.oneboxRef.database.getServiceByName(serviceName);
    if (!service) {
      throw new Error(`Service not found: ${serviceName}`);
    }
    // Stop the existing container
    if (service.containerID && service.status === 'running') {
      await this.oneboxRef.services.stopService(serviceName);
    }
    // Update service record
    const updates: Partial<IService> = {
      appTemplateVersion: newVersion,
    };
    if (migrationResult.image) {
      updates.image = migrationResult.image;
    }
    if (migrationResult.envVars) {
      // Merge: migration result provides base, user overrides preserved
      const mergedEnvVars = { ...migrationResult.envVars };
      // Keep any user-set env vars that aren't in the migration result
      for (const [key, value] of Object.entries(service.envVars)) {
        if (!(key in mergedEnvVars)) {
          mergedEnvVars[key] = value;
        }
      }
      updates.envVars = mergedEnvVars;
    }
    this.oneboxRef.database.updateService(service.id!, updates);
    // Pull new image if changed
    const newImage = migrationResult.image || service.image;
    if (migrationResult.image && migrationResult.image !== service.image) {
      await this.oneboxRef.docker.pullImage(newImage);
    }
    // Recreate and start container
    const updatedService = this.oneboxRef.database.getServiceByName(serviceName)!;
    // Remove old container
    if (service.containerID) {
      try {
        await this.oneboxRef.docker.removeContainer(service.containerID, true);
      } catch {
        // Container might already be gone
      }
    }
    // Create new container
    const containerID = await this.oneboxRef.docker.createContainer(updatedService);
    this.oneboxRef.database.updateService(service.id!, { containerID, status: 'starting' });
    // Start container
    await this.oneboxRef.docker.startContainer(containerID);
    this.oneboxRef.database.updateService(service.id!, { status: 'running' });
    logger.success(`Service '${serviceName}' upgraded to template version ${newVersion}`);
    return this.oneboxRef.database.getServiceByName(serviceName)!;
  }

  /**
   * Fetch JSON from the remote repo.
   * NOTE(review): on a non-OK response the body is never consumed; in Deno
   * this can leak the underlying connection — consider response.body?.cancel().
   */
  private async fetchJson(path: string): Promise<unknown> {
    const url = `${this.repoBaseUrl}/${path}`;
    const response = await fetch(url);
    if (!response.ok) {
      throw new Error(`HTTP ${response.status} for ${url}`);
    }
    return response.json();
  }

  /**
   * Fetch text from the remote repo.
   * NOTE(review): same unconsumed-body caveat as fetchJson.
   */
  private async fetchText(path: string): Promise<string> {
    const url = `${this.repoBaseUrl}/${path}`;
    const response = await fetch(url);
    if (!response.ok) {
      throw new Error(`HTTP ${response.status} for ${url}`);
    }
    return response.text();
  }
}
File diff suppressed because it is too large Load Diff
+57 -2
View File
@@ -59,6 +59,15 @@ export class BackupScheduler {
await this.registerTask(schedule); await this.registerTask(schedule);
} }
// Add periodic archive prune task (runs daily at 3 AM)
const pruneTask = new plugins.taskbuffer.Task({
name: 'backup-archive-prune',
taskFunction: async () => {
await this.pruneArchive();
},
});
this.taskManager.addAndScheduleTask(pruneTask, '0 3 * * *');
// Start the task manager (activates cron scheduling) // Start the task manager (activates cron scheduling)
await this.taskManager.start(); await this.taskManager.start();
@@ -436,9 +445,11 @@ export class BackupScheduler {
if (!toKeep.has(backup.id!)) { if (!toKeep.has(backup.id!)) {
try { try {
await this.oneboxRef.backupManager.deleteBackup(backup.id!); await this.oneboxRef.backupManager.deleteBackup(backup.id!);
logger.info(`Deleted backup ${backup.filename} (retention policy)`); const backupRef = backup.snapshotId || backup.filename;
logger.info(`Deleted backup ${backupRef} (retention policy)`);
} catch (error) { } catch (error) {
logger.warn(`Failed to delete old backup ${backup.filename}: ${getErrorMessage(error)}`); const backupRef = backup.snapshotId || backup.filename;
logger.warn(`Failed to delete old backup ${backupRef}: ${getErrorMessage(error)}`);
} }
} }
} }
@@ -647,4 +658,48 @@ export class BackupScheduler {
private getRetentionDescription(retention: IRetentionPolicy): string { private getRetentionDescription(retention: IRetentionPolicy): string {
return `H:${retention.hourly} D:${retention.daily} W:${retention.weekly} M:${retention.monthly}`; return `H:${retention.hourly} D:${retention.daily} W:${retention.weekly} M:${retention.monthly}`;
} }
/**
* Prune the containerarchive repository to reclaim storage.
* Uses the most generous retention policy across all schedules.
*/
private async pruneArchive(): Promise<void> {
const archive = this.oneboxRef.backupManager.archive;
if (!archive) return;
try {
// Compute the most generous retention across all schedules
const schedules = this.oneboxRef.database.getAllBackupSchedules();
// Default minimums if no schedules exist
let maxDays = 7;
let maxWeeks = 4;
let maxMonths = 12;
for (const schedule of schedules) {
if (schedule.retention.daily > maxDays) maxDays = schedule.retention.daily;
if (schedule.retention.weekly > maxWeeks) maxWeeks = schedule.retention.weekly;
if (schedule.retention.monthly > maxMonths) maxMonths = schedule.retention.monthly;
}
const result = await archive.prune(
{
keepDays: maxDays,
keepWeeks: maxWeeks,
keepMonths: maxMonths,
},
false, // not dry run
);
if (result.removedSnapshots > 0 || result.freedBytes > 0) {
const freedMB = Math.round(result.freedBytes / (1024 * 1024) * 10) / 10;
logger.info(
`Archive prune: removed ${result.removedSnapshots} snapshot(s), ` +
`${result.removedPacks} pack(s), freed ${freedMB} MB`
);
}
} catch (error) {
logger.warn(`Archive prune failed: ${getErrorMessage(error)}`);
}
}
} }
-592
View File
@@ -1,592 +0,0 @@
/**
* Caddy Manager for Onebox
*
* Manages Caddy as a Docker Swarm service instead of a host binary.
* This allows Caddy to access services on the Docker overlay network.
*/
import * as plugins from '../plugins.ts';
import { logger } from '../logging.ts';
import { getErrorMessage } from '../utils/error.ts';
// Name of the Docker Swarm service that runs Caddy.
const CADDY_SERVICE_NAME = 'onebox-caddy';
// Upstream image used when (re)creating the Caddy service.
const CADDY_IMAGE = 'caddy:2-alpine';
const DOCKER_GATEWAY_IP = '172.17.0.1'; // Docker bridge gateway for container-to-host communication

/** One reverse-proxy mapping from a public domain to an internal upstream. */
export interface ICaddyRoute {
  domain: string;
  upstream: string; // e.g., "onebox-hello-world:80"
}

/** PEM material for a certificate pushed into Caddy. */
export interface ICaddyCertificate {
  domain: string;
  certPem: string;
  keyPem: string;
}

/** Subset of Caddy's JSON `logging` app configuration used here. */
interface ICaddyLoggingConfig {
  logs: {
    [name: string]: {
      writer: {
        output: string;
        address?: string;
        dial_timeout?: string;
        soft_start?: boolean;
      };
      encoder?: { format: string };
      level?: string;
      include?: string[];
    };
  };
}

/**
 * Subset of Caddy's top-level JSON config document used here
 * (admin endpoint, optional logging, http servers, optional TLS material).
 */
interface ICaddyConfig {
  admin: {
    listen: string;
  };
  logging?: ICaddyLoggingConfig;
  apps: {
    http: {
      servers: {
        [key: string]: {
          listen: string[];
          routes: ICaddyRouteConfig[];
          automatic_https?: {
            disable?: boolean;
            disable_redirects?: boolean;
          };
          logs?: {
            default_logger_name: string;
          };
        };
      };
    };
    tls?: {
      automation?: {
        // Empty issuers list disables automatic issuance for matched policies.
        policies: Array<{ issuers: never[] }>;
      };
      certificates?: {
        load_pem?: Array<{
          certificate: string;
          key: string;
          tags?: string[];
        }>;
      };
    };
  };
}

/** A single route entry inside a Caddy http server config. */
interface ICaddyRouteConfig {
  match: Array<{ host: string[] }>;
  handle: Array<{
    handler: string;
    upstreams?: Array<{ dial: string }>;
    routes?: ICaddyRouteConfig[];
  }>;
  terminal?: boolean;
}
export class CaddyManager {
  // Lazily created Docker client (see ensureDockerClient).
  private dockerClient: InstanceType<typeof plugins.docker.Docker> | null = null;
  // Host directory where certificates are persisted for backup.
  private certsDir: string;
  // Base URL of Caddy's admin API as reached from the host.
  private adminUrl: string;
  private httpPort: number;
  private httpsPort: number;
  private logReceiverPort: number;
  private loggingEnabled: boolean;
  // Desired route/certificate state, keyed by domain; applied to Caddy on
  // configuration pushes.
  private routes: Map<string, ICaddyRoute> = new Map();
  private certificates: Map<string, ICaddyCertificate> = new Map();
  private networkName = 'onebox-network';
  private serviceRunning = false;

  // All options are optional; defaults suit a local single-node setup.
  // NOTE(review): `||` defaults mean an explicit 0 for any port falls back to
  // the default — presumably fine here, but confirm.
  constructor(options?: {
    certsDir?: string;
    adminPort?: number;
    httpPort?: number;
    httpsPort?: number;
    logReceiverPort?: number;
    loggingEnabled?: boolean;
  }) {
    this.certsDir = options?.certsDir || './.nogit/certs';
    this.adminUrl = `http://localhost:${options?.adminPort || 2019}`;
    this.httpPort = options?.httpPort || 8080;
    this.httpsPort = options?.httpsPort || 8443;
    this.logReceiverPort = options?.logReceiverPort || 9999;
    this.loggingEnabled = options?.loggingEnabled ?? true;
  }

  /**
   * Initialize Docker client for Caddy service management.
   * Idempotent: the client is created and started only once.
   */
  private async ensureDockerClient(): Promise<void> {
    if (!this.dockerClient) {
      this.dockerClient = new plugins.docker.Docker({
        socketPath: 'unix:///var/run/docker.sock',
      });
      await this.dockerClient.start();
    }
  }

  /**
   * Update listening ports (must call reloadConfig after if running)
   */
  setPorts(httpPort: number, httpsPort: number): void {
    this.httpPort = httpPort;
    this.httpsPort = httpsPort;
  }
/**
* Start Caddy as a Docker Swarm service
*/
async start(): Promise<void> {
if (this.serviceRunning) {
logger.warn('Caddy service is already running');
return;
}
try {
await this.ensureDockerClient();
// Create certs directory for backup/persistence
await Deno.mkdir(this.certsDir, { recursive: true });
logger.info('Starting Caddy Docker service...');
// Check if service already exists
const existingService = await this.getExistingService();
if (existingService) {
logger.info('Caddy service exists, removing old service...');
await this.removeService();
// Wait for service to be removed
await new Promise((resolve) => setTimeout(resolve, 2000));
}
// Get network ID
const networkId = await this.getNetworkId();
// Create Caddy Docker service
const response = await this.dockerClient!.request('POST', '/services/create', {
Name: CADDY_SERVICE_NAME,
Labels: {
'managed-by': 'onebox',
'onebox-type': 'caddy',
},
TaskTemplate: {
ContainerSpec: {
Image: CADDY_IMAGE,
// Start Caddy with admin listening on all interfaces so we can reach it from host
// Write minimal config to /tmp and start Caddy with that config
Command: ['sh', '-c', 'printf \'{"admin":{"listen":"0.0.0.0:2019"}}\' > /tmp/caddy.json && caddy run --config /tmp/caddy.json'],
},
Networks: [
{
Target: networkId,
},
],
RestartPolicy: {
Condition: 'any',
MaxAttempts: 0,
},
},
Mode: {
Replicated: {
Replicas: 1,
},
},
EndpointSpec: {
Ports: [
{
Protocol: 'tcp',
TargetPort: 80,
PublishedPort: this.httpPort,
PublishMode: 'host',
},
{
Protocol: 'tcp',
TargetPort: 443,
PublishedPort: this.httpsPort,
PublishMode: 'host',
},
{
Protocol: 'tcp',
TargetPort: 2019,
PublishedPort: 2019,
PublishMode: 'host',
},
],
},
});
if (response.statusCode >= 300) {
throw new Error(`Failed to create Caddy service: HTTP ${response.statusCode} - ${JSON.stringify(response.body)}`);
}
logger.info(`Caddy service created: ${response.body.ID}`);
// Wait for Admin API to be ready
await this.waitForReady();
this.serviceRunning = true;
// Now configure via Admin API with current routes and certificates
await this.reloadConfig();
logger.success(`Caddy started (HTTP: ${this.httpPort}, HTTPS: ${this.httpsPort}, Admin: ${this.adminUrl})`);
} catch (error) {
logger.error(`Failed to start Caddy: ${getErrorMessage(error)}`);
throw error;
}
}
/**
* Get existing Caddy service if any
*/
private async getExistingService(): Promise<any | null> {
try {
const response = await this.dockerClient!.request('GET', `/services/${CADDY_SERVICE_NAME}`, {});
if (response.statusCode === 200) {
return response.body;
}
return null;
} catch {
return null;
}
}
/**
* Remove the Caddy service
*/
private async removeService(): Promise<void> {
try {
await this.dockerClient!.request('DELETE', `/services/${CADDY_SERVICE_NAME}`, {});
} catch {
// Service may not exist
}
}
/**
* Get network ID by name
*/
private async getNetworkId(): Promise<string> {
const networks = await this.dockerClient!.listNetworks();
const network = networks.find((n: any) => n.Name === this.networkName);
if (!network) {
throw new Error(`Network not found: ${this.networkName}`);
}
return network.Id;
}
/**
* Wait for Caddy Admin API to be ready
*/
private async waitForReady(maxAttempts = 60, intervalMs = 500): Promise<void> {
for (let i = 0; i < maxAttempts; i++) {
try {
const response = await fetch(`${this.adminUrl}/config/`);
if (response.ok) {
return;
}
} catch {
// Not ready yet
}
await new Promise((resolve) => setTimeout(resolve, intervalMs));
}
throw new Error('Caddy service failed to start within timeout');
}
/**
* Stop Caddy Docker service
*/
async stop(): Promise<void> {
if (!this.serviceRunning && !(await this.getExistingService())) {
return;
}
try {
await this.ensureDockerClient();
logger.info('Stopping Caddy service...');
await this.removeService();
this.serviceRunning = false;
logger.info('Caddy service stopped');
} catch (error) {
logger.error(`Failed to stop Caddy: ${getErrorMessage(error)}`);
}
}
/**
* Check if Caddy Admin API is healthy
*/
async isHealthy(): Promise<boolean> {
try {
const response = await fetch(`${this.adminUrl}/config/`);
return response.ok;
} catch {
return false;
}
}
/**
* Check if Caddy service is running
*/
async isRunning(): Promise<boolean> {
try {
await this.ensureDockerClient();
const service = await this.getExistingService();
if (!service) return false;
// Check if service has running tasks
const tasksResponse = await this.dockerClient!.request(
'GET',
`/tasks?filters=${encodeURIComponent(JSON.stringify({ service: [CADDY_SERVICE_NAME] }))}`,
{}
);
if (tasksResponse.statusCode !== 200) return false;
const tasks = tasksResponse.body;
return tasks.some((task: any) => task.Status?.State === 'running');
} catch {
return false;
}
}
/**
* Build Caddy JSON configuration from current routes and certificates
*/
private buildConfig(): ICaddyConfig {
const routes: ICaddyRouteConfig[] = [];
// Add routes
for (const [domain, route] of this.routes) {
routes.push({
match: [{ host: [domain] }],
handle: [
{
handler: 'reverse_proxy',
upstreams: [{ dial: route.upstream }],
},
],
terminal: true,
});
}
// Build certificate load_pem entries (inline PEM content)
const loadPem: Array<{ certificate: string; key: string; tags?: string[] }> = [];
for (const [domain, cert] of this.certificates) {
loadPem.push({
certificate: cert.certPem,
key: cert.keyPem,
tags: [domain],
});
}
const config: ICaddyConfig = {
admin: {
listen: '0.0.0.0:2019', // Listen on all interfaces inside container
},
apps: {
http: {
servers: {
main: {
listen: [':80', ':443'],
routes,
// Disable automatic HTTPS to prevent Caddy from trying to obtain certs
automatic_https: {
disable: true,
},
},
},
},
},
};
// Add access logging configuration if enabled
if (this.loggingEnabled) {
config.logging = {
logs: {
access: {
writer: {
output: 'net',
// Use Docker bridge gateway IP to reach log receiver on host
address: `tcp/${DOCKER_GATEWAY_IP}:${this.logReceiverPort}`,
dial_timeout: '5s',
soft_start: true, // Continue even if log receiver is down
},
encoder: { format: 'json' },
level: 'INFO',
include: ['http.log.access'],
},
},
};
// Associate server with access logger
config.apps.http.servers.main.logs = {
default_logger_name: 'access',
};
}
// Add TLS config if we have certificates
if (loadPem.length > 0) {
config.apps.tls = {
automation: {
// Disable automatic HTTPS - we manage certs ourselves
policies: [{ issuers: [] }],
},
certificates: {
load_pem: loadPem,
},
};
}
return config;
}
/**
* Reload Caddy configuration via Admin API
*/
async reloadConfig(): Promise<void> {
const isRunning = await this.isRunning();
if (!isRunning) {
logger.warn('Caddy not running, cannot reload config');
return;
}
const config = this.buildConfig();
try {
const response = await fetch(`${this.adminUrl}/load`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(config),
});
if (!response.ok) {
const text = await response.text();
throw new Error(`Failed to reload Caddy config: ${response.status} ${text}`);
}
logger.debug('Caddy configuration reloaded');
} catch (error) {
logger.error(`Failed to reload Caddy config: ${getErrorMessage(error)}`);
throw error;
}
}
/**
* Add or update a route
*/
async addRoute(domain: string, upstream: string): Promise<void> {
this.routes.set(domain, { domain, upstream });
if (await this.isRunning()) {
await this.reloadConfig();
}
logger.success(`Added Caddy route: ${domain} -> ${upstream}`);
}
/**
* Remove a route
*/
async removeRoute(domain: string): Promise<void> {
if (this.routes.delete(domain)) {
if (await this.isRunning()) {
await this.reloadConfig();
}
logger.success(`Removed Caddy route: ${domain}`);
}
}
/**
* Add or update a TLS certificate
* Stores PEM content in memory for Admin API, also writes to disk for backup
*/
async addCertificate(domain: string, certPem: string, keyPem: string): Promise<void> {
// Store PEM content in memory for buildConfig()
this.certificates.set(domain, {
domain,
certPem,
keyPem,
});
// Also write to disk for backup/persistence
try {
await Deno.mkdir(this.certsDir, { recursive: true });
await Deno.writeTextFile(`${this.certsDir}/${domain}.crt`, certPem);
await Deno.writeTextFile(`${this.certsDir}/${domain}.key`, keyPem);
} catch (error) {
logger.warn(`Failed to write certificate backup for ${domain}: ${getErrorMessage(error)}`);
}
if (await this.isRunning()) {
await this.reloadConfig();
}
logger.success(`Added TLS certificate for ${domain}`);
}
/**
* Remove a TLS certificate
*/
async removeCertificate(domain: string): Promise<void> {
if (this.certificates.delete(domain)) {
// Remove backup files
try {
await Deno.remove(`${this.certsDir}/${domain}.crt`);
await Deno.remove(`${this.certsDir}/${domain}.key`);
} catch {
// Files may not exist
}
if (await this.isRunning()) {
await this.reloadConfig();
}
logger.success(`Removed TLS certificate for ${domain}`);
}
}
/**
* Get all current routes
*/
getRoutes(): ICaddyRoute[] {
return Array.from(this.routes.values());
}
/**
* Get all current certificates
*/
getCertificates(): ICaddyCertificate[] {
return Array.from(this.certificates.values());
}
/**
* Clear all routes and certificates (useful for reload from database)
*/
clear(): void {
this.routes.clear();
this.certificates.clear();
}
/**
* Get status
*/
getStatus(): {
running: boolean;
httpPort: number;
httpsPort: number;
routes: number;
certificates: number;
} {
return {
running: this.serviceRunning,
httpPort: this.httpPort,
httpsPort: this.httpsPort,
routes: this.routes.size,
certificates: this.certificates.size,
};
}
}
+1 -1
View File
@@ -24,7 +24,7 @@ export class CloudflareDomainSync {
*/ */
async init(): Promise<void> { async init(): Promise<void> {
try { try {
const apiKey = this.database.getSetting('cloudflareAPIKey'); const apiKey = await this.database.getSecretSetting('cloudflareToken');
if (!apiKey) { if (!apiKey) {
logger.warn('Cloudflare API key not configured. Domain sync will be limited.'); logger.warn('Cloudflare API key not configured. Domain sync will be limited.');
+2 -2
View File
@@ -27,12 +27,12 @@ export class OneboxDnsManager {
async init(): Promise<void> { async init(): Promise<void> {
try { try {
// Get Cloudflare credentials from settings // Get Cloudflare credentials from settings
const apiKey = this.database.getSetting('cloudflareAPIKey'); const apiKey = await this.database.getSecretSetting('cloudflareToken');
const serverIP = this.database.getSetting('serverIP'); const serverIP = this.database.getSetting('serverIP');
if (!apiKey) { if (!apiKey) {
logger.warn('Cloudflare credentials not configured. DNS management will be disabled.'); logger.warn('Cloudflare credentials not configured. DNS management will be disabled.');
logger.info('Configure with: onebox config set cloudflareAPIKey <key>'); logger.info('Configure with: onebox config set cloudflareToken <key>');
return; return;
} }
+20 -2
View File
@@ -36,6 +36,23 @@ export class OneboxDockerManager {
} }
} }
/**
* Release resources held by the Docker API client.
*/
async stop(): Promise<void> {
if (!this.dockerClient) {
return;
}
try {
await this.dockerClient.stop();
} catch (error) {
logger.error(`Failed to stop Docker client: ${getErrorMessage(error)}`);
} finally {
this.dockerClient = null;
}
}
/** /**
* Ensure onebox network exists * Ensure onebox network exists
*/ */
@@ -935,8 +952,9 @@ export class OneboxDockerManager {
logger.info(`Pulling image for platform service: ${options.image}`); logger.info(`Pulling image for platform service: ${options.image}`);
await this.pullImage(options.image); await this.pullImage(options.image);
// Check if container already exists // Check running and stopped containers; stopped platform containers still reserve names.
const existingContainers = await this.dockerClient!.listContainers(); const existingContainersResponse = await this.dockerClient!.request('GET', '/containers/json?all=true', {});
const existingContainers = Array.isArray(existingContainersResponse.body) ? existingContainersResponse.body : [];
const existing = existingContainers.find((c: any) => const existing = existingContainers.find((c: any) =>
c.Names?.some((n: string) => n === `/${options.name}` || n === options.name) c.Names?.some((n: string) => n === `/${options.name}` || n === options.name)
); );
+16 -6
View File
@@ -97,7 +97,11 @@ export class CredentialEncryption {
*/ */
async encrypt(data: Record<string, string>): Promise<string> { async encrypt(data: Record<string, string>): Promise<string> {
if (!this.key) { if (!this.key) {
throw new Error('Encryption not initialized. Call init() first.'); await this.init();
}
const key = this.key;
if (!key) {
throw new Error('Encryption key initialization failed.');
} }
const iv = crypto.getRandomValues(new Uint8Array(this.ivLength)); const iv = crypto.getRandomValues(new Uint8Array(this.ivLength));
@@ -105,7 +109,7 @@ export class CredentialEncryption {
const ciphertext = await crypto.subtle.encrypt( const ciphertext = await crypto.subtle.encrypt(
{ name: this.algorithm, iv }, { name: this.algorithm, iv },
this.key, key,
encoded encoded
); );
@@ -120,9 +124,15 @@ export class CredentialEncryption {
/** /**
* Decrypt a base64 string back to credentials object * Decrypt a base64 string back to credentials object
*/ */
async decrypt(encrypted: string): Promise<Record<string, string>> { async decrypt<T extends Record<string, string> = Record<string, string>>(
encrypted: string,
): Promise<T> {
if (!this.key) { if (!this.key) {
throw new Error('Encryption not initialized. Call init() first.'); await this.init();
}
const key = this.key;
if (!key) {
throw new Error('Encryption key initialization failed.');
} }
const combined = this.base64ToBytes(encrypted); const combined = this.base64ToBytes(encrypted);
@@ -133,12 +143,12 @@ export class CredentialEncryption {
const decrypted = await crypto.subtle.decrypt( const decrypted = await crypto.subtle.decrypt(
{ name: this.algorithm, iv }, { name: this.algorithm, iv },
this.key, key,
ciphertext ciphertext
); );
const decoded = new TextDecoder().decode(decrypted); const decoded = new TextDecoder().decode(decrypted);
return JSON.parse(decoded); return JSON.parse(decoded) as T;
} }
/** /**
+352
View File
@@ -0,0 +1,352 @@
import * as plugins from '../plugins.ts';
import { logger } from '../logging.ts';
import { getErrorMessage } from '../utils/error.ts';
import { OneboxDatabase } from './database.ts';
import type { IDomain, IService } from '../types.ts';
type TWorkHosterType = 'onebox';
interface IExternalGatewayConfig {
url: string;
apiToken: string;
workHosterId: string;
targetHost?: string;
targetPort?: number;
}
interface IWorkHosterDomain {
name: string;
capabilities?: {
canCreateSubdomains: boolean;
canManageDnsRecords: boolean;
canIssueCertificates: boolean;
canHostEmail: boolean;
};
}
interface IWorkAppRouteOwnership {
workHosterType: TWorkHosterType;
workHosterId: string;
workAppId: string;
hostname: string;
}
interface IWorkAppRouteSyncResult {
success: boolean;
action?: 'created' | 'updated' | 'deleted' | 'unchanged';
routeId?: string;
message?: string;
}
interface IDcRouterCertificateExport {
success: boolean;
cert?: {
id: string;
domainName: string;
created: number;
validUntil: number;
privateKey: string;
publicKey: string;
csr: string;
};
message?: string;
}
interface IDcRouterRouteConfig {
name: string;
match: {
ports: number[];
domains: string[];
};
action: {
type: 'forward';
targets: Array<{ host: string; port: number }>;
tls: {
mode: 'terminate';
certificate: 'auto';
};
websocket: {
enabled: boolean;
};
};
}
export class ExternalGatewayManager {
private database: OneboxDatabase;
constructor(private oneboxRef: any) {
this.database = oneboxRef.database;
}
public async init(): Promise<void> {
if (!(await this.isConfigured())) {
logger.info('External dcrouter gateway not configured');
return;
}
await this.syncDomains();
}
public async isConfigured(): Promise<boolean> {
const config = await this.getConfig({ requireTarget: false });
return Boolean(config);
}
public async syncDomains(): Promise<IDomain[]> {
const config = await this.requireConfig({ requireTarget: false });
const response = await this.fireDcRouterRequest<{ domains: IWorkHosterDomain[] }>(
'getWorkHosterDomains',
{},
config,
);
const activeDomainNames = new Set<string>();
const now = Date.now();
for (const gatewayDomain of response.domains) {
const domainName = gatewayDomain.name.trim().toLowerCase();
if (!domainName) continue;
activeDomainNames.add(domainName);
const existingDomain = this.database.getDomainByName(domainName);
const defaultWildcard = gatewayDomain.capabilities?.canIssueCertificates !== false;
if (existingDomain) {
this.database.updateDomain(existingDomain.id!, {
dnsProvider: 'dcrouter',
isObsolete: false,
defaultWildcard,
updatedAt: now,
});
} else {
this.database.createDomain({
domain: domainName,
dnsProvider: 'dcrouter',
isObsolete: false,
defaultWildcard,
createdAt: now,
updatedAt: now,
});
}
}
for (const domain of this.database.getDomainsByProvider('dcrouter')) {
if (!activeDomainNames.has(domain.domain)) {
this.database.updateDomain(domain.id!, {
isObsolete: true,
updatedAt: now,
});
}
}
logger.success(`Synced ${activeDomainNames.size} domain(s) from external dcrouter gateway`);
return this.database.getDomainsByProvider('dcrouter');
}
public async syncServiceRoute(service: IService): Promise<void> {
if (!service.domain) return;
const config = await this.getConfig({ requireTarget: true });
if (!config) return;
const result = await this.fireDcRouterRequest<IWorkAppRouteSyncResult>(
'syncWorkAppRoute',
{
ownership: this.buildOwnership(service, service.domain, config),
route: this.buildRoute(service, config),
enabled: service.status === 'running',
},
config,
);
if (!result.success) {
throw new Error(result.message || `dcrouter route sync failed for ${service.domain}`);
}
logger.success(`External gateway route ${result.action || 'synced'} for ${service.domain}`);
await this.importCertificateForDomain(service.domain).catch((error) => {
logger.debug(`External gateway certificate import skipped for ${service.domain}: ${getErrorMessage(error)}`);
});
}
public async deleteServiceRoute(service: Pick<IService, 'id' | 'name' | 'domain'>): Promise<void> {
if (!service.domain) return;
const config = await this.getConfig({ requireTarget: false });
if (!config) return;
const result = await this.fireDcRouterRequest<IWorkAppRouteSyncResult>(
'syncWorkAppRoute',
{
ownership: this.buildOwnership(service, service.domain, config),
delete: true,
},
config,
);
if (!result.success) {
throw new Error(result.message || `dcrouter route delete failed for ${service.domain}`);
}
logger.info(`External gateway route ${result.action || 'deleted'} for ${service.domain}`);
}
public async importCertificateForDomain(domain: string): Promise<boolean> {
const config = await this.getConfig({ requireTarget: false });
if (!config) return false;
const result = await this.fireDcRouterRequest<IDcRouterCertificateExport>(
'exportCertificate',
{ domain },
config,
);
if (!result.success || !result.cert) {
return false;
}
const now = Date.now();
const existingCertificate = this.database.getSSLCertificate(domain);
if (existingCertificate) {
this.database.updateSSLCertificate(domain, {
certPem: result.cert.publicKey,
keyPem: result.cert.privateKey,
fullchainPem: result.cert.publicKey,
expiryDate: result.cert.validUntil,
updatedAt: now,
});
} else {
await this.database.createSSLCertificate({
domain,
certPem: result.cert.publicKey,
keyPem: result.cert.privateKey,
fullchainPem: result.cert.publicKey,
expiryDate: result.cert.validUntil,
issuer: 'dcrouter',
createdAt: now,
updatedAt: now,
});
}
await this.oneboxRef.reverseProxy.reloadCertificates();
logger.success(`Imported external gateway certificate for ${domain}`);
return true;
}
private async getConfig(options: { requireTarget?: boolean } = {}): Promise<IExternalGatewayConfig | null> {
const url = this.normalizeUrl(this.database.getSetting('dcrouterGatewayUrl') || '');
const apiToken = await this.database.getSecretSetting('dcrouterGatewayApiToken');
if (!url || !apiToken) {
return null;
}
const config: IExternalGatewayConfig = {
url,
apiToken,
workHosterId: this.ensureWorkHosterId(),
};
if (options.requireTarget !== false) {
config.targetHost = this.database.getSetting('dcrouterTargetHost')
|| this.database.getSetting('serverIP')
|| undefined;
const targetPort = this.parsePort(
this.database.getSetting('dcrouterTargetPort')
|| this.database.getSetting('httpPort')
|| '80',
);
config.targetPort = targetPort;
if (!config.targetHost) {
throw new Error('dcrouterTargetHost or serverIP must be configured for external gateway route sync');
}
}
return config;
}
private async requireConfig(options: { requireTarget?: boolean } = {}): Promise<IExternalGatewayConfig> {
const config = await this.getConfig(options);
if (!config) {
throw new Error('External dcrouter gateway is not configured');
}
return config;
}
private normalizeUrl(url: string): string {
const trimmedUrl = url.trim().replace(/\/+$/, '');
if (!trimmedUrl) return '';
if (/^https?:\/\//.test(trimmedUrl)) return trimmedUrl;
return `https://${trimmedUrl}`;
}
private parsePort(portValue: string): number {
const port = Number(portValue);
if (!Number.isInteger(port) || port < 1 || port > 65535) {
throw new Error(`Invalid dcrouter target port: ${portValue}`);
}
return port;
}
private ensureWorkHosterId(): string {
let workHosterId = this.database.getSetting('dcrouterWorkHosterId');
if (!workHosterId) {
workHosterId = crypto.randomUUID();
this.database.setSetting('dcrouterWorkHosterId', workHosterId);
}
return workHosterId;
}
private buildOwnership(
service: Pick<IService, 'id' | 'name'>,
hostname: string,
config: IExternalGatewayConfig,
): IWorkAppRouteOwnership {
return {
workHosterType: 'onebox',
workHosterId: config.workHosterId,
workAppId: service.name || `service-${service.id}`,
hostname,
};
}
private buildRoute(service: IService, config: IExternalGatewayConfig): IDcRouterRouteConfig {
return {
name: this.routeName(service.domain!),
match: {
ports: [443],
domains: [service.domain!],
},
action: {
type: 'forward',
targets: [{ host: config.targetHost!, port: config.targetPort! }],
tls: {
mode: 'terminate',
certificate: 'auto',
},
websocket: {
enabled: true,
},
},
};
}
private routeName(domain: string): string {
return `onebox-${domain.replace(/[^a-zA-Z0-9]+/g, '-').replace(/^-|-$/g, '')}`;
}
private async fireDcRouterRequest<TResponse>(
method: string,
requestData: Record<string, unknown>,
config: IExternalGatewayConfig,
): Promise<TResponse> {
const typedRequest = new plugins.typedrequest.TypedRequest<any>(
`${config.url}/typedrequest`,
method,
);
return await typedRequest.fire({
...requestData,
apiToken: config.apiToken,
}) as TResponse;
}
}
File diff suppressed because it is too large Load Diff
+70 -22
View File
@@ -6,6 +6,7 @@
import { logger } from '../logging.ts'; import { logger } from '../logging.ts';
import { getErrorMessage } from '../utils/error.ts'; import { getErrorMessage } from '../utils/error.ts';
import { hashPassword } from '../utils/auth.ts';
import { OneboxDatabase } from './database.ts'; import { OneboxDatabase } from './database.ts';
import { OneboxDockerManager } from './docker.ts'; import { OneboxDockerManager } from './docker.ts';
import { OneboxServicesManager } from './services.ts'; import { OneboxServicesManager } from './services.ts';
@@ -15,14 +16,15 @@ import { OneboxDnsManager } from './dns.ts';
import { OneboxSslManager } from './ssl.ts'; import { OneboxSslManager } from './ssl.ts';
import { OneboxDaemon } from './daemon.ts'; import { OneboxDaemon } from './daemon.ts';
import { OneboxSystemd } from './systemd.ts'; import { OneboxSystemd } from './systemd.ts';
import { OneboxHttpServer } from './httpserver.ts';
import { CloudflareDomainSync } from './cloudflare-sync.ts'; import { CloudflareDomainSync } from './cloudflare-sync.ts';
import { CertRequirementManager } from './cert-requirement-manager.ts'; import { CertRequirementManager } from './cert-requirement-manager.ts';
import { RegistryManager } from './registry.ts'; import { RegistryManager } from './registry.ts';
import { PlatformServicesManager } from './platform-services/index.ts'; import { PlatformServicesManager } from './platform-services/index.ts';
import { CaddyLogReceiver } from './caddy-log-receiver.ts'; import { AppStoreManager } from './appstore.ts';
import { ProxyLogReceiver } from './proxy-log-receiver.ts';
import { BackupManager } from './backup-manager.ts'; import { BackupManager } from './backup-manager.ts';
import { BackupScheduler } from './backup-scheduler.ts'; import { BackupScheduler } from './backup-scheduler.ts';
import { ExternalGatewayManager } from './external-gateway.ts';
import { OpsServer } from '../opsserver/index.ts'; import { OpsServer } from '../opsserver/index.ts';
export class Onebox { export class Onebox {
@@ -35,14 +37,15 @@ export class Onebox {
public ssl: OneboxSslManager; public ssl: OneboxSslManager;
public daemon: OneboxDaemon; public daemon: OneboxDaemon;
public systemd: OneboxSystemd; public systemd: OneboxSystemd;
public httpServer: OneboxHttpServer;
public cloudflareDomainSync: CloudflareDomainSync; public cloudflareDomainSync: CloudflareDomainSync;
public certRequirementManager: CertRequirementManager; public certRequirementManager: CertRequirementManager;
public registry: RegistryManager; public registry: RegistryManager;
public platformServices: PlatformServicesManager; public platformServices: PlatformServicesManager;
public caddyLogReceiver: CaddyLogReceiver; public appStore: AppStoreManager;
public proxyLogReceiver: ProxyLogReceiver;
public backupManager: BackupManager; public backupManager: BackupManager;
public backupScheduler: BackupScheduler; public backupScheduler: BackupScheduler;
public externalGateway: ExternalGatewayManager;
public opsServer: OpsServer; public opsServer: OpsServer;
private initialized = false; private initialized = false;
@@ -60,11 +63,10 @@ export class Onebox {
this.ssl = new OneboxSslManager(this); this.ssl = new OneboxSslManager(this);
this.daemon = new OneboxDaemon(this); this.daemon = new OneboxDaemon(this);
this.systemd = new OneboxSystemd(); this.systemd = new OneboxSystemd();
this.httpServer = new OneboxHttpServer(this);
this.registry = new RegistryManager({ this.registry = new RegistryManager({
dataDir: './.nogit/registry-data', dataDir: './.nogit/registry-data',
port: 4000, port: 4000,
baseUrl: 'localhost:5000', baseUrl: 'localhost:3000',
}); });
// Initialize domain management // Initialize domain management
@@ -74,8 +76,11 @@ export class Onebox {
// Initialize platform services manager // Initialize platform services manager
this.platformServices = new PlatformServicesManager(this); this.platformServices = new PlatformServicesManager(this);
// Initialize Caddy log receiver // Initialize App Store manager
this.caddyLogReceiver = new CaddyLogReceiver(9999); this.appStore = new AppStoreManager(this);
// Initialize reverse proxy log receiver
this.proxyLogReceiver = new ProxyLogReceiver(9999);
// Initialize Backup manager // Initialize Backup manager
this.backupManager = new BackupManager(this); this.backupManager = new BackupManager(this);
@@ -83,6 +88,9 @@ export class Onebox {
// Initialize Backup scheduler // Initialize Backup scheduler
this.backupScheduler = new BackupScheduler(this); this.backupScheduler = new BackupScheduler(this);
// Initialize optional dcrouter edge gateway integration
this.externalGateway = new ExternalGatewayManager(this);
// Initialize OpsServer (TypedRequest-based server) // Initialize OpsServer (TypedRequest-based server)
this.opsServer = new OpsServer(this); this.opsServer = new OpsServer(this);
} }
@@ -103,11 +111,11 @@ export class Onebox {
// Initialize Docker // Initialize Docker
await this.docker.init(); await this.docker.init();
// Start Caddy log receiver BEFORE reverse proxy (so Caddy can connect to it) // Start proxy log receiver before reverse proxy startup.
try { try {
await this.caddyLogReceiver.start(); await this.proxyLogReceiver.start();
} catch (error) { } catch (error) {
logger.warn(`Failed to start Caddy log receiver: ${getErrorMessage(error)}`); logger.warn(`Failed to start proxy log receiver: ${getErrorMessage(error)}`);
} }
// Initialize Reverse Proxy // Initialize Reverse Proxy
@@ -157,6 +165,14 @@ export class Onebox {
logger.warn('Cloudflare domain sync initialization failed - domain sync will be limited'); logger.warn('Cloudflare domain sync initialization failed - domain sync will be limited');
} }
// Initialize external dcrouter gateway (non-critical)
try {
await this.externalGateway.init();
} catch (error) {
logger.warn('External dcrouter gateway initialization failed - edge sync will be disabled');
logger.warn(`Error: ${getErrorMessage(error)}`);
}
// Initialize Onebox Registry (non-critical) // Initialize Onebox Registry (non-critical)
try { try {
await this.registry.init(); await this.registry.init();
@@ -173,12 +189,28 @@ export class Onebox {
logger.warn(`Error: ${getErrorMessage(error)}`); logger.warn(`Error: ${getErrorMessage(error)}`);
} }
// Initialize App Store (non-critical)
try {
await this.appStore.init();
} catch (error) {
logger.warn('App Store initialization failed - app templates will be unavailable until reconnected');
logger.warn(`Error: ${getErrorMessage(error)}`);
}
// Login to all registries // Login to all registries
await this.registries.loginToAllRegistries(); await this.registries.loginToAllRegistries();
// Start auto-update monitoring for registry services // Start auto-update monitoring for registry services
this.services.startAutoUpdateMonitoring(); this.services.startAutoUpdateMonitoring();
// Initialize BackupManager (containerarchive repository, non-critical)
try {
await this.backupManager.init();
} catch (error) {
logger.warn('BackupManager initialization failed - backups will be limited');
logger.warn(`Error: ${getErrorMessage(error)}`);
}
// Initialize Backup Scheduler (non-critical) // Initialize Backup Scheduler (non-critical)
try { try {
await this.backupScheduler.init(); await this.backupScheduler.init();
@@ -200,24 +232,31 @@ export class Onebox {
*/ */
private async ensureDefaultUser(): Promise<void> { private async ensureDefaultUser(): Promise<void> {
try { try {
const adminUser = this.database.getUserByUsername('admin'); const adminUsername = Deno.env.get('ONEBOX_ADMIN_USERNAME') || 'admin';
const adminUser = this.database.getUserByUsername(adminUsername);
if (!adminUser) { if (!adminUser) {
logger.info('Creating default admin user...'); logger.info(`Creating initial admin user ${adminUsername}...`);
// Simple base64 encoding for now - should use bcrypt in production const configuredPassword = Deno.env.get('ONEBOX_ADMIN_PASSWORD');
const passwordHash = btoa('admin'); const initialPassword = configuredPassword || crypto.randomUUID().replaceAll('-', '');
const passwordHash = await hashPassword(initialPassword);
await this.database.createUser({ await this.database.createUser({
username: 'admin', username: adminUsername,
passwordHash, passwordHash,
role: 'admin', role: 'admin',
createdAt: Date.now(), createdAt: Date.now(),
updatedAt: Date.now(), updatedAt: Date.now(),
}); });
logger.warn('Default admin user created with username: admin, password: admin'); if (configuredPassword) {
logger.warn('IMPORTANT: Change the default password immediately!'); logger.warn(`Initial admin user created from ONEBOX_ADMIN_PASSWORD: ${adminUsername}`);
} else {
logger.warn(`Initial admin user created: ${adminUsername}`);
logger.warn(`Generated one-time admin password: ${initialPassword}`);
}
logger.warn('Change the initial admin password immediately.');
} }
} catch (error) { } catch (error) {
logger.error(`Failed to create default user: ${getErrorMessage(error)}`); logger.error(`Failed to create default user: ${getErrorMessage(error)}`);
@@ -250,9 +289,9 @@ export class Onebox {
const providers = this.platformServices.getAllProviders(); const providers = this.platformServices.getAllProviders();
const platformServicesStatus = providers.map((provider) => { const platformServicesStatus = providers.map((provider) => {
const service = platformServices.find((s) => s.type === provider.type); const service = platformServices.find((s) => s.type === provider.type);
// For Caddy, check actual runtime status since it starts without a DB record // For SmartProxy, check actual runtime status since it starts without a DB record
let status = service?.status || 'not-deployed'; let status = service?.status || 'not-deployed';
if (provider.type === 'caddy') { if (provider.type === 'smartproxy') {
status = proxyStatus.http.running ? 'running' : 'stopped'; status = proxyStatus.http.running ? 'running' : 'stopped';
} }
// Count resources for this platform service // Count resources for this platform service
@@ -414,8 +453,17 @@ export class Onebox {
// Stop reverse proxy if running // Stop reverse proxy if running
await this.reverseProxy.stop(); await this.reverseProxy.stop();
// Stop Caddy log receiver // Stop proxy log receiver
await this.caddyLogReceiver.stop(); await this.proxyLogReceiver.stop();
// Stop built-in registry and backing smartstorage server
await this.registry.stop();
// Close backup archive
await this.backupManager.close();
// Release Docker client resources after all Docker-backed managers stopped.
await this.docker.stop();
// Close database // Close database
this.database.close(); this.database.close();
+3
View File
@@ -8,3 +8,6 @@ export type { IPlatformServiceProvider } from './providers/base.ts';
export { BasePlatformServiceProvider } from './providers/base.ts'; export { BasePlatformServiceProvider } from './providers/base.ts';
export { MongoDBProvider } from './providers/mongodb.ts'; export { MongoDBProvider } from './providers/mongodb.ts';
export { MinioProvider } from './providers/minio.ts'; export { MinioProvider } from './providers/minio.ts';
export { ClickHouseProvider } from './providers/clickhouse.ts';
export { MariaDBProvider } from './providers/mariadb.ts';
export { RedisProvider } from './providers/redis.ts';
+60 -2
View File
@@ -14,8 +14,10 @@ import type {
import type { IPlatformServiceProvider } from './providers/base.ts'; import type { IPlatformServiceProvider } from './providers/base.ts';
import { MongoDBProvider } from './providers/mongodb.ts'; import { MongoDBProvider } from './providers/mongodb.ts';
import { MinioProvider } from './providers/minio.ts'; import { MinioProvider } from './providers/minio.ts';
import { CaddyProvider } from './providers/caddy.ts'; import { SmartProxyProvider } from './providers/smartproxy.ts';
import { ClickHouseProvider } from './providers/clickhouse.ts'; import { ClickHouseProvider } from './providers/clickhouse.ts';
import { MariaDBProvider } from './providers/mariadb.ts';
import { RedisProvider } from './providers/redis.ts';
import { logger } from '../../logging.ts'; import { logger } from '../../logging.ts';
import { getErrorMessage } from '../../utils/error.ts'; import { getErrorMessage } from '../../utils/error.ts';
import { credentialEncryption } from '../encryption.ts'; import { credentialEncryption } from '../encryption.ts';
@@ -39,8 +41,10 @@ export class PlatformServicesManager {
// Register providers // Register providers
this.registerProvider(new MongoDBProvider(this.oneboxRef)); this.registerProvider(new MongoDBProvider(this.oneboxRef));
this.registerProvider(new MinioProvider(this.oneboxRef)); this.registerProvider(new MinioProvider(this.oneboxRef));
this.registerProvider(new CaddyProvider(this.oneboxRef)); this.registerProvider(new SmartProxyProvider(this.oneboxRef));
this.registerProvider(new ClickHouseProvider(this.oneboxRef)); this.registerProvider(new ClickHouseProvider(this.oneboxRef));
this.registerProvider(new MariaDBProvider(this.oneboxRef));
this.registerProvider(new RedisProvider(this.oneboxRef));
logger.info(`Platform services manager initialized with ${this.providers.size} providers`); logger.info(`Platform services manager initialized with ${this.providers.size} providers`);
} }
@@ -304,6 +308,60 @@ export class PlatformServicesManager {
logger.success(`ClickHouse provisioned for service '${service.name}'`); logger.success(`ClickHouse provisioned for service '${service.name}'`);
} }
// Provision Redis if requested
if (requirements.redis) {
logger.info(`Provisioning Redis for service '${service.name}'...`);
// Ensure Redis is running
const redisService = await this.ensureRunning('redis');
const provider = this.providers.get('redis')!;
// Provision cache resource
const result = await provider.provisionResource(service);
// Store resource record
const encryptedCreds = await credentialEncryption.encrypt(result.credentials);
this.oneboxRef.database.createPlatformResource({
platformServiceId: redisService.id!,
serviceId: service.id!,
resourceType: result.type,
resourceName: result.name,
credentialsEncrypted: encryptedCreds,
createdAt: Date.now(),
});
// Merge env vars
Object.assign(allEnvVars, result.envVars);
logger.success(`Redis provisioned for service '${service.name}'`);
}
// Provision MariaDB if requested
if (requirements.mariadb) {
logger.info(`Provisioning MariaDB for service '${service.name}'...`);
// Ensure MariaDB is running
const mariadbService = await this.ensureRunning('mariadb');
const provider = this.providers.get('mariadb')!;
// Provision database
const result = await provider.provisionResource(service);
// Store resource record
const encryptedCreds = await credentialEncryption.encrypt(result.credentials);
this.oneboxRef.database.createPlatformResource({
platformServiceId: mariadbService.id!,
serviceId: service.id!,
resourceType: result.type,
resourceName: result.name,
credentialsEncrypted: encryptedCreds,
createdAt: Date.now(),
});
// Merge env vars
Object.assign(allEnvVars, result.envVars);
logger.success(`MariaDB provisioned for service '${service.name}'`);
}
return allEnvVars; return allEnvVars;
} }
@@ -103,6 +103,17 @@ export abstract class BasePlatformServiceProvider implements IPlatformServicePro
return `onebox-${this.type}`; return `onebox-${this.type}`;
} }
/**
* Get the host data directory for a platform service.
*/
protected getPlatformDataDir(serviceDirectoryArg: string): string {
  // Resolve the base directory: an explicit 'dataDir' setting wins; otherwise
  // use a repo-local dir in dev mode and the system default in production.
  const configuredDataDir = this.oneboxRef.database.getSetting('dataDir');
  const devMode = Deno.env.get('ONEBOX_DEV') === 'true';
  const fallbackDataDir = devMode ? './.nogit/platform-data' : '/var/lib/onebox';
  const baseDataDir = configuredDataDir || fallbackDataDir;
  // Docker bind mounts require absolute host paths, so anchor relative
  // paths at the current working directory.
  let absoluteBaseDataDir = baseDataDir;
  if (!absoluteBaseDataDir.startsWith('/')) {
    absoluteBaseDataDir = `${Deno.cwd()}/${absoluteBaseDataDir}`;
  }
  // Strip any trailing slashes before appending the per-service directory.
  const normalizedBase = absoluteBaseDataDir.replace(/\/+$/, '');
  return `${normalizedBase}/${serviceDirectoryArg}`;
}
/** /**
* Generate a resource name from a user service name * Generate a resource name from a user service name
*/ */
@@ -1,110 +0,0 @@
/**
* Caddy Platform Service Provider
*
* Caddy is a core infrastructure service that provides reverse proxy functionality.
* Unlike other platform services:
* - It doesn't provision resources for user services
* - It's started automatically by Onebox and cannot be stopped by users
* - It delegates to the existing CaddyManager for actual operations
*/
import { BasePlatformServiceProvider } from './base.ts';
import type {
IService,
IPlatformResource,
IPlatformServiceConfig,
IProvisionedResource,
IEnvVarMapping,
TPlatformServiceType,
TPlatformResourceType,
} from '../../../types.ts';
import { logger } from '../../../logging.ts';
import type { Onebox } from '../../onebox.ts';
export class CaddyProvider extends BasePlatformServiceProvider {
  readonly type: TPlatformServiceType = 'caddy';
  readonly displayName = 'Caddy Reverse Proxy';
  readonly resourceTypes: TPlatformResourceType[] = []; // Caddy doesn't provision resources
  readonly isCore = true; // Core infrastructure - cannot be stopped by users

  constructor(oneboxRef: Onebox) {
    super(oneboxRef);
  }

  /**
   * Static container configuration for Caddy.
   * Actual lifecycle is delegated to the reverse proxy manager, so no
   * volumes or environment variables are declared here.
   */
  getDefaultConfig(): IPlatformServiceConfig {
    return {
      image: 'caddy:2-alpine',
      port: 80,
      volumes: [],
      environment: {},
    };
  }

  getEnvVarMappings(): IEnvVarMapping[] {
    // Caddy doesn't inject any env vars into user services
    return [];
  }

  /**
   * Deploy Caddy container - delegates to CaddyManager via reverseProxy.
   *
   * Returns the fixed service name 'onebox-caddy' rather than a Docker
   * container ID, because Caddy runs as a named Swarm service.
   */
  async deployContainer(): Promise<string> {
    logger.info('Starting Caddy via reverse proxy manager...');

    // Get the reverse proxy which manages Caddy
    const reverseProxy = this.oneboxRef.reverseProxy;

    // Start reverse proxy (which starts Caddy)
    await reverseProxy.startHttp();

    // Mark the platform service record as running; the container ID is the
    // well-known Swarm service name, not a runtime-assigned ID.
    const platformService = this.oneboxRef.database.getPlatformServiceByType(this.type);
    if (platformService) {
      this.oneboxRef.database.updatePlatformService(platformService.id!, {
        status: 'running',
        containerId: 'onebox-caddy', // Service name for Swarm services
      });
    }

    logger.success('Caddy platform service started');
    return 'onebox-caddy';
  }

  /**
   * Stop Caddy container - NOT ALLOWED for core infrastructure.
   * @throws Error always; Caddy must stay up while Onebox runs.
   */
  async stopContainer(_containerId: string): Promise<void> {
    throw new Error('Caddy is a core infrastructure service and cannot be stopped');
  }

  /**
   * Check if Caddy is healthy via the reverse proxy.
   * @returns true when the reverse proxy reports its HTTP listener running.
   */
  async healthCheck(): Promise<boolean> {
    try {
      const reverseProxy = this.oneboxRef.reverseProxy;
      const status = reverseProxy.getStatus();
      return status.http.running;
    } catch (error) {
      logger.debug(`Caddy health check failed: ${error}`);
      return false;
    }
  }

  /**
   * Caddy doesn't provision resources for user services.
   * @throws Error always.
   */
  async provisionResource(_userService: IService): Promise<IProvisionedResource> {
    throw new Error('Caddy does not provision resources for user services');
  }

  /**
   * Caddy doesn't deprovision resources.
   * @throws Error always.
   */
  async deprovisionResource(_resource: IPlatformResource, _credentials: Record<string, string>): Promise<void> {
    throw new Error('Caddy does not manage resources for user services');
  }
}
@@ -30,7 +30,7 @@ export class ClickHouseProvider extends BasePlatformServiceProvider {
return { return {
image: 'clickhouse/clickhouse-server:latest', image: 'clickhouse/clickhouse-server:latest',
port: 8123, // HTTP interface port: 8123, // HTTP interface
volumes: ['/var/lib/onebox/clickhouse:/var/lib/clickhouse'], volumes: [`${this.getPlatformDataDir('clickhouse')}:/var/lib/clickhouse`],
environment: { environment: {
CLICKHOUSE_DB: 'default', CLICKHOUSE_DB: 'default',
// Password will be generated and stored encrypted // Password will be generated and stored encrypted
@@ -53,7 +53,7 @@ export class ClickHouseProvider extends BasePlatformServiceProvider {
async deployContainer(): Promise<string> { async deployContainer(): Promise<string> {
const config = this.getDefaultConfig(); const config = this.getDefaultConfig();
const containerName = this.getContainerName(); const containerName = this.getContainerName();
const dataDir = '/var/lib/onebox/clickhouse'; const dataDir = this.getPlatformDataDir('clickhouse');
logger.info(`Deploying ClickHouse platform service as ${containerName}...`); logger.info(`Deploying ClickHouse platform service as ${containerName}...`);
@@ -76,7 +76,9 @@ export class ClickHouseProvider extends BasePlatformServiceProvider {
if (dataExists && platformService?.adminCredentialsEncrypted) { if (dataExists && platformService?.adminCredentialsEncrypted) {
// Reuse existing credentials from database // Reuse existing credentials from database
logger.info('Reusing existing ClickHouse credentials (data directory already initialized)'); logger.info('Reusing existing ClickHouse credentials (data directory already initialized)');
adminCredentials = await credentialEncryption.decrypt(platformService.adminCredentialsEncrypted); adminCredentials = await credentialEncryption.decrypt<{ username: string; password: string }>(
platformService.adminCredentialsEncrypted,
);
} else { } else {
// Generate new credentials for fresh deployment // Generate new credentials for fresh deployment
logger.info('Generating new ClickHouse admin credentials'); logger.info('Generating new ClickHouse admin credentials');
@@ -191,7 +193,9 @@ export class ClickHouseProvider extends BasePlatformServiceProvider {
throw new Error('ClickHouse platform service not found or not configured'); throw new Error('ClickHouse platform service not found or not configured');
} }
const adminCreds = await credentialEncryption.decrypt(platformService.adminCredentialsEncrypted); const adminCreds = await credentialEncryption.decrypt<{ username: string; password: string }>(
platformService.adminCredentialsEncrypted,
);
const containerName = this.getContainerName(); const containerName = this.getContainerName();
// Generate resource names and credentials // Generate resource names and credentials
@@ -247,7 +251,9 @@ export class ClickHouseProvider extends BasePlatformServiceProvider {
throw new Error('ClickHouse platform service not found or not configured'); throw new Error('ClickHouse platform service not found or not configured');
} }
const adminCreds = await credentialEncryption.decrypt(platformService.adminCredentialsEncrypted); const adminCreds = await credentialEncryption.decrypt<{ username: string; password: string }>(
platformService.adminCredentialsEncrypted,
);
logger.info(`Deprovisioning ClickHouse database '${resource.resourceName}'...`); logger.info(`Deprovisioning ClickHouse database '${resource.resourceName}'...`);
@@ -0,0 +1,281 @@
/**
* MariaDB Platform Service Provider
*/
import { BasePlatformServiceProvider } from './base.ts';
import type {
IService,
IPlatformResource,
IPlatformServiceConfig,
IProvisionedResource,
IEnvVarMapping,
TPlatformServiceType,
TPlatformResourceType,
} from '../../../types.ts';
import { logger } from '../../../logging.ts';
import { getErrorMessage } from '../../../utils/error.ts';
import { credentialEncryption } from '../../encryption.ts';
import type { Onebox } from '../../onebox.ts';
/**
 * Platform service provider that runs a shared MariaDB container and
 * provisions one database + dedicated user per consuming service.
 * All server-side operations are performed via `docker exec` against the
 * running container; credentials are stored encrypted in the Onebox DB.
 */
export class MariaDBProvider extends BasePlatformServiceProvider {
  readonly type: TPlatformServiceType = 'mariadb';
  readonly displayName = 'MariaDB';
  // Each user service gets a 'database' resource.
  readonly resourceTypes: TPlatformResourceType[] = ['database'];

  constructor(oneboxRef: Onebox) {
    super(oneboxRef);
  }

  /**
   * Container configuration for the shared MariaDB instance.
   * The data volume lives under the platform data dir; the root password
   * placeholder is filled in at deploy time.
   */
  getDefaultConfig(): IPlatformServiceConfig {
    return {
      image: 'mariadb:11',
      port: 3306,
      volumes: [`${this.getPlatformDataDir('mariadb')}:/var/lib/mysql`],
      environment: {
        MARIADB_ROOT_PASSWORD: '',
        // Password will be generated and stored encrypted
      },
    };
  }

  /**
   * Mapping from provisioned credential fields to the MARIADB_* env vars
   * injected into consuming services.
   */
  getEnvVarMappings(): IEnvVarMapping[] {
    return [
      { envVar: 'MARIADB_HOST', credentialPath: 'host' },
      { envVar: 'MARIADB_PORT', credentialPath: 'port' },
      { envVar: 'MARIADB_DATABASE', credentialPath: 'database' },
      { envVar: 'MARIADB_USER', credentialPath: 'username' },
      { envVar: 'MARIADB_PASSWORD', credentialPath: 'password' },
      { envVar: 'MARIADB_URI', credentialPath: 'connectionString' },
    ];
  }

  /**
   * Deploy the MariaDB container.
   *
   * Reuses the stored root credentials when the data directory is already
   * initialized; otherwise generates fresh credentials. If data exists but
   * no credentials are stored (unrecoverable), the data directory is wiped
   * so MariaDB can re-initialize with the new root password.
   *
   * @returns the new container's ID.
   */
  async deployContainer(): Promise<string> {
    const config = this.getDefaultConfig();
    const containerName = this.getContainerName();
    const dataDir = this.getPlatformDataDir('mariadb');

    logger.info(`Deploying MariaDB platform service as ${containerName}...`);

    // Check if we have existing data and stored credentials
    const platformService = this.oneboxRef.database.getPlatformServiceByType(this.type);
    let adminCredentials: { username: string; password: string };
    let dataExists = false;

    // Check if data directory has existing MariaDB data.
    // ibdata1 is the InnoDB system tablespace; its presence means the
    // server was initialized at least once with some root password.
    try {
      const stat = await Deno.stat(`${dataDir}/ibdata1`);
      dataExists = stat.isFile;
      logger.info(`MariaDB data directory exists with ibdata1 file`);
    } catch {
      // ibdata1 file doesn't exist, this is a fresh install
      dataExists = false;
    }

    if (dataExists && platformService?.adminCredentialsEncrypted) {
      // Reuse existing credentials from database
      logger.info('Reusing existing MariaDB credentials (data directory already initialized)');
      adminCredentials = await credentialEncryption.decrypt<{ username: string; password: string }>(
        platformService.adminCredentialsEncrypted,
      );
    } else {
      // Generate new credentials for fresh deployment
      logger.info('Generating new MariaDB admin credentials');
      adminCredentials = {
        username: 'root',
        password: credentialEncryption.generatePassword(32),
      };
      // If data exists but we don't have credentials, we need to wipe the data
      // (the old root password is unknown, so the data is inaccessible anyway).
      if (dataExists) {
        logger.warn('MariaDB data exists but no credentials in database - wiping data directory');
        try {
          await Deno.remove(dataDir, { recursive: true });
        } catch (e) {
          logger.error(`Failed to wipe MariaDB data directory: ${getErrorMessage(e)}`);
          throw new Error('Cannot deploy MariaDB: data directory exists without credentials');
        }
      }
    }

    // Ensure data directory exists
    try {
      await Deno.mkdir(dataDir, { recursive: true });
    } catch (e) {
      // Directory might already exist
      if (!(e instanceof Deno.errors.AlreadyExists)) {
        logger.warn(`Could not create MariaDB data directory: ${getErrorMessage(e)}`);
      }
    }

    // Create container using Docker API.
    // MARIADB_ROOT_PASSWORD only takes effect on first initialization;
    // on reused data directories the stored password already matches.
    const envVars = [
      `MARIADB_ROOT_PASSWORD=${adminCredentials.password}`,
    ];

    // Use Docker to create the container
    const containerId = await this.oneboxRef.docker.createPlatformContainer({
      name: containerName,
      image: config.image,
      port: config.port,
      env: envVars,
      volumes: config.volumes,
      network: this.getNetworkName(),
    });

    // Store encrypted admin credentials (only update if new or changed)
    const encryptedCreds = await credentialEncryption.encrypt(adminCredentials);
    if (platformService) {
      this.oneboxRef.database.updatePlatformService(platformService.id!, {
        containerId,
        adminCredentialsEncrypted: encryptedCreds,
        status: 'starting',
      });
    }

    logger.success(`MariaDB container created: ${containerId}`);
    return containerId;
  }

  /**
   * Stop the MariaDB container via the Docker client.
   */
  async stopContainer(containerId: string): Promise<void> {
    logger.info(`Stopping MariaDB container ${containerId}...`);
    await this.oneboxRef.docker.stopContainer(containerId);
    logger.success('MariaDB container stopped');
  }

  /**
   * Ping the server with `mariadb-admin ping` inside the container.
   * @returns true only when the DB record, credentials, and container ID all
   *          exist and the ping exits 0; never throws.
   */
  async healthCheck(): Promise<boolean> {
    try {
      logger.info('MariaDB health check: starting...');
      const platformService = this.oneboxRef.database.getPlatformServiceByType(this.type);
      if (!platformService) {
        logger.info('MariaDB health check: platform service not found in database');
        return false;
      }
      if (!platformService.adminCredentialsEncrypted) {
        logger.info('MariaDB health check: no admin credentials stored');
        return false;
      }
      if (!platformService.containerId) {
        logger.info('MariaDB health check: no container ID in database record');
        return false;
      }

      logger.info(`MariaDB health check: using container ID ${platformService.containerId.substring(0, 12)}...`);
      const adminCreds = await credentialEncryption.decrypt(platformService.adminCredentialsEncrypted);

      // Use docker exec to run health check inside the container
      const result = await this.oneboxRef.docker.execInContainer(
        platformService.containerId,
        ['mariadb-admin', 'ping', '-u', 'root', `-p${adminCreds.password}`]
      );

      if (result.exitCode === 0) {
        logger.info('MariaDB health check: success');
        return true;
      } else {
        logger.info(`MariaDB health check failed: exit code ${result.exitCode}, stderr: ${result.stderr.substring(0, 200)}`);
        return false;
      }
    } catch (error) {
      logger.info(`MariaDB health check exception: ${getErrorMessage(error)}`);
      return false;
    }
  }

  /**
   * Create a database and a dedicated user for a consuming service.
   *
   * Database and user names are derived from the service name; the password
   * is freshly generated. SQL runs via `docker exec mariadb -e ...` as root.
   *
   * @returns the provisioned resource with credentials and mapped env vars.
   * @throws Error when the platform service is not configured or SQL fails.
   */
  async provisionResource(userService: IService): Promise<IProvisionedResource> {
    const platformService = this.oneboxRef.database.getPlatformServiceByType(this.type);
    if (!platformService || !platformService.adminCredentialsEncrypted || !platformService.containerId) {
      throw new Error('MariaDB platform service not found or not configured');
    }

    const adminCreds = await credentialEncryption.decrypt(platformService.adminCredentialsEncrypted);
    const containerName = this.getContainerName();

    // Generate resource names and credentials.
    // Database and user intentionally share the same derived name.
    const dbName = this.generateResourceName(userService.name);
    const username = this.generateResourceName(userService.name);
    const password = credentialEncryption.generatePassword(32);

    logger.info(`Provisioning MariaDB database '${dbName}' for service '${userService.name}'...`);

    // Create database and user via mariadb inside the container.
    // The password has single quotes escaped; generated passwords are the
    // only expected input here.
    const sql = [
      `CREATE DATABASE IF NOT EXISTS \`${dbName}\`;`,
      `CREATE USER IF NOT EXISTS '${username}'@'%' IDENTIFIED BY '${password.replace(/'/g, "\\'")}';`,
      `GRANT ALL PRIVILEGES ON \`${dbName}\`.* TO '${username}'@'%';`,
      `FLUSH PRIVILEGES;`,
    ].join(' ');

    const result = await this.oneboxRef.docker.execInContainer(
      platformService.containerId,
      [
        'mariadb',
        '-u', 'root',
        `-p${adminCreds.password}`,
        '-e', sql,
      ]
    );

    if (result.exitCode !== 0) {
      throw new Error(`Failed to provision MariaDB database: exit code ${result.exitCode}, output: ${result.stdout.substring(0, 200)} ${result.stderr.substring(0, 200)}`);
    }

    logger.success(`MariaDB database '${dbName}' provisioned with user '${username}'`);

    // Build the credentials and env vars.
    // Host is the container name (resolvable on the shared Docker network).
    const credentials: Record<string, string> = {
      host: containerName,
      port: '3306',
      database: dbName,
      username,
      password,
      connectionString: `mysql://${username}:${password}@${containerName}:3306/${dbName}`,
    };

    // Map credentials to env vars
    const envVars: Record<string, string> = {};
    for (const mapping of this.getEnvVarMappings()) {
      if (credentials[mapping.credentialPath]) {
        envVars[mapping.envVar] = credentials[mapping.credentialPath];
      }
    }

    return {
      type: 'database',
      name: dbName,
      credentials,
      envVars,
    };
  }

  /**
   * Drop the service's user and database.
   * A non-zero exit is logged as a warning rather than thrown, so teardown
   * continues even if the objects are already gone.
   */
  async deprovisionResource(resource: IPlatformResource, credentials: Record<string, string>): Promise<void> {
    const platformService = this.oneboxRef.database.getPlatformServiceByType(this.type);
    if (!platformService || !platformService.adminCredentialsEncrypted || !platformService.containerId) {
      throw new Error('MariaDB platform service not found or not configured');
    }

    const adminCreds = await credentialEncryption.decrypt(platformService.adminCredentialsEncrypted);

    logger.info(`Deprovisioning MariaDB database '${resource.resourceName}'...`);

    const sql = [
      `DROP USER IF EXISTS '${credentials.username}'@'%';`,
      `DROP DATABASE IF EXISTS \`${resource.resourceName}\`;`,
    ].join(' ');

    const result = await this.oneboxRef.docker.execInContainer(
      platformService.containerId,
      [
        'mariadb',
        '-u', 'root',
        `-p${adminCreds.password}`,
        '-e', sql,
      ]
    );

    if (result.exitCode !== 0) {
      logger.warn(`MariaDB deprovision returned exit code ${result.exitCode}: ${result.stderr.substring(0, 200)}`);
    }

    logger.success(`MariaDB database '${resource.resourceName}' dropped`);
  }
}
@@ -30,7 +30,7 @@ export class MinioProvider extends BasePlatformServiceProvider {
return { return {
image: 'minio/minio:latest', image: 'minio/minio:latest',
port: 9000, port: 9000,
volumes: ['/var/lib/onebox/minio:/data'], volumes: [`${this.getPlatformDataDir('minio')}:/data`],
command: 'server /data --console-address :9001', command: 'server /data --console-address :9001',
environment: { environment: {
MINIO_ROOT_USER: 'admin', MINIO_ROOT_USER: 'admin',
@@ -57,7 +57,7 @@ export class MinioProvider extends BasePlatformServiceProvider {
async deployContainer(): Promise<string> { async deployContainer(): Promise<string> {
const config = this.getDefaultConfig(); const config = this.getDefaultConfig();
const containerName = this.getContainerName(); const containerName = this.getContainerName();
const dataDir = '/var/lib/onebox/minio'; const dataDir = this.getPlatformDataDir('minio');
logger.info(`Deploying MinIO platform service as ${containerName}...`); logger.info(`Deploying MinIO platform service as ${containerName}...`);
@@ -80,7 +80,9 @@ export class MinioProvider extends BasePlatformServiceProvider {
if (dataExists && platformService?.adminCredentialsEncrypted) { if (dataExists && platformService?.adminCredentialsEncrypted) {
// Reuse existing credentials from database // Reuse existing credentials from database
logger.info('Reusing existing MinIO credentials (data directory already initialized)'); logger.info('Reusing existing MinIO credentials (data directory already initialized)');
adminCredentials = await credentialEncryption.decrypt(platformService.adminCredentialsEncrypted); adminCredentials = await credentialEncryption.decrypt<{ username: string; password: string }>(
platformService.adminCredentialsEncrypted,
);
} else { } else {
// Generate new credentials for fresh deployment // Generate new credentials for fresh deployment
logger.info('Generating new MinIO admin credentials'); logger.info('Generating new MinIO admin credentials');
@@ -30,7 +30,7 @@ export class MongoDBProvider extends BasePlatformServiceProvider {
return { return {
image: 'mongo:4.4', image: 'mongo:4.4',
port: 27017, port: 27017,
volumes: ['/var/lib/onebox/mongodb:/data/db'], volumes: [`${this.getPlatformDataDir('mongodb')}:/data/db`],
environment: { environment: {
MONGO_INITDB_ROOT_USERNAME: 'admin', MONGO_INITDB_ROOT_USERNAME: 'admin',
// Password will be generated and stored encrypted // Password will be generated and stored encrypted
@@ -52,7 +52,7 @@ export class MongoDBProvider extends BasePlatformServiceProvider {
async deployContainer(): Promise<string> { async deployContainer(): Promise<string> {
const config = this.getDefaultConfig(); const config = this.getDefaultConfig();
const containerName = this.getContainerName(); const containerName = this.getContainerName();
const dataDir = '/var/lib/onebox/mongodb'; const dataDir = this.getPlatformDataDir('mongodb');
logger.info(`Deploying MongoDB platform service as ${containerName}...`); logger.info(`Deploying MongoDB platform service as ${containerName}...`);
@@ -74,7 +74,9 @@ export class MongoDBProvider extends BasePlatformServiceProvider {
if (dataExists && platformService?.adminCredentialsEncrypted) { if (dataExists && platformService?.adminCredentialsEncrypted) {
// Reuse existing credentials from database // Reuse existing credentials from database
logger.info('Reusing existing MongoDB credentials (data directory already initialized)'); logger.info('Reusing existing MongoDB credentials (data directory already initialized)');
adminCredentials = await credentialEncryption.decrypt(platformService.adminCredentialsEncrypted); adminCredentials = await credentialEncryption.decrypt<{ username: string; password: string }>(
platformService.adminCredentialsEncrypted,
);
} else { } else {
// Generate new credentials for fresh deployment // Generate new credentials for fresh deployment
logger.info('Generating new MongoDB admin credentials'); logger.info('Generating new MongoDB admin credentials');
@@ -0,0 +1,285 @@
/**
* Redis Platform Service Provider
*/
import { BasePlatformServiceProvider } from './base.ts';
import type {
IService,
IPlatformResource,
IPlatformServiceConfig,
IProvisionedResource,
IEnvVarMapping,
TPlatformServiceType,
TPlatformResourceType,
} from '../../../types.ts';
import { logger } from '../../../logging.ts';
import { getErrorMessage } from '../../../utils/error.ts';
import { credentialEncryption } from '../../encryption.ts';
import type { Onebox } from '../../onebox.ts';
export class RedisProvider extends BasePlatformServiceProvider {
readonly type: TPlatformServiceType = 'redis';
readonly displayName = 'Redis';
readonly resourceTypes: TPlatformResourceType[] = ['cache'];
constructor(oneboxRef: Onebox) {
super(oneboxRef);
}
getDefaultConfig(): IPlatformServiceConfig {
  // Persist Redis RDB/AOF files under the shared platform data directory.
  const dataVolume = `${this.getPlatformDataDir('redis')}:/data`;
  const config: IPlatformServiceConfig = {
    image: 'redis:7-alpine',
    port: 6379,
    volumes: [dataVolume],
    environment: {},
  };
  return config;
}
getEnvVarMappings(): IEnvVarMapping[] {
  // Credential fields exposed to consuming services as REDIS_* variables.
  const table: Array<[string, string]> = [
    ['REDIS_HOST', 'host'],
    ['REDIS_PORT', 'port'],
    ['REDIS_PASSWORD', 'password'],
    ['REDIS_DB', 'db'],
    ['REDIS_URL', 'connectionString'],
  ];
  return table.map(([envVar, credentialPath]) => ({ envVar, credentialPath }));
}
/**
 * Deploy the Redis container.
 *
 * Reuses stored admin credentials when the data directory already holds a
 * dump.rdb or appendonly.aof file; otherwise generates a fresh password.
 * Existing data without stored credentials is wiped so the instance can be
 * re-initialized. The password is passed via `--requirepass` on the command
 * line (Redis does not take it from env vars); AOF persistence is enabled.
 *
 * @returns the new container's ID.
 */
async deployContainer(): Promise<string> {
  const config = this.getDefaultConfig();
  const containerName = this.getContainerName();
  const dataDir = this.getPlatformDataDir('redis');

  logger.info(`Deploying Redis platform service as ${containerName}...`);

  // Check if we have existing data and stored credentials
  const platformService = this.oneboxRef.database.getPlatformServiceByType(this.type);
  let adminCredentials: { username: string; password: string };
  let dataExists = false;

  // Check if data directory has existing Redis data
  // (either an RDB snapshot or an append-only file).
  try {
    const stat = await Deno.stat(`${dataDir}/dump.rdb`);
    dataExists = stat.isFile;
    logger.info(`Redis data directory exists with dump.rdb file`);
  } catch {
    // Also check for appendonly file
    try {
      const stat = await Deno.stat(`${dataDir}/appendonly.aof`);
      dataExists = stat.isFile;
      logger.info(`Redis data directory exists with appendonly.aof file`);
    } catch {
      dataExists = false;
    }
  }

  if (dataExists && platformService?.adminCredentialsEncrypted) {
    // Reuse existing credentials from database
    logger.info('Reusing existing Redis credentials (data directory already initialized)');
    adminCredentials = await credentialEncryption.decrypt<{ username: string; password: string }>(
      platformService.adminCredentialsEncrypted,
    );
  } else {
    // Generate new credentials for fresh deployment.
    // 'default' is Redis's built-in ACL user name.
    logger.info('Generating new Redis admin credentials');
    adminCredentials = {
      username: 'default',
      password: credentialEncryption.generatePassword(32),
    };
    // If data exists but we don't have credentials, we need to wipe the data
    if (dataExists) {
      logger.warn('Redis data exists but no credentials in database - wiping data directory');
      try {
        await Deno.remove(dataDir, { recursive: true });
      } catch (e) {
        logger.error(`Failed to wipe Redis data directory: ${getErrorMessage(e)}`);
        throw new Error('Cannot deploy Redis: data directory exists without credentials');
      }
    }
  }

  // Ensure data directory exists
  try {
    await Deno.mkdir(dataDir, { recursive: true });
  } catch (e) {
    // Directory might already exist
    if (!(e instanceof Deno.errors.AlreadyExists)) {
      logger.warn(`Could not create Redis data directory: ${getErrorMessage(e)}`);
    }
  }

  // Redis uses command args for password, not env vars
  const containerId = await this.oneboxRef.docker.createPlatformContainer({
    name: containerName,
    image: config.image,
    port: config.port,
    env: [],
    volumes: config.volumes,
    network: this.getNetworkName(),
    command: ['redis-server', '--requirepass', adminCredentials.password, '--appendonly', 'yes'],
  });

  // Store encrypted admin credentials (only update if new or changed)
  const encryptedCreds = await credentialEncryption.encrypt(adminCredentials);
  if (platformService) {
    this.oneboxRef.database.updatePlatformService(platformService.id!, {
      containerId,
      adminCredentialsEncrypted: encryptedCreds,
      status: 'starting',
    });
  }

  logger.success(`Redis container created: ${containerId}`);
  return containerId;
}
/**
 * Stop the Redis container via the Docker client.
 * @param containerId Docker container ID recorded at deploy time.
 */
async stopContainer(containerId: string): Promise<void> {
  logger.info(`Stopping Redis container ${containerId}...`);
  await this.oneboxRef.docker.stopContainer(containerId);
  logger.success('Redis container stopped');
}
/**
 * Check Redis liveness with an authenticated `redis-cli ping` inside the
 * container.
 *
 * @returns true only when the DB record, stored credentials, and container
 *          ID all exist and the ping exits 0 printing PONG; never throws —
 *          all failures are logged and reported as false.
 */
async healthCheck(): Promise<boolean> {
  try {
    logger.info('Redis health check: starting...');
    const platformService = this.oneboxRef.database.getPlatformServiceByType(this.type);
    if (!platformService) {
      logger.info('Redis health check: platform service not found in database');
      return false;
    }
    if (!platformService.adminCredentialsEncrypted) {
      logger.info('Redis health check: no admin credentials stored');
      return false;
    }
    if (!platformService.containerId) {
      logger.info('Redis health check: no container ID in database record');
      return false;
    }

    logger.info(`Redis health check: using container ID ${platformService.containerId.substring(0, 12)}...`);
    const adminCreds = await credentialEncryption.decrypt(platformService.adminCredentialsEncrypted);

    // Use docker exec to run health check inside the container
    const result = await this.oneboxRef.docker.execInContainer(
      platformService.containerId,
      ['redis-cli', '-a', adminCreds.password, 'ping']
    );

    // Require both a clean exit and the PONG reply.
    if (result.exitCode === 0 && result.stdout.includes('PONG')) {
      logger.info('Redis health check: success');
      return true;
    } else {
      logger.info(`Redis health check failed: exit code ${result.exitCode}, stdout: ${result.stdout.substring(0, 200)}`);
      return false;
    }
  } catch (error) {
    logger.info(`Redis health check exception: ${getErrorMessage(error)}`);
    return false;
  }
}
/**
 * Provision an isolated Redis logical database for a user service.
 *
 * Redis databases exist implicitly, so "provisioning" means choosing the
 * lowest free DB index in 1-15 (index 0 is reserved for the admin
 * connection) and, when the container is known, verifying the index
 * answers a PING. Credentials reuse the shared admin password.
 *
 * @param userService - The user service that needs a cache resource.
 * @returns Resource descriptor with credentials and mapped env vars.
 * @throws Error when the platform service is missing/unconfigured, all 15
 *   indexes are taken, or the connectivity check fails.
 */
async provisionResource(userService: IService): Promise<IProvisionedResource> {
  const platformService = this.oneboxRef.database.getPlatformServiceByType(this.type);
  if (!platformService || !platformService.adminCredentialsEncrypted) {
    throw new Error('Redis platform service not found or not configured');
  }
  const adminCreds = await credentialEncryption.decrypt(platformService.adminCredentialsEncrypted);
  const containerName = this.getContainerName();

  // Collect DB indexes already handed out to other services.
  const taken = new Set<number>();
  const existing = this.oneboxRef.database.getPlatformResourcesByPlatformService(platformService.id!);
  for (const existingResource of existing) {
    try {
      const creds = await credentialEncryption.decrypt(existingResource.credentialsEncrypted);
      if (creds.db) {
        taken.add(parseInt(creds.db, 10));
      }
    } catch {
      // Skip resources with corrupt credentials
    }
  }

  // Lowest free index wins; 0 stays reserved for admin use.
  let dbIndex = -1;
  for (let candidate = 1; candidate <= 15; candidate++) {
    if (!taken.has(candidate)) {
      dbIndex = candidate;
      break;
    }
  }
  if (dbIndex === -1) {
    throw new Error('No available Redis database indexes (max 15 services per Redis instance)');
  }

  const resourceName = this.generateResourceName(userService.name);
  logger.info(`Provisioning Redis database index ${dbIndex} for service '${userService.name}'...`);

  // No server-side creation is needed — DB indexes exist implicitly.
  // Just confirm the selected index is reachable when we know the container.
  if (platformService.containerId) {
    const ping = await this.oneboxRef.docker.execInContainer(
      platformService.containerId,
      ['redis-cli', '-a', adminCreds.password, '-n', String(dbIndex), 'ping'],
    );
    if (ping.exitCode !== 0 || !ping.stdout.includes('PONG')) {
      throw new Error(`Failed to verify Redis database ${dbIndex}: exit code ${ping.exitCode}`);
    }
  }
  logger.success(`Redis database index ${dbIndex} provisioned for service '${userService.name}'`);

  const credentials: Record<string, string> = {
    host: containerName,
    port: '6379',
    password: adminCreds.password,
    db: String(dbIndex),
    connectionString: `redis://:${adminCreds.password}@${containerName}:6379/${dbIndex}`,
  };

  // Translate credential fields into the env var names the service expects.
  const envVars: Record<string, string> = {};
  for (const { credentialPath, envVar } of this.getEnvVarMappings()) {
    const value = credentials[credentialPath];
    if (value) {
      envVars[envVar] = value;
    }
  }

  return {
    type: 'cache',
    name: resourceName,
    credentials,
    envVars,
  };
}
/**
 * Release a Redis logical database by flushing all of its keys.
 *
 * Redis indexes cannot be deleted, so deprovisioning is a FLUSHDB against
 * the resource's DB index. A non-zero exit code is logged as a warning
 * rather than thrown — cleanup here is best-effort.
 *
 * @param resource - The platform resource record being removed.
 * @param credentials - Decrypted resource credentials; `db` holds the index.
 * @throws Error when the platform service record is missing or incomplete.
 */
async deprovisionResource(resource: IPlatformResource, credentials: Record<string, string>): Promise<void> {
  const platformService = this.oneboxRef.database.getPlatformServiceByType(this.type);
  if (!platformService || !platformService.adminCredentialsEncrypted || !platformService.containerId) {
    throw new Error('Redis platform service not found or not configured');
  }
  const adminCreds = await credentialEncryption.decrypt(platformService.adminCredentialsEncrypted);
  const dbIndex = credentials.db || '0';
  logger.info(`Deprovisioning Redis database index ${dbIndex} for resource '${resource.resourceName}'...`);
  const flushResult = await this.oneboxRef.docker.execInContainer(
    platformService.containerId,
    ['redis-cli', '-a', adminCreds.password, '-n', dbIndex, 'FLUSHDB'],
  );
  if (flushResult.exitCode !== 0) {
    logger.warn(`Redis deprovision returned exit code ${flushResult.exitCode}: ${flushResult.stderr.substring(0, 200)}`);
  }
  logger.success(`Redis database index ${dbIndex} flushed for resource '${resource.resourceName}'`);
}
}
@@ -0,0 +1,87 @@
/**
* SmartProxy Platform Service Provider
*
* SmartProxy is a core infrastructure service that provides reverse proxy functionality.
* Unlike other platform services:
* - It doesn't provision resources for user services
* - It's started automatically by Onebox and cannot be stopped by users
* - It delegates to the existing reverse proxy manager for actual operations
*/
import { BasePlatformServiceProvider } from './base.ts';
import type {
IService,
IPlatformResource,
IPlatformServiceConfig,
IProvisionedResource,
IEnvVarMapping,
TPlatformServiceType,
TPlatformResourceType,
} from '../../../types.ts';
import { logger } from '../../../logging.ts';
import type { Onebox } from '../../onebox.ts';
export class SmartProxyProvider extends BasePlatformServiceProvider {
  readonly type: TPlatformServiceType = 'smartproxy';
  readonly displayName = 'SmartProxy Reverse Proxy';
  // SmartProxy never hands out per-service resources.
  readonly resourceTypes: TPlatformResourceType[] = [];
  // Core services are managed by Onebox itself and cannot be user-stopped.
  readonly isCore = true;

  constructor(oneboxRef: Onebox) {
    super(oneboxRef);
  }

  /** Default container configuration for the SmartProxy image. */
  getDefaultConfig(): IPlatformServiceConfig {
    return {
      image: 'code.foss.global/host.today/ht-docker-smartproxy:latest',
      port: 80,
      volumes: [],
      environment: {},
    };
  }

  /** No credentials are exposed, so there are no env var mappings. */
  getEnvVarMappings(): IEnvVarMapping[] {
    return [];
  }

  /**
   * "Deploy" SmartProxy by delegating to the reverse proxy manager, then
   * mark the platform-service record running with a synthetic container ID.
   *
   * @returns The synthetic container ID 'onebox-smartproxy'.
   */
  async deployContainer(): Promise<string> {
    logger.info('Starting SmartProxy via reverse proxy manager...');
    await this.oneboxRef.reverseProxy.startHttp();
    const record = this.oneboxRef.database.getPlatformServiceByType(this.type);
    if (record) {
      this.oneboxRef.database.updatePlatformService(record.id!, {
        status: 'running',
        containerId: 'onebox-smartproxy',
      });
    }
    logger.success('SmartProxy platform service started');
    return 'onebox-smartproxy';
  }

  /** Core infrastructure: stopping is always rejected. */
  async stopContainer(_containerId: string): Promise<void> {
    throw new Error('SmartProxy is a core infrastructure service and cannot be stopped');
  }

  /** Healthy exactly when the reverse proxy reports its HTTP side running. */
  async healthCheck(): Promise<boolean> {
    try {
      return this.oneboxRef.reverseProxy.getStatus().http.running;
    } catch (error) {
      logger.debug(`SmartProxy health check failed: ${error}`);
      return false;
    }
  }

  /** SmartProxy has no user-service resources to provision. */
  async provisionResource(_userService: IService): Promise<IProvisionedResource> {
    throw new Error('SmartProxy does not provision resources for user services');
  }

  /** SmartProxy has no user-service resources to deprovision. */
  async deprovisionResource(_resource: IPlatformResource, _credentials: Record<string, string>): Promise<void> {
    throw new Error('SmartProxy does not manage resources for user services');
  }
}
@@ -1,7 +1,7 @@
/** /**
* Caddy Log Receiver for Onebox * Proxy Log Receiver for Onebox
* *
* TCP server that receives access logs from Caddy and broadcasts them to WebSocket clients. * TCP server that receives reverse proxy access logs and broadcasts them to WebSocket clients.
* Supports per-client filtering by domain and adaptive sampling at high volume. * Supports per-client filtering by domain and adaptive sampling at high volume.
*/ */
@@ -18,9 +18,9 @@ export interface ILogFilter {
} }
/** /**
* Caddy access log entry structure (from Caddy JSON format) * Reverse proxy access log entry structure.
*/ */
export interface ICaddyAccessLog { export interface IProxyAccessLog {
ts: number; ts: number;
level?: string; level?: string;
logger?: string; logger?: string;
@@ -60,14 +60,17 @@ interface ILogClient {
} }
/** /**
* CaddyLogReceiver - TCP server for Caddy access logs * ProxyLogReceiver - TCP server for reverse proxy access logs
*/ */
export class CaddyLogReceiver { export class ProxyLogReceiver {
private server: Deno.TcpListener | null = null; private server: Deno.TcpListener | null = null;
private clients: Map<string, ILogClient> = new Map(); private clients: Map<string, ILogClient> = new Map();
private port: number; private port: number;
private running = false; private running = false;
private connections: Set<Deno.TcpConn> = new Set(); private connections: Set<Deno.TcpConn> = new Set();
private connectionReaders: Map<Deno.TcpConn, ReadableStreamDefaultReader<Uint8Array>> = new Map();
private connectionHandlers: Set<Promise<void>> = new Set();
private acceptTask: Promise<void> | null = null;
// Adaptive sampling state // Adaptive sampling state
private logCountWindow: number[] = []; // timestamps of recent logs private logCountWindow: number[] = []; // timestamps of recent logs
@@ -76,7 +79,7 @@ export class CaddyLogReceiver {
private logCounter = 0; private logCounter = 0;
// Ring buffer for recent logs (for late-joining clients) // Ring buffer for recent logs (for late-joining clients)
private recentLogs: ICaddyAccessLog[] = []; private recentLogs: IProxyAccessLog[] = [];
private maxRecentLogs = 100; private maxRecentLogs = 100;
// Traffic stats aggregation (hourly rolling window) // Traffic stats aggregation (hourly rolling window)
@@ -137,7 +140,7 @@ export class CaddyLogReceiver {
/** /**
* Record a request in traffic stats * Record a request in traffic stats
*/ */
private recordTrafficStats(log: ICaddyAccessLog): void { private recordTrafficStats(log: IProxyAccessLog): void {
const bucket = this.getCurrentStatsBucket(); const bucket = this.getCurrentStatsBucket();
bucket.requestCount++; bucket.requestCount++;
@@ -164,25 +167,25 @@ export class CaddyLogReceiver {
*/ */
async start(): Promise<void> { async start(): Promise<void> {
if (this.running) { if (this.running) {
logger.warn('CaddyLogReceiver is already running'); logger.warn('ProxyLogReceiver is already running');
return; return;
} }
try { try {
this.server = Deno.listen({ port: this.port, transport: 'tcp' }); this.server = Deno.listen({ port: this.port, transport: 'tcp' });
this.running = true; this.running = true;
logger.success(`CaddyLogReceiver started on TCP port ${this.port}`); logger.success(`ProxyLogReceiver started on TCP port ${this.port}`);
// Start accepting connections in background // Start accepting connections in background
this.acceptConnections(); this.acceptTask = this.acceptConnections();
} catch (error) { } catch (error) {
logger.error(`Failed to start CaddyLogReceiver: ${getErrorMessage(error)}`); logger.error(`Failed to start ProxyLogReceiver: ${getErrorMessage(error)}`);
throw error; throw error;
} }
} }
/** /**
* Accept incoming TCP connections from Caddy * Accept incoming TCP connections from the reverse proxy
*/ */
private async acceptConnections(): Promise<void> { private async acceptConnections(): Promise<void> {
if (!this.server) return; if (!this.server) return;
@@ -190,23 +193,26 @@ export class CaddyLogReceiver {
try { try {
for await (const conn of this.server) { for await (const conn of this.server) {
this.connections.add(conn); this.connections.add(conn);
this.handleConnection(conn); const handlerTask = this.handleConnection(conn);
this.connectionHandlers.add(handlerTask);
handlerTask.finally(() => this.connectionHandlers.delete(handlerTask));
} }
} catch (error) { } catch (error) {
if (this.running) { if (this.running) {
logger.error(`CaddyLogReceiver accept error: ${getErrorMessage(error)}`); logger.error(`ProxyLogReceiver accept error: ${getErrorMessage(error)}`);
} }
} }
} }
/** /**
* Handle a single TCP connection from Caddy * Handle a single TCP connection from the reverse proxy
*/ */
private async handleConnection(conn: Deno.TcpConn): Promise<void> { private async handleConnection(conn: Deno.TcpConn): Promise<void> {
const remoteAddr = conn.remoteAddr as Deno.NetAddr; const remoteAddr = conn.remoteAddr as Deno.NetAddr;
logger.debug(`CaddyLogReceiver: Connection from ${remoteAddr.hostname}:${remoteAddr.port}`); logger.debug(`ProxyLogReceiver: Connection from ${remoteAddr.hostname}:${remoteAddr.port}`);
const reader = conn.readable.getReader(); const reader = conn.readable.getReader();
this.connectionReaders.set(conn, reader);
const decoder = new TextDecoder(); const decoder = new TextDecoder();
let buffer = ''; let buffer = '';
@@ -217,7 +223,7 @@ export class CaddyLogReceiver {
buffer += decoder.decode(value, { stream: true }); buffer += decoder.decode(value, { stream: true });
// Process complete lines (Caddy sends newline-delimited JSON) // Process complete newline-delimited JSON log lines.
const lines = buffer.split('\n'); const lines = buffer.split('\n');
buffer = lines.pop() || ''; // Keep incomplete line in buffer buffer = lines.pop() || ''; // Keep incomplete line in buffer
@@ -229,10 +235,16 @@ export class CaddyLogReceiver {
} }
} catch (error) { } catch (error) {
if (this.running) { if (this.running) {
logger.debug(`CaddyLogReceiver connection closed: ${getErrorMessage(error)}`); logger.debug(`ProxyLogReceiver connection closed: ${getErrorMessage(error)}`);
} }
} finally { } finally {
this.connectionReaders.delete(conn);
this.connections.delete(conn); this.connections.delete(conn);
try {
reader.releaseLock();
} catch {
// Reader may already be released after cancellation during shutdown.
}
try { try {
conn.close(); conn.close();
} catch { } catch {
@@ -242,18 +254,18 @@ export class CaddyLogReceiver {
} }
/** /**
* Process a single log line from Caddy * Process a single access log line
*/ */
private processLogLine(line: string): void { private processLogLine(line: string): void {
try { try {
const log = JSON.parse(line) as ICaddyAccessLog; const log = JSON.parse(line) as IProxyAccessLog;
// Only process access logs (check for http.log.access or just access, or any log with request/status) // Only process access logs (check for http.log.access or just access, or any log with request/status)
const isAccessLog = log.logger === 'http.log.access' || const isAccessLog = log.logger === 'http.log.access' ||
log.logger === 'access' || log.logger === 'access' ||
(log.request && typeof log.status === 'number'); (log.request && typeof log.status === 'number');
if (!isAccessLog) { if (!isAccessLog) {
logger.debug(`CaddyLogReceiver: Skipping non-access log: ${log.logger || 'unknown'}`); logger.debug(`ProxyLogReceiver: Skipping non-access log: ${log.logger || 'unknown'}`);
return; return;
} }
@@ -268,7 +280,7 @@ export class CaddyLogReceiver {
return; return;
} }
logger.debug(`CaddyLogReceiver: Access log received - ${log.request?.method} ${log.request?.host}${log.request?.uri} (status: ${log.status})`); logger.debug(`ProxyLogReceiver: Access log received - ${log.request?.method} ${log.request?.host}${log.request?.uri} (status: ${log.status})`);
// Add to recent logs buffer // Add to recent logs buffer
this.recentLogs.push(log); this.recentLogs.push(log);
@@ -277,10 +289,10 @@ export class CaddyLogReceiver {
} }
// Broadcast to WebSocket clients (log how many clients) // Broadcast to WebSocket clients (log how many clients)
logger.debug(`CaddyLogReceiver: Broadcasting to ${this.clients.size} clients`); logger.debug(`ProxyLogReceiver: Broadcasting to ${this.clients.size} clients`);
this.broadcast(log); this.broadcast(log);
} catch (error) { } catch (error) {
logger.debug(`Failed to parse Caddy log line: ${getErrorMessage(error)}`); logger.debug(`Failed to parse proxy log line: ${getErrorMessage(error)}`);
} }
} }
@@ -317,7 +329,7 @@ export class CaddyLogReceiver {
/** /**
* Broadcast a log entry to all connected WebSocket clients * Broadcast a log entry to all connected WebSocket clients
*/ */
private broadcast(log: ICaddyAccessLog): void { private broadcast(log: IProxyAccessLog): void {
const message = JSON.stringify({ const message = JSON.stringify({
type: 'access_log', type: 'access_log',
data: { data: {
@@ -365,7 +377,7 @@ export class CaddyLogReceiver {
/** /**
* Check if a log entry matches a client's filter * Check if a log entry matches a client's filter
*/ */
private matchesFilter(log: ICaddyAccessLog, filter: ILogFilter): boolean { private matchesFilter(log: IProxyAccessLog, filter: ILogFilter): boolean {
// Domain filter // Domain filter
if (filter.domain) { if (filter.domain) {
const logHost = log.request.host.toLowerCase(); const logHost = log.request.host.toLowerCase();
@@ -385,7 +397,7 @@ export class CaddyLogReceiver {
*/ */
addClient(clientId: string, ws: WebSocket, filter: ILogFilter = {}): void { addClient(clientId: string, ws: WebSocket, filter: ILogFilter = {}): void {
this.clients.set(clientId, { id: clientId, ws, filter }); this.clients.set(clientId, { id: clientId, ws, filter });
logger.debug(`CaddyLogReceiver: Added client ${clientId} (${this.clients.size} total)`); logger.debug(`ProxyLogReceiver: Added client ${clientId} (${this.clients.size} total)`);
// Send recent logs to new client // Send recent logs to new client
for (const log of this.recentLogs) { for (const log of this.recentLogs) {
@@ -422,7 +434,7 @@ export class CaddyLogReceiver {
*/ */
removeClient(clientId: string): void { removeClient(clientId: string): void {
if (this.clients.delete(clientId)) { if (this.clients.delete(clientId)) {
logger.debug(`CaddyLogReceiver: Removed client ${clientId} (${this.clients.size} remaining)`); logger.debug(`ProxyLogReceiver: Removed client ${clientId} (${this.clients.size} remaining)`);
} }
} }
@@ -433,7 +445,7 @@ export class CaddyLogReceiver {
const client = this.clients.get(clientId); const client = this.clients.get(clientId);
if (client) { if (client) {
client.filter = filter; client.filter = filter;
logger.debug(`CaddyLogReceiver: Updated filter for client ${clientId}`); logger.debug(`ProxyLogReceiver: Updated filter for client ${clientId}`);
} }
} }
@@ -447,6 +459,11 @@ export class CaddyLogReceiver {
this.running = false; this.running = false;
// Cancel pending reads before closing sockets so background handlers can finish.
await Promise.allSettled(
Array.from(this.connectionReaders.values()).map((reader) => reader.cancel()),
);
// Close all connections // Close all connections
for (const conn of this.connections) { for (const conn of this.connections) {
try { try {
@@ -467,10 +484,19 @@ export class CaddyLogReceiver {
this.server = null; this.server = null;
} }
if (this.acceptTask) {
await this.acceptTask.catch(() => {});
this.acceptTask = null;
}
await Promise.allSettled(this.connectionHandlers);
this.connectionHandlers.clear();
this.connectionReaders.clear();
// Clear clients // Clear clients
this.clients.clear(); this.clients.clear();
logger.info('CaddyLogReceiver stopped'); logger.info('ProxyLogReceiver stopped');
} }
/** /**
+17 -8
View File
@@ -9,6 +9,9 @@ import type { IRegistry } from '../types.ts';
import { logger } from '../logging.ts'; import { logger } from '../logging.ts';
import { getErrorMessage } from '../utils/error.ts'; import { getErrorMessage } from '../utils/error.ts';
import { OneboxDatabase } from './database.ts'; import { OneboxDatabase } from './database.ts';
import { credentialEncryption } from './encryption.ts';
const encryptedPasswordPrefix = 'enc:v1:';
export class OneboxRegistriesManager { export class OneboxRegistriesManager {
private oneboxRef: any; // Will be Onebox instance private oneboxRef: any; // Will be Onebox instance
@@ -22,17 +25,23 @@ export class OneboxRegistriesManager {
/** /**
* Encrypt a password (simple base64 for now, should use proper encryption) * Encrypt a password (simple base64 for now, should use proper encryption)
*/ */
private encryptPassword(password: string): string { private async encryptPassword(password: string): Promise<string> {
// TODO: Use proper encryption with a secret key const encrypted = await credentialEncryption.encrypt({ password });
// For now, using base64 encoding (NOT SECURE, just for structure) return `${encryptedPasswordPrefix}${encrypted}`;
return plugins.encoding.encodeBase64(new TextEncoder().encode(password));
} }
/** /**
* Decrypt a password * Decrypt a password
*/ */
private decryptPassword(encrypted: string): string { private async decryptPassword(encrypted: string): Promise<string> {
// TODO: Use proper decryption if (encrypted.startsWith(encryptedPasswordPrefix)) {
const decrypted = await credentialEncryption.decrypt<{ password: string }>(
encrypted.slice(encryptedPasswordPrefix.length),
);
return decrypted.password;
}
// Legacy compatibility for older databases that stored base64-encoded passwords.
return new TextDecoder().decode(plugins.encoding.decodeBase64(encrypted)); return new TextDecoder().decode(plugins.encoding.decodeBase64(encrypted));
} }
@@ -48,7 +57,7 @@ export class OneboxRegistriesManager {
} }
// Encrypt password // Encrypt password
const passwordEncrypted = this.encryptPassword(password); const passwordEncrypted = await this.encryptPassword(password);
// Create registry in database // Create registry in database
const registry = await this.database.createRegistry({ const registry = await this.database.createRegistry({
@@ -111,7 +120,7 @@ export class OneboxRegistriesManager {
try { try {
logger.info(`Logging into registry: ${registry.url}`); logger.info(`Logging into registry: ${registry.url}`);
const password = this.decryptPassword(registry.passwordEncrypted); const password = await this.decryptPassword(registry.passwordEncrypted);
// Use docker login command // Use docker login command
const command = [ const command = [
+22 -12
View File
@@ -2,7 +2,7 @@
* Onebox Registry Manager * Onebox Registry Manager
* *
* Manages the local Docker registry using: * Manages the local Docker registry using:
* - @push.rocks/smarts3 (S3-compatible server with filesystem storage) * - @push.rocks/smartstorage (S3-compatible server with filesystem storage)
* - @push.rocks/smartregistry (OCI-compliant Docker registry) * - @push.rocks/smartregistry (OCI-compliant Docker registry)
*/ */
@@ -27,7 +27,7 @@ export class RegistryManager {
} }
/** /**
* Initialize the registry (start smarts3 and smartregistry) * Initialize the registry (start smartstorage and smartregistry)
*/ */
async init(): Promise<void> { async init(): Promise<void> {
if (this.isInitialized) { if (this.isInitialized) {
@@ -39,10 +39,10 @@ export class RegistryManager {
const dataDir = this.options.dataDir || './.nogit/registry-data'; const dataDir = this.options.dataDir || './.nogit/registry-data';
const port = this.options.port || 4000; const port = this.options.port || 4000;
logger.info(`Starting smarts3 server on port ${port}...`); logger.info(`Starting smartstorage server on port ${port}...`);
// 1. Start smarts3 server (S3-compatible storage with filesystem backend) // 1. Start smartstorage server (S3-compatible storage with filesystem backend)
this.s3Server = await plugins.smarts3.Smarts3.createAndStart({ this.s3Server = await plugins.smartstorage.SmartStorage.createAndStart({
server: { server: {
port: port, port: port,
address: '0.0.0.0', address: '0.0.0.0',
@@ -53,16 +53,16 @@ export class RegistryManager {
}, },
}); });
logger.success(`smarts3 server started on port ${port}`); logger.success(`smartstorage server started on port ${port}`);
// 2. Configure smartregistry to use smarts3 // 2. Configure smartregistry to use smartstorage
logger.info('Initializing smartregistry...'); logger.info('Initializing smartregistry...');
this.registry = new plugins.smartregistry.SmartRegistry({ this.registry = new plugins.smartregistry.SmartRegistry({
storage: { storage: {
endpoint: 'localhost', endpoint: 'localhost',
port: port, port: port,
accessKey: 'onebox', // smarts3 doesn't validate credentials accessKey: 'onebox', // smartstorage doesn't validate credentials
accessSecret: 'onebox', accessSecret: 'onebox',
useSsl: false, useSsl: false,
region: 'us-east-1', region: 'us-east-1',
@@ -76,7 +76,7 @@ export class RegistryManager {
}, },
ociTokens: { ociTokens: {
enabled: true, enabled: true,
realm: 'http://localhost:3000/v2/token', realm: `http://${this.baseUrl}/v2/token`,
service: 'onebox-registry', service: 'onebox-registry',
}, },
}, },
@@ -314,16 +314,26 @@ export class RegistryManager {
} }
/** /**
* Stop the registry and smarts3 server * Stop the registry and smartstorage server
*/ */
async stop(): Promise<void> { async stop(): Promise<void> {
if (this.registry) {
try {
this.registry.destroy?.();
} catch (error) {
logger.error(`Error destroying smartregistry: ${getErrorMessage(error)}`);
}
this.registry = null;
}
if (this.s3Server) { if (this.s3Server) {
try { try {
await this.s3Server.stop(); await this.s3Server.stop();
logger.info('smarts3 server stopped'); logger.info('smartstorage server stopped');
} catch (error) { } catch (error) {
logger.error(`Error stopping smarts3: ${getErrorMessage(error)}`); logger.error(`Error stopping smartstorage: ${getErrorMessage(error)}`);
} }
this.s3Server = null;
} }
this.isInitialized = false; this.isInitialized = false;
+38 -44
View File
@@ -1,8 +1,8 @@
/** /**
* Reverse Proxy for Onebox * Reverse Proxy for Onebox
* *
* Delegates to Caddy (running as Docker service) for production-grade reverse proxy * Delegates to SmartProxy (running as Docker service) for production-grade reverse proxy
* with native SNI support, HTTP/2, WebSocket proxying, and zero-downtime configuration updates. * with TLS termination, WebSocket proxying, and zero-downtime configuration updates.
* *
* Routes use Docker service names (e.g., onebox-hello-world:80) for container-to-container * Routes use Docker service names (e.g., onebox-hello-world:80) for container-to-container
* communication within the Docker overlay network. * communication within the Docker overlay network.
@@ -11,7 +11,7 @@
import { logger } from '../logging.ts'; import { logger } from '../logging.ts';
import { getErrorMessage } from '../utils/error.ts'; import { getErrorMessage } from '../utils/error.ts';
import { OneboxDatabase } from './database.ts'; import { OneboxDatabase } from './database.ts';
import { CaddyManager } from './caddy.ts'; import { SmartProxyManager } from './smartproxy.ts';
interface IProxyRoute { interface IProxyRoute {
domain: string; domain: string;
@@ -24,7 +24,7 @@ interface IProxyRoute {
export class OneboxReverseProxy { export class OneboxReverseProxy {
private oneboxRef: any; private oneboxRef: any;
private database: OneboxDatabase; private database: OneboxDatabase;
private caddy: CaddyManager; private smartProxy: SmartProxyManager;
private routes: Map<string, IProxyRoute> = new Map(); private routes: Map<string, IProxyRoute> = new Map();
private httpPort = 8080; // Default to dev ports (will be overridden if production) private httpPort = 8080; // Default to dev ports (will be overridden if production)
private httpsPort = 8443; private httpsPort = 8443;
@@ -32,33 +32,32 @@ export class OneboxReverseProxy {
constructor(oneboxRef: any) { constructor(oneboxRef: any) {
this.oneboxRef = oneboxRef; this.oneboxRef = oneboxRef;
this.database = oneboxRef.database; this.database = oneboxRef.database;
this.caddy = new CaddyManager({ this.smartProxy = new SmartProxyManager({
httpPort: this.httpPort, httpPort: this.httpPort,
httpsPort: this.httpsPort, httpsPort: this.httpsPort,
}); });
} }
/** /**
* Initialize reverse proxy - Caddy runs as Docker service, no setup needed * Initialize reverse proxy - SmartProxy runs as Docker service, no setup needed
*/ */
async init(): Promise<void> { async init(): Promise<void> {
logger.info('Reverse proxy initialized (Caddy Docker service)'); logger.info('Reverse proxy initialized (SmartProxy Docker service)');
} }
/** /**
* Start the HTTP/HTTPS reverse proxy server * Start the HTTP/HTTPS reverse proxy server
* Caddy handles both HTTP and HTTPS on the configured ports * SmartProxy handles both HTTP and HTTPS on the configured ports
*/ */
async startHttp(port?: number): Promise<void> { async startHttp(port?: number): Promise<void> {
if (port) { if (port) {
this.httpPort = port; this.httpPort = port;
this.caddy.setPorts(this.httpPort, this.httpsPort); this.smartProxy.setPorts(this.httpPort, this.httpsPort);
} }
try { try {
// Start Caddy (handles both HTTP and HTTPS) await this.smartProxy.start();
await this.caddy.start(); logger.success(`Reverse proxy started on port ${this.httpPort} (SmartProxy Docker service)`);
logger.success(`Reverse proxy started on port ${this.httpPort} (Caddy Docker service)`);
} catch (error) { } catch (error) {
logger.error(`Failed to start reverse proxy: ${getErrorMessage(error)}`); logger.error(`Failed to start reverse proxy: ${getErrorMessage(error)}`);
throw error; throw error;
@@ -66,21 +65,19 @@ export class OneboxReverseProxy {
} }
/** /**
* Start HTTPS - Caddy already handles HTTPS when started * Start HTTPS - SmartProxy already handles HTTPS when started
* This method exists for interface compatibility * This method exists for interface compatibility
*/ */
async startHttps(port?: number): Promise<void> { async startHttps(port?: number): Promise<void> {
if (port) { if (port) {
this.httpsPort = port; this.httpsPort = port;
this.caddy.setPorts(this.httpPort, this.httpsPort); this.smartProxy.setPorts(this.httpPort, this.httpsPort);
} }
// Caddy handles both HTTP and HTTPS together const status = this.smartProxy.getStatus();
// If already running, just log and optionally reload with new port
const status = this.caddy.getStatus();
if (status.running) { if (status.running) {
logger.info(`HTTPS already running on port ${this.httpsPort} via Caddy`); logger.info(`HTTPS already running on port ${this.httpsPort} via SmartProxy`);
} else { } else {
await this.caddy.start(); logger.warn('Skipping HTTPS reverse proxy startup because SmartProxy is not running');
} }
} }
@@ -88,13 +85,13 @@ export class OneboxReverseProxy {
* Stop the reverse proxy * Stop the reverse proxy
*/ */
async stop(): Promise<void> { async stop(): Promise<void> {
await this.caddy.stop(); await this.smartProxy.stop();
logger.info('Reverse proxy stopped'); logger.info('Reverse proxy stopped');
} }
/** /**
* Add a route for a service * Add a route for a service
* Uses Docker service name for upstream (Caddy runs in same Docker network) * Uses Docker service name for upstream (SmartProxy runs in same Docker network)
*/ */
async addRoute(serviceId: number, domain: string, targetPort: number): Promise<void> { async addRoute(serviceId: number, domain: string, targetPort: number): Promise<void> {
try { try {
@@ -105,7 +102,7 @@ export class OneboxReverseProxy {
} }
// Use Docker service name as upstream target // Use Docker service name as upstream target
// Caddy runs on the same Docker network, so it can resolve service names directly // SmartProxy runs on the same Docker network, so it can resolve service names directly
const serviceName = `onebox-${service.name}`; const serviceName = `onebox-${service.name}`;
const targetHost = serviceName; const targetHost = serviceName;
@@ -119,9 +116,9 @@ export class OneboxReverseProxy {
this.routes.set(domain, route); this.routes.set(domain, route);
// Add route to Caddy using Docker service name // Add route to SmartProxy using Docker service name
const upstream = `${targetHost}:${targetPort}`; const upstream = `${targetHost}:${targetPort}`;
await this.caddy.addRoute(domain, upstream); await this.smartProxy.addRoute(domain, upstream);
logger.success(`Added proxy route: ${domain} -> ${upstream}`); logger.success(`Added proxy route: ${domain} -> ${upstream}`);
} catch (error) { } catch (error) {
@@ -133,12 +130,9 @@ export class OneboxReverseProxy {
/** /**
* Remove a route * Remove a route
*/ */
removeRoute(domain: string): void { async removeRoute(domain: string): Promise<void> {
if (this.routes.delete(domain)) { if (this.routes.delete(domain)) {
// Remove from Caddy (async but we don't wait) await this.smartProxy.removeRoute(domain);
this.caddy.removeRoute(domain).catch((error) => {
logger.error(`Failed to remove Caddy route for ${domain}: ${getErrorMessage(error)}`);
});
logger.success(`Removed proxy route: ${domain}`); logger.success(`Removed proxy route: ${domain}`);
} else { } else {
logger.warn(`Route not found: ${domain}`); logger.warn(`Route not found: ${domain}`);
@@ -159,9 +153,9 @@ export class OneboxReverseProxy {
try { try {
logger.info('Reloading proxy routes...'); logger.info('Reloading proxy routes...');
// Clear local and Caddy routes // Clear local and SmartProxy routes
this.routes.clear(); this.routes.clear();
this.caddy.clear(); this.smartProxy.clear();
const services = this.database.getAllServices(); const services = this.database.getAllServices();
@@ -181,7 +175,7 @@ export class OneboxReverseProxy {
/** /**
* Add TLS certificate for a domain * Add TLS certificate for a domain
* Sends PEM content to Caddy via Admin API * Sends PEM content to SmartProxy via Admin API
*/ */
async addCertificate(domain: string, certPem: string, keyPem: string): Promise<void> { async addCertificate(domain: string, certPem: string, keyPem: string): Promise<void> {
if (!certPem || !keyPem) { if (!certPem || !keyPem) {
@@ -189,14 +183,14 @@ export class OneboxReverseProxy {
return; return;
} }
await this.caddy.addCertificate(domain, certPem, keyPem); await this.smartProxy.addCertificate(domain, certPem, keyPem);
} }
/** /**
* Remove TLS certificate for a domain * Remove TLS certificate for a domain
*/ */
removeCertificate(domain: string): void { removeCertificate(domain: string): void {
this.caddy.removeCertificate(domain).catch((error) => { this.smartProxy.removeCertificate(domain).catch((error) => {
logger.error(`Failed to remove certificate for ${domain}: ${getErrorMessage(error)}`); logger.error(`Failed to remove certificate for ${domain}: ${getErrorMessage(error)}`);
}); });
} }
@@ -213,13 +207,13 @@ export class OneboxReverseProxy {
for (const cert of certificates) { for (const cert of certificates) {
// Use fullchainPem for the cert (includes intermediates) and keyPem for the key // Use fullchainPem for the cert (includes intermediates) and keyPem for the key
if (cert.domain && cert.fullchainPem && cert.keyPem) { if (cert.domain && cert.fullchainPem && cert.keyPem) {
await this.caddy.addCertificate(cert.domain, cert.fullchainPem, cert.keyPem); await this.smartProxy.addCertificate(cert.domain, cert.fullchainPem, cert.keyPem);
} else { } else {
logger.warn(`Skipping certificate for ${cert.domain}: missing PEM content`); logger.warn(`Skipping certificate for ${cert.domain}: missing PEM content`);
} }
} }
logger.success(`Loaded ${this.caddy.getCertificates().length} TLS certificates`); logger.success(`Loaded ${this.smartProxy.getCertificates().length} TLS certificates`);
} catch (error) { } catch (error) {
logger.error(`Failed to reload certificates: ${getErrorMessage(error)}`); logger.error(`Failed to reload certificates: ${getErrorMessage(error)}`);
throw error; throw error;
@@ -230,19 +224,19 @@ export class OneboxReverseProxy {
* Get status of reverse proxy * Get status of reverse proxy
*/ */
getStatus() { getStatus() {
const caddyStatus = this.caddy.getStatus(); const smartProxyStatus = this.smartProxy.getStatus();
return { return {
http: { http: {
running: caddyStatus.running, running: smartProxyStatus.running,
port: caddyStatus.httpPort, port: smartProxyStatus.httpPort,
}, },
https: { https: {
running: caddyStatus.running, running: smartProxyStatus.running,
port: caddyStatus.httpsPort, port: smartProxyStatus.httpsPort,
certificates: caddyStatus.certificates, certificates: smartProxyStatus.certificates,
}, },
routes: caddyStatus.routes, routes: smartProxyStatus.routes,
backend: 'caddy-docker', backend: 'smartproxy-docker',
}; };
} }
} }
+93 -19
View File
@@ -23,6 +23,35 @@ export class OneboxServicesManager {
this.docker = oneboxRef.docker; this.docker = oneboxRef.docker;
} }
/**
 * Push a service change notification to connected ops clients.
 * Re-reads the database record so listeners receive the freshest state.
 */
private async broadcastServiceUpdate(
  serviceName: string,
  action: 'created' | 'updated' | 'deleted' | 'started' | 'stopped',
): Promise<void> {
  const currentRecord = this.database.getServiceByName(serviceName);
  await this.oneboxRef.opsServer.broadcastServiceUpdate(serviceName, action, currentRecord);
}
/**
 * Best-effort sync of a service's route to the external gateway.
 * A missing gateway is a no-op; sync failures are logged, never thrown.
 */
private async syncExternalGatewayRoute(service: IService): Promise<void> {
  const gateway = this.oneboxRef.externalGateway;
  if (!gateway) {
    return;
  }
  try {
    await gateway.syncServiceRoute(service);
  } catch (error) {
    logger.warn(`Failed to sync external gateway route for ${service.domain}: ${getErrorMessage(error)}`);
  }
}
/**
 * Best-effort removal of a service's route from the external gateway.
 * A missing gateway is a no-op; delete failures are logged, never thrown.
 */
private async deleteExternalGatewayRoute(service: Pick<IService, 'id' | 'name' | 'domain'>): Promise<void> {
  const gateway = this.oneboxRef.externalGateway;
  if (!gateway) {
    return;
  }
  try {
    await gateway.deleteServiceRoute(service);
  } catch (error) {
    logger.warn(`Failed to delete external gateway route for ${service.domain}: ${getErrorMessage(error)}`);
  }
}
/** /**
* Deploy a new service (full workflow) * Deploy a new service (full workflow)
*/ */
@@ -50,11 +79,13 @@ export class OneboxServicesManager {
// Build platform requirements // Build platform requirements
const platformRequirements: IPlatformRequirements | undefined = const platformRequirements: IPlatformRequirements | undefined =
(options.enableMongoDB || options.enableS3 || options.enableClickHouse) (options.enableMongoDB || options.enableS3 || options.enableClickHouse || options.enableRedis || options.enableMariaDB)
? { ? {
mongodb: options.enableMongoDB, mongodb: options.enableMongoDB,
s3: options.enableS3, s3: options.enableS3,
clickhouse: options.enableClickHouse, clickhouse: options.enableClickHouse,
redis: options.enableRedis,
mariadb: options.enableMariaDB,
} }
: undefined; : undefined;
@@ -76,6 +107,9 @@ export class OneboxServicesManager {
autoUpdateOnPush: options.autoUpdateOnPush, autoUpdateOnPush: options.autoUpdateOnPush,
// Platform requirements // Platform requirements
platformRequirements, platformRequirements,
// App Store template tracking
appTemplateId: options.appTemplateId,
appTemplateVersion: options.appTemplateVersion,
}); });
// Provision platform resources if needed // Provision platform resources if needed
@@ -96,9 +130,15 @@ export class OneboxServicesManager {
// Merge platform env vars with user-specified env vars (user vars take precedence) // Merge platform env vars with user-specified env vars (user vars take precedence)
const mergedEnvVars = { ...platformEnvVars, ...(options.envVars || {}) }; const mergedEnvVars = { ...platformEnvVars, ...(options.envVars || {}) };
this.resolveEnvVarTemplates(mergedEnvVars, {
...platformEnvVars,
SERVICE_NAME: options.name,
SERVICE_DOMAIN: options.domain || '',
SERVICE_PORT: String(options.port),
});
// Update service with merged env vars // Update service with merged and resolved env vars.
if (Object.keys(platformEnvVars).length > 0) { if (Object.keys(mergedEnvVars).length > 0) {
this.database.updateService(service.id!, { envVars: mergedEnvVars }); this.database.updateService(service.id!, { envVars: mergedEnvVars });
} }
@@ -188,11 +228,15 @@ export class OneboxServicesManager {
// Note: SSL certificates are now handled automatically by CertRequirementManager // Note: SSL certificates are now handled automatically by CertRequirementManager
// which processes pending requirements created above. No direct obtainCertificate call needed. // which processes pending requirements created above. No direct obtainCertificate call needed.
await this.syncExternalGatewayRoute(this.database.getServiceByName(options.name)!);
} }
logger.success(`Service deployed successfully: ${options.name}`); logger.success(`Service deployed successfully: ${options.name}`);
return this.database.getServiceByName(options.name)!; const deployedService = this.database.getServiceByName(options.name)!;
await this.broadcastServiceUpdate(options.name, 'created');
return deployedService;
} catch (error) { } catch (error) {
logger.error(`Failed to deploy service ${options.name}: ${getErrorMessage(error)}`); logger.error(`Failed to deploy service ${options.name}: ${getErrorMessage(error)}`);
throw error; throw error;
@@ -228,15 +272,19 @@ export class OneboxServicesManager {
} catch (routeError) { } catch (routeError) {
logger.warn(`Failed to add proxy route for ${service.domain}: ${getErrorMessage(routeError)}`); logger.warn(`Failed to add proxy route for ${service.domain}: ${getErrorMessage(routeError)}`);
} }
await this.syncExternalGatewayRoute(this.database.getServiceByName(name)!);
} }
logger.success(`Service started: ${name}`); logger.success(`Service started: ${name}`);
await this.broadcastServiceUpdate(name, 'started');
} catch (error) { } catch (error) {
logger.error(`Failed to start service ${name}: ${getErrorMessage(error)}`); logger.error(`Failed to start service ${name}: ${getErrorMessage(error)}`);
this.database.updateService( this.database.updateService(
this.database.getServiceByName(name)?.id!, this.database.getServiceByName(name)?.id!,
{ status: 'failed' } { status: 'failed' }
); );
await this.broadcastServiceUpdate(name, 'updated');
throw error; throw error;
} }
} }
@@ -265,10 +313,12 @@ export class OneboxServicesManager {
// Remove reverse proxy route if service has a domain // Remove reverse proxy route if service has a domain
if (service.domain) { if (service.domain) {
this.oneboxRef.reverseProxy.removeRoute(service.domain); await this.oneboxRef.reverseProxy.removeRoute(service.domain);
await this.deleteExternalGatewayRoute(service);
} }
logger.success(`Service stopped: ${name}`); logger.success(`Service stopped: ${name}`);
await this.broadcastServiceUpdate(name, 'stopped');
} catch (error) { } catch (error) {
logger.error(`Failed to stop service ${name}: ${getErrorMessage(error)}`); logger.error(`Failed to stop service ${name}: ${getErrorMessage(error)}`);
throw error; throw error;
@@ -296,6 +346,7 @@ export class OneboxServicesManager {
this.database.updateService(service.id!, { status: 'running' }); this.database.updateService(service.id!, { status: 'running' });
logger.success(`Service restarted: ${name}`); logger.success(`Service restarted: ${name}`);
await this.broadcastServiceUpdate(name, 'updated');
} catch (error) { } catch (error) {
logger.error(`Failed to restart service ${name}: ${getErrorMessage(error)}`); logger.error(`Failed to restart service ${name}: ${getErrorMessage(error)}`);
throw error; throw error;
@@ -326,11 +377,13 @@ export class OneboxServicesManager {
// Remove reverse proxy route // Remove reverse proxy route
if (service.domain) { if (service.domain) {
try { try {
this.oneboxRef.reverseProxy.removeRoute(service.domain); await this.oneboxRef.reverseProxy.removeRoute(service.domain);
} catch (error) { } catch (error) {
logger.warn(`Failed to remove reverse proxy route: ${getErrorMessage(error)}`); logger.warn(`Failed to remove reverse proxy route: ${getErrorMessage(error)}`);
} }
await this.deleteExternalGatewayRoute(service);
// Note: We don't remove DNS records or SSL certs automatically // Note: We don't remove DNS records or SSL certs automatically
// as they might be used by other services or need manual cleanup // as they might be used by other services or need manual cleanup
} }
@@ -352,6 +405,7 @@ export class OneboxServicesManager {
this.database.deleteService(service.id!); this.database.deleteService(service.id!);
logger.success(`Service removed: ${name}`); logger.success(`Service removed: ${name}`);
await this.oneboxRef.opsServer.broadcastServiceUpdate(name, 'deleted');
} catch (error) { } catch (error) {
logger.error(`Failed to remove service ${name}: ${getErrorMessage(error)}`); logger.error(`Failed to remove service ${name}: ${getErrorMessage(error)}`);
throw error; throw error;
@@ -588,10 +642,12 @@ export class OneboxServicesManager {
// Remove old route if it existed // Remove old route if it existed
if (oldDomain) { if (oldDomain) {
try { try {
this.oneboxRef.reverseProxy.removeRoute(oldDomain); await this.oneboxRef.reverseProxy.removeRoute(oldDomain);
} catch (error) { } catch (error) {
logger.warn(`Failed to remove old reverse proxy route: ${getErrorMessage(error)}`); logger.warn(`Failed to remove old reverse proxy route: ${getErrorMessage(error)}`);
} }
await this.deleteExternalGatewayRoute({ ...service, domain: oldDomain });
} }
// Add new route if domain specified // Add new route if domain specified
@@ -620,7 +676,12 @@ export class OneboxServicesManager {
logger.success(`Service ${name} updated (not started)`); logger.success(`Service ${name} updated (not started)`);
} }
return this.database.getServiceByName(name)!; const refreshedService = this.database.getServiceByName(name)!;
if (refreshedService.domain && refreshedService.status === 'running') {
await this.syncExternalGatewayRoute(refreshedService);
}
await this.broadcastServiceUpdate(name, 'updated');
return refreshedService;
} catch (error) { } catch (error) {
logger.error(`Failed to update service ${name}: ${getErrorMessage(error)}`); logger.error(`Failed to update service ${name}: ${getErrorMessage(error)}`);
throw error; throw error;
@@ -654,11 +715,7 @@ export class OneboxServicesManager {
// Only update and broadcast if status changed // Only update and broadcast if status changed
if (service.status !== ourStatus) { if (service.status !== ourStatus) {
this.database.updateService(service.id!, { status: ourStatus }); this.database.updateService(service.id!, { status: ourStatus });
await this.broadcastServiceUpdate(name, 'updated');
// Broadcast status change via WebSocket
if (this.oneboxRef.httpServer) {
this.oneboxRef.httpServer.broadcastServiceStatus(name, ourStatus);
}
} }
} catch (error) { } catch (error) {
logger.debug(`Failed to sync status for service ${name}: ${getErrorMessage(error)}`); logger.debug(`Failed to sync status for service ${name}: ${getErrorMessage(error)}`);
@@ -676,6 +733,29 @@ export class OneboxServicesManager {
} }
} }
private resolveEnvVarTemplates(
envVarsArg: Record<string, string>,
valuesArg: Record<string, string>,
): void {
for (const [key, value] of Object.entries(envVarsArg)) {
const missingValues = new Set<string>();
const resolvedValue = value.replace(/\$\{([A-Z0-9_]+)\}/g, (match, placeholderName) => {
const replacement = valuesArg[placeholderName];
if (replacement === undefined || replacement === '') {
missingValues.add(placeholderName);
return match;
}
return replacement;
});
if (missingValues.size > 0) {
throw new Error(
`Missing template value(s) for ${key}: ${Array.from(missingValues).join(', ')}`,
);
}
envVarsArg[key] = resolvedValue;
}
}
/** /**
* Start auto-update monitoring for registry services * Start auto-update monitoring for registry services
* Polls every 30 seconds for digest changes and restarts services if needed * Polls every 30 seconds for digest changes and restarts services if needed
@@ -751,12 +831,6 @@ export class OneboxServicesManager {
// Restart service // Restart service
logger.info(`Auto-restarting service: ${service.name}`); logger.info(`Auto-restarting service: ${service.name}`);
await this.restartService(service.name); await this.restartService(service.name);
// Broadcast update via WebSocket
this.oneboxRef.httpServer.broadcastServiceUpdate({
action: 'updated',
service: this.database.getServiceByName(service.name)!,
});
} else if (!service.imageDigest) { } else if (!service.imageDigest) {
// First time - just store the digest // First time - just store the digest
this.database.updateService(service.id!, { this.database.updateService(service.id!, {
+477
View File
@@ -0,0 +1,477 @@
/**
* SmartProxy Manager for Onebox
*
* Manages SmartProxy as a Docker Swarm service so it can route to services on
* the Onebox overlay network.
*/
import * as plugins from '../plugins.ts';
import { logger } from '../logging.ts';
import { getErrorMessage } from '../utils/error.ts';
// Swarm service name for the managed SmartProxy instance.
const SMARTPROXY_SERVICE_NAME = 'onebox-smartproxy';
// Name of the previous Caddy-based ingress service; removed during startup if present.
const LEGACY_CADDY_SERVICE_NAME = 'onebox-caddy';
const SMARTPROXY_IMAGE = 'code.foss.global/host.today/ht-docker-smartproxy:latest';
// Ports exposed INSIDE the SmartProxy container; host-side ports are configurable.
const SMARTPROXY_ADMIN_CONTAINER_PORT = 3000;
const SMARTPROXY_HTTP_CONTAINER_PORT = 80;
const SMARTPROXY_HTTPS_CONTAINER_PORT = 443;

/** A single routing entry: requests for `domain` forward to `upstream` ("host:port"). */
export interface ISmartProxyRoute {
  domain: string;
  upstream: string;
}

/** PEM certificate material used to terminate TLS for one domain. */
export interface ISmartProxyCertificate {
  domain: string;
  certPem: string;
  keyPem: string;
}

/** Shape of one route object in the payload sent to SmartProxy's admin API. */
interface ISmartProxyRouteConfig {
  name: string;
  match: {
    ports: number;
    domains: string;
    protocol?: 'http' | 'tcp' | 'udp' | 'quic' | 'http3';
  };
  action: {
    type: 'forward';
    targets: Array<{ host: string; port: number }>;
    tls?: {
      mode: 'terminate';
      certificate: {
        key: string;
        cert: string;
      };
    };
    websocket?: {
      enabled: boolean;
    };
  };
  // Higher priority wins; HTTPS routes are registered above HTTP ones.
  priority?: number;
}
/**
 * Manages SmartProxy as a Docker Swarm service for Onebox ingress.
 *
 * Desired state (routes and certificates) is held in memory and pushed as a
 * complete config to the running SmartProxy container through its HTTP admin
 * API (`reloadConfig`). The Swarm service itself is created/removed via the
 * Docker Engine API over the local unix socket.
 */
export class SmartProxyManager {
  // Lazily-created Docker API client; null until first use (see ensureDockerClient).
  private dockerClient: InstanceType<typeof plugins.docker.Docker> | null = null;
  // Directory where cert/key PEMs are mirrored to disk as a backup.
  private certsDir: string;
  // Base URL of the SmartProxy admin API, published on the host.
  private adminUrl: string;
  private adminPort: number;
  private httpPort: number;
  private httpsPort: number;
  // Desired routing state, keyed by domain.
  private routes: Map<string, ISmartProxyRoute> = new Map();
  // Desired TLS state, keyed by domain.
  private certificates: Map<string, ISmartProxyCertificate> = new Map();
  private networkName = 'onebox-network';
  // Local flag flipped by start()/stop(); NOT a live Docker check (see getStatus).
  private serviceRunning = false;

  /**
   * @param options Optional overrides for the cert backup directory and the
   *   host-published admin/HTTP/HTTPS ports.
   */
  constructor(options?: {
    certsDir?: string;
    adminPort?: number;
    httpPort?: number;
    httpsPort?: number;
  }) {
    this.certsDir = options?.certsDir || './.nogit/certs';
    this.adminPort = options?.adminPort || 2019;
    this.adminUrl = `http://localhost:${this.adminPort}`;
    this.httpPort = options?.httpPort || 8080;
    this.httpsPort = options?.httpsPort || 8443;
  }

  // Create and start the Docker client on first use (unix socket only).
  private async ensureDockerClient(): Promise<void> {
    if (!this.dockerClient) {
      this.dockerClient = new plugins.docker.Docker({
        socketPath: 'unix:///var/run/docker.sock',
      });
      await this.dockerClient.start();
    }
  }

  /** Override the published HTTP/HTTPS host ports; takes effect on next start(). */
  setPorts(httpPort: number, httpsPort: number): void {
    this.httpPort = httpPort;
    this.httpsPort = httpsPort;
  }

  /**
   * Create (or recreate) the SmartProxy Swarm service and push the current config.
   *
   * Removes a legacy Caddy service and any stale SmartProxy service first,
   * waits for the admin API to answer `/ready`, then uploads routes.
   * @throws when service creation fails or the admin API never becomes ready
   */
  async start(): Promise<void> {
    if (this.serviceRunning) {
      logger.warn('SmartProxy service is already running');
      return;
    }
    try {
      await this.ensureDockerClient();
      await Deno.mkdir(this.certsDir, { recursive: true });
      logger.info('Starting SmartProxy Docker service...');
      // Migration path: tear down the old Caddy-based ingress if it is still around.
      const legacyService = await this.getExistingService(LEGACY_CADDY_SERVICE_NAME);
      if (legacyService) {
        logger.info('Legacy Caddy service exists, removing it before SmartProxy startup...');
        await this.removeService(LEGACY_CADDY_SERVICE_NAME);
        // Brief pause to let Swarm release the published ports.
        await new Promise((resolve) => setTimeout(resolve, 2000));
      }
      // Always recreate our own service to pick up fresh image/port settings.
      const existingService = await this.getExistingService();
      if (existingService) {
        logger.info('SmartProxy service exists, removing old service...');
        await this.removeService();
        await new Promise((resolve) => setTimeout(resolve, 2000));
      }
      const networkId = await this.getNetworkId();
      const response = await this.dockerClient!.request('POST', '/services/create', {
        Name: SMARTPROXY_SERVICE_NAME,
        Labels: {
          'managed-by': 'onebox',
          'onebox-type': 'smartproxy',
        },
        TaskTemplate: {
          ContainerSpec: {
            Image: SMARTPROXY_IMAGE,
            Env: [
              'SMARTPROXY_ADMIN_HOST=0.0.0.0',
              `SMARTPROXY_ADMIN_PORT=${SMARTPROXY_ADMIN_CONTAINER_PORT}`,
            ],
          },
          Networks: [
            {
              Target: networkId,
            },
          ],
          RestartPolicy: {
            Condition: 'any',
            MaxAttempts: 0,
          },
        },
        Mode: {
          Replicated: {
            Replicas: 1,
          },
        },
        EndpointSpec: {
          // PublishMode 'host' binds directly on the node, bypassing the routing mesh.
          Ports: [
            {
              Protocol: 'tcp',
              TargetPort: SMARTPROXY_HTTP_CONTAINER_PORT,
              PublishedPort: this.httpPort,
              PublishMode: 'host',
            },
            {
              Protocol: 'tcp',
              TargetPort: SMARTPROXY_HTTPS_CONTAINER_PORT,
              PublishedPort: this.httpsPort,
              PublishMode: 'host',
            },
            {
              Protocol: 'tcp',
              TargetPort: SMARTPROXY_ADMIN_CONTAINER_PORT,
              PublishedPort: this.adminPort,
              PublishMode: 'host',
            },
          ],
        },
      });
      if (response.statusCode >= 300) {
        throw new Error(`Failed to create SmartProxy service: HTTP ${response.statusCode} - ${JSON.stringify(response.body)}`);
      }
      logger.info(`SmartProxy service created: ${response.body.ID}`);
      await this.waitForReady();
      this.serviceRunning = true;
      // Push any routes/certs accumulated before startup.
      await this.reloadConfig();
      logger.success(`SmartProxy started (HTTP: ${this.httpPort}, HTTPS: ${this.httpsPort}, Admin: ${this.adminUrl})`);
    } catch (error) {
      logger.error(`Failed to start SmartProxy: ${getErrorMessage(error)}`);
      throw error;
    }
  }

  // Inspect a service by name; returns the API body, or null if absent/errored.
  private async getExistingService(serviceNameArg = SMARTPROXY_SERVICE_NAME): Promise<any | null> {
    try {
      const response = await this.dockerClient!.request('GET', `/services/${serviceNameArg}`, {});
      if (response.statusCode === 200) {
        return response.body;
      }
      return null;
    } catch {
      return null;
    }
  }

  // Delete a service by name, ignoring failures (service may not exist).
  private async removeService(serviceNameArg = SMARTPROXY_SERVICE_NAME): Promise<void> {
    try {
      await this.dockerClient!.request('DELETE', `/services/${serviceNameArg}`, {});
    } catch {
      // Service may not exist.
    }
  }

  // Resolve the overlay network name to its Docker network ID.
  private async getNetworkId(): Promise<string> {
    const networks = await this.dockerClient!.listNetworks();
    const network = networks.find((n: any) => n.Name === this.networkName);
    if (!network) {
      throw new Error(`Network not found: ${this.networkName}`);
    }
    return network.Id;
  }

  // Poll the admin API's /ready endpoint until it answers OK or attempts run out.
  private async waitForReady(maxAttempts = 10, intervalMs = 1000): Promise<void> {
    for (let i = 0; i < maxAttempts; i++) {
      try {
        const response = await fetch(`${this.adminUrl}/ready`);
        if (response.ok) {
          return;
        }
      } catch {
        // Not ready yet.
      }
      await new Promise((resolve) => setTimeout(resolve, intervalMs));
    }
    throw new Error('SmartProxy service failed to start within timeout');
  }

  /**
   * Remove the Swarm service and release the Docker client.
   * Never throws; failures are logged so shutdown can continue.
   */
  async stop(): Promise<void> {
    try {
      await this.ensureDockerClient();
      if (!this.serviceRunning && !(await this.getExistingService())) {
        return;
      }
      logger.info('Stopping SmartProxy service...');
      await this.removeService();
      this.serviceRunning = false;
      logger.info('SmartProxy service stopped');
    } catch (error) {
      logger.error(`Failed to stop SmartProxy: ${getErrorMessage(error)}`);
    } finally {
      // Always tear down the Docker client so sockets don't leak across restarts.
      if (this.dockerClient) {
        try {
          await this.dockerClient.stop();
        } catch (error) {
          logger.error(`Failed to stop SmartProxy Docker client: ${getErrorMessage(error)}`);
        } finally {
          this.dockerClient = null;
        }
      }
    }
  }

  /** True when the admin API's /health endpoint responds OK. */
  async isHealthy(): Promise<boolean> {
    try {
      const response = await fetch(`${this.adminUrl}/health`);
      return response.ok;
    } catch {
      return false;
    }
  }

  /** Live check: true when the Swarm service exists and has a running task. */
  async isRunning(): Promise<boolean> {
    try {
      await this.ensureDockerClient();
      const service = await this.getExistingService();
      if (!service) return false;
      const tasksResponse = await this.dockerClient!.request(
        'GET',
        `/tasks?filters=${encodeURIComponent(JSON.stringify({ service: [SMARTPROXY_SERVICE_NAME] }))}`,
        {},
      );
      if (tasksResponse.statusCode !== 200) return false;
      const tasks = tasksResponse.body;
      return tasks.some((task: any) => task.Status?.State === 'running');
    } catch {
      return false;
    }
  }

  // Build a config-safe route name from a prefix and domain (non-alphanumerics -> '-').
  private routeName(prefixArg: string, domainArg: string): string {
    return `${prefixArg}-${domainArg.replace(/[^a-zA-Z0-9]+/g, '-').replace(/^-|-$/g, '')}`;
  }

  /**
   * Split an "host:port" upstream string into its parts.
   * Splits on the LAST colon; port must be an integer in 1..65535.
   * NOTE(review): a bracketed IPv6 literal like "[::1]:80" would keep the
   * brackets in `host` — confirm whether upstreams can ever be IPv6.
   * @throws on a malformed target or out-of-range port
   */
  private parseUpstream(upstreamArg: string): { host: string; port: number } {
    const separatorIndex = upstreamArg.lastIndexOf(':');
    if (separatorIndex <= 0 || separatorIndex === upstreamArg.length - 1) {
      throw new Error(`Invalid upstream target: ${upstreamArg}`);
    }
    const host = upstreamArg.slice(0, separatorIndex);
    const port = Number(upstreamArg.slice(separatorIndex + 1));
    if (!Number.isInteger(port) || port < 1 || port > 65535) {
      throw new Error(`Invalid upstream port in target: ${upstreamArg}`);
    }
    return { host, port };
  }

  // Translate the in-memory route/cert maps into SmartProxy route configs:
  // one HTTP route per domain, plus a higher-priority TLS-terminating HTTPS
  // route when a certificate for that domain is known.
  private buildRoutes(): ISmartProxyRouteConfig[] {
    const routeConfigs: ISmartProxyRouteConfig[] = [];
    for (const [domain, route] of this.routes) {
      const target = this.parseUpstream(route.upstream);
      const baseAction = {
        type: 'forward' as const,
        targets: [target],
        websocket: {
          enabled: true,
        },
      };
      routeConfigs.push({
        name: this.routeName('http', domain),
        match: {
          ports: SMARTPROXY_HTTP_CONTAINER_PORT,
          domains: domain,
          protocol: 'http',
        },
        action: baseAction,
        priority: 10,
      });
      const certificate = this.certificates.get(domain);
      if (certificate) {
        routeConfigs.push({
          name: this.routeName('https', domain),
          match: {
            ports: SMARTPROXY_HTTPS_CONTAINER_PORT,
            domains: domain,
            protocol: 'http',
          },
          action: {
            ...baseAction,
            tls: {
              mode: 'terminate',
              certificate: {
                key: certificate.keyPem,
                cert: certificate.certPem,
              },
            },
          },
          priority: 20,
        });
      }
    }
    return routeConfigs;
  }

  /**
   * Replace SmartProxy's full route table with the current in-memory state
   * via a PUT to the admin API. No-op (with a warning) if not running.
   * @throws when the admin API rejects the payload
   */
  async reloadConfig(): Promise<void> {
    const isRunning = await this.isRunning();
    if (!isRunning) {
      logger.warn('SmartProxy not running, cannot reload config');
      return;
    }
    const routes = this.buildRoutes();
    try {
      const response = await fetch(`${this.adminUrl}/routes`, {
        method: 'PUT',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ routes }),
      });
      if (!response.ok) {
        const text = await response.text();
        throw new Error(`Failed to reload SmartProxy routes: ${response.status} ${text}`);
      }
      logger.debug('SmartProxy routes reloaded');
    } catch (error) {
      logger.error(`Failed to reload SmartProxy routes: ${getErrorMessage(error)}`);
      throw error;
    }
  }

  /** Register (or replace) a domain route and push config if the proxy is up. */
  async addRoute(domain: string, upstream: string): Promise<void> {
    this.routes.set(domain, { domain, upstream });
    if (await this.isRunning()) {
      await this.reloadConfig();
    }
    logger.success(`Added SmartProxy route: ${domain} -> ${upstream}`);
  }

  /** Remove a domain route; pushes config only if the route actually existed. */
  async removeRoute(domain: string): Promise<void> {
    if (this.routes.delete(domain)) {
      if (await this.isRunning()) {
        await this.reloadConfig();
      }
      logger.success(`Removed SmartProxy route: ${domain}`);
    }
  }

  /**
   * Register a TLS certificate for a domain, mirror the PEMs to disk as a
   * best-effort backup, and push config if the proxy is up.
   */
  async addCertificate(domain: string, certPem: string, keyPem: string): Promise<void> {
    this.certificates.set(domain, {
      domain,
      certPem,
      keyPem,
    });
    try {
      await Deno.mkdir(this.certsDir, { recursive: true });
      await Deno.writeTextFile(`${this.certsDir}/${domain}.crt`, certPem);
      await Deno.writeTextFile(`${this.certsDir}/${domain}.key`, keyPem);
    } catch (error) {
      // Disk backup is non-fatal; the in-memory copy is authoritative.
      logger.warn(`Failed to write certificate backup for ${domain}: ${getErrorMessage(error)}`);
    }
    if (await this.isRunning()) {
      await this.reloadConfig();
    }
    logger.success(`Added TLS certificate for ${domain}`);
  }

  /** Remove a domain's certificate (memory + on-disk backup) and push config. */
  async removeCertificate(domain: string): Promise<void> {
    if (this.certificates.delete(domain)) {
      try {
        await Deno.remove(`${this.certsDir}/${domain}.crt`);
        await Deno.remove(`${this.certsDir}/${domain}.key`);
      } catch {
        // Files may not exist.
      }
      if (await this.isRunning()) {
        await this.reloadConfig();
      }
      logger.success(`Removed TLS certificate for ${domain}`);
    }
  }

  /** Snapshot of configured routes. */
  getRoutes(): ISmartProxyRoute[] {
    return Array.from(this.routes.values());
  }

  /** Snapshot of configured certificates (includes PEM material). */
  getCertificates(): ISmartProxyCertificate[] {
    return Array.from(this.certificates.values());
  }

  // Drop all in-memory routes/certs WITHOUT pushing to the proxy; callers
  // are expected to re-add state and reload afterwards.
  clear(): void {
    this.routes.clear();
    this.certificates.clear();
  }

  /**
   * Cheap status summary.
   * NOTE(review): `running` reflects the local flag set by start()/stop(),
   * not live Docker state — use isRunning() when accuracy matters.
   */
  getStatus(): {
    running: boolean;
    httpPort: number;
    httpsPort: number;
    routes: number;
    certificates: number;
  } {
    return {
      running: this.serviceRunning,
      httpPort: this.httpPort,
      httpsPort: this.httpsPort,
      routes: this.routes.size,
      certificates: this.certificates.size,
    };
  }
}
+2 -2
View File
@@ -39,11 +39,11 @@ export class OneboxSslManager {
this.acmeEmail = acmeEmail; this.acmeEmail = acmeEmail;
// Get Cloudflare API key (reuse from DNS manager) // Get Cloudflare API key (reuse from DNS manager)
const cfApiKey = this.database.getSetting('cloudflareAPIKey'); const cfApiKey = await this.database.getSecretSetting('cloudflareToken');
if (!cfApiKey) { if (!cfApiKey) {
logger.warn('Cloudflare API key not configured. SSL certificate management will be limited.'); logger.warn('Cloudflare API key not configured. SSL certificate management will be limited.');
logger.info('Configure with: onebox config set cloudflareAPIKey <key>'); logger.info('Configure with: onebox config set cloudflareToken <key>');
return; return;
} }
+207 -14
View File
@@ -8,16 +8,17 @@ import { getErrorMessage } from './utils/error.ts';
import { Onebox } from './classes/onebox.ts'; import { Onebox } from './classes/onebox.ts';
import { OneboxDaemon } from './classes/daemon.ts'; import { OneboxDaemon } from './classes/daemon.ts';
import { OneboxSystemd } from './classes/systemd.ts'; import { OneboxSystemd } from './classes/systemd.ts';
import type { IAppVersionConfig } from './classes/appstore-types.ts';
export async function runCli(): Promise<void> { export async function runCli(): Promise<void> {
const args = Deno.args; const args = Deno.args;
if (args.length === 0 || args.includes('--help') || args.includes('-h')) { if (args.length === 0 || (args.length === 1 && (args[0] === '--help' || args[0] === '-h'))) {
printHelp(); printHelp();
return; return;
} }
if (args.includes('--version') || args.includes('-v')) { if (args.length === 1 && (args[0] === '--version' || args[0] === '-v')) {
console.log(`${projectInfo.name} v${projectInfo.version}`); console.log(`${projectInfo.name} v${projectInfo.version}`);
return; return;
} }
@@ -70,6 +71,11 @@ export async function runCli(): Promise<void> {
await handleSslCommand(onebox, subcommand, args.slice(2)); await handleSslCommand(onebox, subcommand, args.slice(2));
break; break;
case 'appstore':
await handleAppStoreCommand(onebox, subcommand, args.slice(2));
break;
case 'proxy':
case 'nginx': case 'nginx':
await handleNginxCommand(onebox, subcommand, args.slice(2)); await handleNginxCommand(onebox, subcommand, args.slice(2));
break; break;
@@ -104,12 +110,11 @@ async function handleServiceCommand(onebox: Onebox, subcommand: string, args: st
const image = getArg(args, '--image'); const image = getArg(args, '--image');
const domain = getArg(args, '--domain'); const domain = getArg(args, '--domain');
const port = parseInt(getArg(args, '--port') || '80', 10); const port = parseInt(getArg(args, '--port') || '80', 10);
const envArgs = args.filter((a) => a.startsWith('--env=')).map((a) => a.slice(6)); const envVars = parseEnvArgs(args);
const envVars: Record<string, string> = {};
for (const env of envArgs) { requireValue(name, 'service name');
const [key, value] = env.split('='); requireValue(image, '--image');
envVars[key] = value; assertValidPort(port, '--port');
}
await onebox.services.deployService({ name, image, port, domain, envVars }); await onebox.services.deployService({ name, image, port, domain, envVars });
break; break;
@@ -158,6 +163,7 @@ async function handleRegistryCommand(onebox: Onebox, subcommand: string, args: s
const url = getArg(args, '--url'); const url = getArg(args, '--url');
const username = getArg(args, '--username'); const username = getArg(args, '--username');
const password = getArg(args, '--password'); const password = getArg(args, '--password');
requireValue(url, '--url');
await onebox.registries.addRegistry(url, username, password); await onebox.registries.addRegistry(url, username, password);
break; break;
} }
@@ -180,6 +186,76 @@ async function handleRegistryCommand(onebox: Onebox, subcommand: string, args: s
} }
} }
// App Store commands
/**
 * Dispatch `appstore` CLI subcommands: list the catalog, show an app's
 * version config, or install an app as a deployed service.
 */
async function handleAppStoreCommand(onebox: Onebox, subcommand: string, args: string[]) {
  switch (subcommand) {
    case 'list': {
      const catalog = await onebox.appStore.getApps();
      const rows = catalog.map((app) => [app.id, app.name, app.category, app.latestVersion]);
      logger.table(['ID', 'Name', 'Category', 'Latest'], rows);
      break;
    }
    case 'config': {
      const appId = args[0];
      requireValue(appId, 'app id');
      const meta = await onebox.appStore.getAppMeta(appId);
      // Default to the newest published version when --version is omitted.
      const selectedVersion = getArg(args, '--version') || meta.latestVersion;
      const versionConfig = await onebox.appStore.getAppVersionConfig(appId, selectedVersion);
      console.log(JSON.stringify({ appMeta: meta, version: selectedVersion, config: versionConfig }, null, 2));
      break;
    }
    case 'install': {
      const appId = args[0];
      requireValue(appId, 'app id');
      const meta = await onebox.appStore.getAppMeta(appId);
      const selectedVersion = getArg(args, '--version') || meta.latestVersion;
      const versionConfig = await onebox.appStore.getAppVersionConfig(appId, selectedVersion);
      const serviceName = getArg(args, '--name') || appId;
      const domain = getArg(args, '--domain');
      const port = parseInt(getArg(args, '--port') || String(versionConfig.port), 10);
      const envVars = getAppStoreEnvVars(versionConfig, parseEnvArgs(args));
      const autoDNS = getBooleanArg(args, '--auto-dns', true);
      requireValue(serviceName, '--name');
      assertValidPort(port, '--port');
      // Only demand --domain when the template actually interpolates it.
      if (requiresTemplateValue(envVars, 'SERVICE_DOMAIN')) {
        requireValue(domain, '--domain');
      }
      const platform = versionConfig.platformRequirements;
      const service = await onebox.services.deployService({
        name: serviceName,
        image: versionConfig.image,
        port,
        domain,
        autoDNS,
        envVars,
        enableMongoDB: Boolean(platform?.mongodb),
        enableS3: Boolean(platform?.s3),
        enableClickHouse: Boolean(platform?.clickhouse),
        enableRedis: Boolean(platform?.redis),
        enableMariaDB: Boolean(platform?.mariadb),
        appTemplateId: appId,
        appTemplateVersion: selectedVersion,
      });
      logger.success(`Installed ${meta.name} ${selectedVersion} as ${service.name}`);
      if (service.domain) {
        logger.info(`Route: https://${service.domain}`);
      }
      break;
    }
    default:
      logger.error(`Unknown appstore subcommand: ${subcommand}`);
      logger.info('Available: list, config, install');
  }
}
// DNS commands // DNS commands
async function handleDnsCommand(onebox: Onebox, subcommand: string, args: string[]) { async function handleDnsCommand(onebox: Onebox, subcommand: string, args: string[]) {
switch (subcommand) { switch (subcommand) {
@@ -382,7 +458,17 @@ async function handleSystemdCommand(subcommand: string, _args: string[]) {
async function handleConfigCommand(onebox: Onebox, subcommand: string, args: string[]) { async function handleConfigCommand(onebox: Onebox, subcommand: string, args: string[]) {
switch (subcommand) { switch (subcommand) {
case 'show': { case 'show': {
for (const secretKey of onebox.database.getCanonicalSecretSettingKeys()) {
await onebox.database.getSecretSetting(secretKey);
}
const settings = onebox.database.getAllSettings(); const settings = onebox.database.getAllSettings();
for (const secretKey of onebox.database.getCanonicalSecretSettingKeys()) {
if (await onebox.database.hasSecretSetting(secretKey)) {
settings[secretKey] = '********';
}
}
logger.table( logger.table(
['Key', 'Value'], ['Key', 'Value'],
Object.entries(settings).map(([k, v]) => [k, v]) Object.entries(settings).map(([k, v]) => [k, v])
@@ -391,7 +477,11 @@ async function handleConfigCommand(onebox: Onebox, subcommand: string, args: str
} }
case 'set': case 'set':
onebox.database.setSetting(args[0], args[1]); if (onebox.database.isSecretSettingKey(args[0])) {
await onebox.database.setSecretSetting(args[0], args[1]);
} else {
onebox.database.setSetting(args[0], args[1]);
}
logger.success(`Setting ${args[0]} updated`); logger.success(`Setting ${args[0]} updated`);
break; break;
@@ -480,8 +570,106 @@ async function handleUpgradeCommand(): Promise<void> {
// Helpers // Helpers
function getArg(args: string[], flag: string): string { function getArg(args: string[], flag: string): string {
const arg = args.find((a) => a.startsWith(`${flag}=`)); for (let i = 0; i < args.length; i++) {
return arg ? arg.split('=')[1] : ''; const arg = args[i];
if (arg.startsWith(`${flag}=`)) {
return arg.slice(flag.length + 1);
}
if (arg === flag) {
const value = args[i + 1];
return value && !value.startsWith('--') ? value : '';
}
}
return '';
}
/**
 * Collects every occurrence of a repeatable CLI flag.
 * Supports both `--flag=value` and `--flag value` forms; a bare flag
 * followed by another option (or nothing) contributes no value.
 */
function getRepeatedArgs(args: string[], flag: string): string[] {
  const inlinePrefix = `${flag}=`;
  const collected: string[] = [];
  let index = 0;
  while (index < args.length) {
    const token = args[index];
    if (token.startsWith(inlinePrefix)) {
      collected.push(token.slice(inlinePrefix.length));
    } else if (token === flag) {
      const next = args[index + 1];
      if (next && !next.startsWith('--')) {
        collected.push(next);
        index++; // consume the value token as well
      }
    }
    index++;
  }
  return collected;
}
/**
 * Resolves a boolean CLI flag.
 * `--no-<flag>` wins outright; otherwise an explicit `--flag value` /
 * `--flag=value` is parsed with '0'/'false'/'no'/'off' meaning false;
 * a bare `--flag` means true; an absent flag yields the default.
 */
function getBooleanArg(args: string[], flag: string, defaultValue: boolean): boolean {
  const negatedForm = `--no-${flag.slice(2)}`;
  if (args.includes(negatedForm)) {
    return false;
  }
  const rawValue = getArg(args, flag);
  if (rawValue) {
    const falsyWords = ['0', 'false', 'no', 'off'];
    return !falsyWords.includes(rawValue.toLowerCase());
  }
  // No value supplied: bare flag presence means true.
  return args.includes(flag) || defaultValue;
}
/**
 * Parses every `--env KEY=VALUE` occurrence into a map.
 * @throws Error when an occurrence lacks '=' or has an empty key.
 */
function parseEnvArgs(args: string[]): Record<string, string> {
  const parsed: Record<string, string> = {};
  for (const entry of getRepeatedArgs(args, '--env')) {
    const eq = entry.indexOf('=');
    if (eq === -1) {
      throw new Error(`Invalid --env value '${entry}'. Expected KEY=VALUE.`);
    }
    const envKey = entry.slice(0, eq);
    requireValue(envKey, '--env key');
    parsed[envKey] = entry.slice(eq + 1);
  }
  return parsed;
}
/**
 * Merges template-declared env vars with CLI overrides.
 * Template defaults are applied first; overrides always win (including keys
 * the template never declared). A required template var left empty after
 * merging aborts with a descriptive error listing every missing key.
 */
function getAppStoreEnvVars(
  configArg: IAppVersionConfig,
  overridesArg: Record<string, string>,
): Record<string, string> {
  const merged: Record<string, string> = {};
  const missing: string[] = [];
  for (const declared of configArg.envVars || []) {
    const resolved = overridesArg[declared.key] ?? declared.value ?? '';
    if (declared.required && !resolved) {
      missing.push(declared.key);
    }
    merged[declared.key] = resolved;
  }
  // Overrides may introduce keys the template does not declare.
  Object.assign(merged, overridesArg);
  if (missing.length > 0) {
    throw new Error(
      `Missing required app env var(s): ${missing.join(', ')}. Use --env KEY=VALUE.`
    );
  }
  return merged;
}
/** True when any env value references the `${NAME}` template placeholder. */
function requiresTemplateValue(envVarsArg: Record<string, string>, templateNameArg: string): boolean {
  const placeholder = `\${${templateNameArg}}`;
  for (const candidate of Object.values(envVarsArg)) {
    if (candidate.includes(placeholder)) {
      return true;
    }
  }
  return false;
}
/** Asserts that a CLI-provided value is non-empty; throws otherwise. */
function requireValue(valueArg: string | undefined, labelArg: string): asserts valueArg is string {
  if (valueArg) {
    return;
  }
  throw new Error(`Missing required ${labelArg}`);
}
function assertValidPort(portArg: number, labelArg: string): void {
if (!Number.isInteger(portArg) || portArg <= 0 || portArg > 65535) {
throw new Error(`Invalid ${labelArg}: ${portArg}`);
}
} }
function printHelp(): void { function printHelp(): void {
@@ -518,9 +706,13 @@ Commands:
ssl list ssl list
ssl force-renew <domain> ssl force-renew <domain>
nginx reload appstore list
nginx test appstore config <app-id> [--version <version>]
nginx status appstore install <app-id> --name <name> [--domain <domain>] [--version <version>] [--env KEY=VALUE]
proxy reload # nginx alias is still supported
proxy test
proxy status
systemd enable Install and enable systemd service systemd enable Install and enable systemd service
systemd disable Stop, disable, and remove systemd service systemd disable Stop, disable, and remove systemd service
@@ -554,6 +746,7 @@ Production Workflow:
Examples: Examples:
onebox server --ephemeral # Start dev server onebox server --ephemeral # Start dev server
onebox service add myapp --image nginx:latest --domain app.example.com --port 80 onebox service add myapp --image nginx:latest --domain app.example.com --port 80
onebox appstore install cloudly --name cloudly --domain cloudly.example.com --env SERVEZONE_ADMINACCOUNT=admin:password
onebox registry add --url registry.example.com --username user --password pass onebox registry add --url registry.example.com --username user --password pass
onebox systemd enable onebox systemd enable
onebox systemd start onebox systemd start
+40 -1
View File
@@ -26,6 +26,7 @@ import type { TBindValue } from './types.ts';
import { logger } from '../logging.ts'; import { logger } from '../logging.ts';
import { getErrorMessage } from '../utils/error.ts'; import { getErrorMessage } from '../utils/error.ts';
import { MigrationRunner } from './migrations/index.ts'; import { MigrationRunner } from './migrations/index.ts';
import { SecretSettingsManager } from './secret-settings.ts';
// Import repositories // Import repositories
import { import {
@@ -50,6 +51,7 @@ export class OneboxDatabase {
private metricsRepo!: MetricsRepository; private metricsRepo!: MetricsRepository;
private platformRepo!: PlatformRepository; private platformRepo!: PlatformRepository;
private backupRepo!: BackupRepository; private backupRepo!: BackupRepository;
public secretSettings!: SecretSettingsManager;
constructor(dbPath = './.nogit/onebox.db') { constructor(dbPath = './.nogit/onebox.db') {
this.dbPath = dbPath; this.dbPath = dbPath;
@@ -84,6 +86,7 @@ export class OneboxDatabase {
this.metricsRepo = new MetricsRepository(queryFn); this.metricsRepo = new MetricsRepository(queryFn);
this.platformRepo = new PlatformRepository(queryFn); this.platformRepo = new PlatformRepository(queryFn);
this.backupRepo = new BackupRepository(queryFn); this.backupRepo = new BackupRepository(queryFn);
this.secretSettings = new SecretSettingsManager(this.authRepo);
} catch (error) { } catch (error) {
logger.error(`Failed to initialize database: ${getErrorMessage(error)}`); logger.error(`Failed to initialize database: ${getErrorMessage(error)}`);
throw error; throw error;
@@ -229,6 +232,14 @@ export class OneboxDatabase {
) )
`); `);
this.query(`
CREATE TABLE IF NOT EXISTS secret_settings (
key TEXT PRIMARY KEY,
value TEXT NOT NULL,
updated_at INTEGER NOT NULL
)
`);
// Version table for migrations // Version table for migrations
this.query(` this.query(`
CREATE TABLE IF NOT EXISTS migrations ( CREATE TABLE IF NOT EXISTS migrations (
@@ -333,10 +344,34 @@ export class OneboxDatabase {
this.authRepo.setSetting(key, value); this.authRepo.setSetting(key, value);
} }
// Removes a plain (non-secret) setting row; delegates to AuthRepository.
deleteSetting(key: string): void {
  this.authRepo.deleteSetting(key);
}
getAllSettings(): Record<string, string> { getAllSettings(): Record<string, string> {
return this.authRepo.getAllSettings(); return this.authRepo.getAllSettings();
} }
// ============ Secret settings (delegated to SecretSettingsManager) ============

// Decrypted secret value for a canonical or alias key; null when unset.
async getSecretSetting(key: string): Promise<string | null> {
  return await this.secretSettings.get(key);
}

// Encrypts and stores a secret; a null/empty value clears it.
async setSecretSetting(key: string, value: string | null): Promise<void> {
  await this.secretSettings.set(key, value);
}

// True when a secret exists for the key.
async hasSecretSetting(key: string): Promise<boolean> {
  return await this.secretSettings.has(key);
}

// True when the key names a managed secret setting (canonical or alias).
isSecretSettingKey(key: string): boolean {
  return this.secretSettings.isSecretKey(key);
}

// Canonical secret keys only (aliases excluded).
getCanonicalSecretSettingKeys(): string[] {
  return this.secretSettings.getCanonicalKeys();
}
// ============ Users CRUD (delegated to repository) ============ // ============ Users CRUD (delegated to repository) ============
async createUser(user: Omit<IUser, 'id'>): Promise<IUser> { async createUser(user: Omit<IUser, 'id'>): Promise<IUser> {
@@ -419,7 +454,7 @@ export class OneboxDatabase {
return this.certificateRepo.getAllDomains(); return this.certificateRepo.getAllDomains();
} }
getDomainsByProvider(provider: 'cloudflare' | 'manual'): IDomain[] { getDomainsByProvider(provider: NonNullable<IDomain['dnsProvider']>): IDomain[] {
return this.certificateRepo.getDomainsByProvider(provider); return this.certificateRepo.getDomainsByProvider(provider);
} }
@@ -607,6 +642,10 @@ export class OneboxDatabase {
return this.backupRepo.getBySchedule(scheduleId); return this.backupRepo.getBySchedule(scheduleId);
} }
getBackupBySnapshotId(snapshotId: string): IBackup | null {
return this.backupRepo.getBySnapshotId(snapshotId);
}
// ============ Backup Schedules (delegated to repository) ============ // ============ Backup Schedules (delegated to repository) ============
createBackupSchedule(schedule: Omit<IBackupSchedule, 'id'>): IBackupSchedule { createBackupSchedule(schedule: Omit<IBackupSchedule, 'id'>): IBackupSchedule {
@@ -0,0 +1,12 @@
import { BaseMigration } from './base-migration.ts';
import type { TQueryFunction } from '../types.ts';
/**
 * Migration 13: records which appstore template (and which version of it)
 * a service was installed from, via two new nullable TEXT columns on the
 * `services` table.
 */
export class Migration013AppTemplateVersion extends BaseMigration {
  readonly version = 13;
  readonly description = 'Add app template tracking columns to services';

  up(query: TQueryFunction): void {
    const statements = [
      'ALTER TABLE services ADD COLUMN app_template_id TEXT',
      'ALTER TABLE services ADD COLUMN app_template_version TEXT',
    ];
    for (const statement of statements) {
      query(statement);
    }
  }
}
@@ -0,0 +1,13 @@
import { BaseMigration } from './base-migration.ts';
import type { TQueryFunction } from '../types.ts';
/**
 * Migration 14: adds containerarchive snapshot tracking to backups —
 * the snapshot id, a stored-size column, and an index for snapshot lookups.
 */
export class Migration014ContainerArchive extends BaseMigration {
  readonly version = 14;
  readonly description = 'Add containerarchive snapshot tracking to backups';

  up(query: TQueryFunction): void {
    const ddl = [
      'ALTER TABLE backups ADD COLUMN snapshot_id TEXT',
      'ALTER TABLE backups ADD COLUMN stored_size_bytes INTEGER DEFAULT 0',
      'CREATE INDEX IF NOT EXISTS idx_backups_snapshot ON backups(snapshot_id)',
    ];
    for (const statement of ddl) {
      query(statement);
    }
  }
}
@@ -0,0 +1,31 @@
import { BaseMigration } from './base-migration.ts';
import type { TQueryFunction } from '../types.ts';
/**
 * Migration 15: converts the legacy Caddy platform-service row to SmartProxy.
 * Rewrites name/type, remaps the well-known container id, and replaces the
 * config JSON with the SmartProxy image defaults.
 */
export class Migration015SmartProxyPlatformService extends BaseMigration {
  readonly version = 15;
  readonly description = 'Rename Caddy platform service to SmartProxy';

  up(query: TQueryFunction): void {
    query(
      // CASE keeps any custom container_id untouched; only the stock
      // 'onebox-caddy' id is renamed.
      `UPDATE platform_services
      SET name = 'onebox-smartproxy',
      type = 'smartproxy',
      container_id = CASE
      WHEN container_id = 'onebox-caddy' THEN 'onebox-smartproxy'
      ELSE container_id
      END,
      config = ?,
      updated_at = ?
      WHERE type = 'caddy'`,
      [
        // Fresh SmartProxy config blob; the old Caddy config is discarded.
        JSON.stringify({
          image: 'code.foss.global/host.today/ht-docker-smartproxy:latest',
          port: 80,
          volumes: [],
          environment: {},
        }),
        Date.now(),
      ],
    );
  }
}
@@ -19,6 +19,9 @@ import { Migration009BackupSystem } from './migration-009-backup-system.ts';
import { Migration010BackupSchedules } from './migration-010-backup-schedules.ts'; import { Migration010BackupSchedules } from './migration-010-backup-schedules.ts';
import { Migration011ScopeColumns } from './migration-011-scope-columns.ts'; import { Migration011ScopeColumns } from './migration-011-scope-columns.ts';
import { Migration012GfsRetention } from './migration-012-gfs-retention.ts'; import { Migration012GfsRetention } from './migration-012-gfs-retention.ts';
import { Migration013AppTemplateVersion } from './migration-013-app-template-version.ts';
import { Migration014ContainerArchive } from './migration-014-containerarchive.ts';
import { Migration015SmartProxyPlatformService } from './migration-015-smartproxy-platform-service.ts';
import type { BaseMigration } from './base-migration.ts'; import type { BaseMigration } from './base-migration.ts';
export class MigrationRunner { export class MigrationRunner {
@@ -42,6 +45,9 @@ export class MigrationRunner {
new Migration010BackupSchedules(), new Migration010BackupSchedules(),
new Migration011ScopeColumns(), new Migration011ScopeColumns(),
new Migration012GfsRetention(), new Migration012GfsRetention(),
new Migration013AppTemplateVersion(),
new Migration014ContainerArchive(),
new Migration015SmartProxyPlatformService(),
].sort((a, b) => a.version - b.version); ].sort((a, b) => a.version - b.version);
} }
@@ -70,6 +70,10 @@ export class AuthRepository extends BaseRepository {
); );
} }
/** Removes a plain settings row by key (no-op when absent). */
deleteSetting(key: string): void {
  const sql = 'DELETE FROM settings WHERE key = ?';
  this.query(sql, [key]);
}
getAllSettings(): Record<string, string> { getAllSettings(): Record<string, string> {
const rows = this.query('SELECT key, value FROM settings'); const rows = this.query('SELECT key, value FROM settings');
const settings: Record<string, string> = {}; const settings: Record<string, string> = {};
@@ -80,4 +84,24 @@ export class AuthRepository extends BaseRepository {
} }
return settings; return settings;
} }
/**
 * Reads a raw secret_settings value by key; null when no row exists or the
 * stored value is empty.
 * NOTE(review): the dual access `(rows[0] as any).value || rows[0][0]`
 * suggests the query layer may return rows either as objects or as
 * positional arrays — confirm which shapes actually occur at runtime.
 */
getSecretSetting(key: string): string | null {
  const rows = this.query('SELECT value FROM secret_settings WHERE key = ?', [key]);
  if (rows.length === 0) return null;
  const value = (rows[0] as any).value || rows[0][0];
  return value ? String(value) : null;
}
/** Upserts a secret_settings row, stamping updated_at with the current time. */
setSecretSetting(key: string, value: string): void {
  const sql =
    'INSERT OR REPLACE INTO secret_settings (key, value, updated_at) VALUES (?, ?, ?)';
  this.query(sql, [key, value, Date.now()]);
}
/** Deletes a secret_settings row by key (no-op when absent). */
deleteSecretSetting(key: string): void {
  const sql = 'DELETE FROM secret_settings WHERE key = ?';
  this.query(sql, [key]);
}
} }
+15 -2
View File
@@ -20,8 +20,9 @@ export class BackupRepository extends BaseRepository {
this.query( this.query(
`INSERT INTO backups ( `INSERT INTO backups (
service_id, service_name, filename, size_bytes, created_at, service_id, service_name, filename, size_bytes, created_at,
includes_image, platform_resources, checksum, schedule_id includes_image, platform_resources, checksum, schedule_id,
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`, snapshot_id, stored_size_bytes
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
[ [
backup.serviceId, backup.serviceId,
backup.serviceName, backup.serviceName,
@@ -32,6 +33,8 @@ export class BackupRepository extends BaseRepository {
JSON.stringify(backup.platformResources), JSON.stringify(backup.platformResources),
backup.checksum, backup.checksum,
backup.scheduleId ?? null, backup.scheduleId ?? null,
backup.snapshotId ?? null,
backup.storedSizeBytes ?? 0,
] ]
); );
@@ -78,6 +81,14 @@ export class BackupRepository extends BaseRepository {
return rows.map((row) => this.rowToBackup(row)); return rows.map((row) => this.rowToBackup(row));
} }
/** Looks up a backup by its containerarchive snapshot id; null when none matches. */
getBySnapshotId(snapshotId: string): IBackup | null {
  const rows = this.query(
    'SELECT * FROM backups WHERE snapshot_id = ?',
    [snapshotId]
  );
  if (rows.length === 0) {
    return null;
  }
  return this.rowToBackup(rows[0]);
}
private rowToBackup(row: any): IBackup { private rowToBackup(row: any): IBackup {
let platformResources: TPlatformServiceType[] = []; let platformResources: TPlatformServiceType[] = [];
const platformResourcesRaw = row.platform_resources; const platformResourcesRaw = row.platform_resources;
@@ -94,7 +105,9 @@ export class BackupRepository extends BaseRepository {
serviceId: Number(row.service_id), serviceId: Number(row.service_id),
serviceName: String(row.service_name), serviceName: String(row.service_name),
filename: String(row.filename), filename: String(row.filename),
snapshotId: row.snapshot_id ? String(row.snapshot_id) : undefined,
sizeBytes: Number(row.size_bytes), sizeBytes: Number(row.size_bytes),
storedSizeBytes: row.stored_size_bytes ? Number(row.stored_size_bytes) : undefined,
createdAt: Number(row.created_at), createdAt: Number(row.created_at),
includesImage: Boolean(row.includes_image), includesImage: Boolean(row.includes_image),
platformResources, platformResources,
@@ -43,7 +43,7 @@ export class CertificateRepository extends BaseRepository {
return rows.map((row) => this.rowToDomain(row)); return rows.map((row) => this.rowToDomain(row));
} }
getDomainsByProvider(provider: 'cloudflare' | 'manual'): IDomain[] { getDomainsByProvider(provider: NonNullable<IDomain['dnsProvider']>): IDomain[] {
const rows = this.query('SELECT * FROM domains WHERE dns_provider = ? ORDER BY domain ASC', [provider]); const rows = this.query('SELECT * FROM domains WHERE dns_provider = ? ORDER BY domain ASC', [provider]);
return rows.map((row) => this.rowToDomain(row)); return rows.map((row) => this.rowToDomain(row));
} }
+15 -2
View File
@@ -17,8 +17,9 @@ export class ServiceRepository extends BaseRepository {
name, image, registry, env_vars, port, domain, container_id, status, name, image, registry, env_vars, port, domain, container_id, status,
created_at, updated_at, created_at, updated_at,
use_onebox_registry, registry_repository, registry_image_tag, use_onebox_registry, registry_repository, registry_image_tag,
auto_update_on_push, image_digest, platform_requirements auto_update_on_push, image_digest, platform_requirements,
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, app_template_id, app_template_version
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
[ [
service.name, service.name,
service.image, service.image,
@@ -36,6 +37,8 @@ export class ServiceRepository extends BaseRepository {
service.autoUpdateOnPush ? 1 : 0, service.autoUpdateOnPush ? 1 : 0,
service.imageDigest || null, service.imageDigest || null,
JSON.stringify(service.platformRequirements || {}), JSON.stringify(service.platformRequirements || {}),
service.appTemplateId || null,
service.appTemplateVersion || null,
] ]
); );
@@ -123,6 +126,14 @@ export class ServiceRepository extends BaseRepository {
fields.push('include_image_in_backup = ?'); fields.push('include_image_in_backup = ?');
values.push(updates.includeImageInBackup ? 1 : 0); values.push(updates.includeImageInBackup ? 1 : 0);
} }
if (updates.appTemplateId !== undefined) {
fields.push('app_template_id = ?');
values.push(updates.appTemplateId);
}
if (updates.appTemplateVersion !== undefined) {
fields.push('app_template_version = ?');
values.push(updates.appTemplateVersion);
}
fields.push('updated_at = ?'); fields.push('updated_at = ?');
values.push(Date.now()); values.push(Date.now());
@@ -179,6 +190,8 @@ export class ServiceRepository extends BaseRepository {
includeImageInBackup: row.include_image_in_backup !== undefined includeImageInBackup: row.include_image_in_backup !== undefined
? Boolean(row.include_image_in_backup) ? Boolean(row.include_image_in_backup)
: true, // Default to true : true, // Default to true
appTemplateId: row.app_template_id ? String(row.app_template_id) : undefined,
appTemplateVersion: row.app_template_version ? String(row.app_template_version) : undefined,
}; };
} }
} }
+142
View File
@@ -0,0 +1,142 @@
import { credentialEncryption } from '../classes/encryption.ts';
import type { AuthRepository } from './repositories/auth.repository.ts';
// Marker prefix for values written through credentialEncryption; any stored
// value without it is treated as legacy plaintext and re-encrypted on read.
const encryptedSecretPrefix = 'enc:v1:';

// Canonical secret keys mapped to the legacy alias key names the same secret
// may have been stored under (in secret_settings or the plain settings table).
const secretSettingAliases = {
  backupPassword: ['backup_encryption_password'],
  cloudflareToken: ['cloudflareAPIKey'],
  dcrouterGatewayApiToken: ['externalGatewayApiToken'],
} as const;

type TCanonicalSecretSettingKey = keyof typeof secretSettingAliases;

/**
 * Encrypted secret-settings store layered over AuthRepository.
 *
 * As implemented below, it:
 *  - resolves alias/legacy key names to a canonical key;
 *  - lazily migrates secrets found under alias keys, in the plain `settings`
 *    table, or stored unencrypted, into a single encrypted row under the
 *    canonical key (see get() / normalizeStoredSecret());
 *  - encrypts on set() and decrypts on get() via credentialEncryption.
 *
 * NOTE(review): get() has write side effects (migration + cleanup); the exact
 * statement order below is load-bearing — do not reorder.
 */
export class SecretSettingsManager {
  constructor(private authRepo: AuthRepository) {}

  // True when the key is a canonical secret key or a known alias.
  public isSecretKey(key: string): boolean {
    return this.resolveCanonicalKey(key) !== null;
  }

  // All canonical secret keys (aliases excluded).
  public getCanonicalKeys(): TCanonicalSecretSettingKey[] {
    return Object.keys(secretSettingAliases) as TCanonicalSecretSettingKey[];
  }

  /**
   * Returns the decrypted secret for `key` (canonical or alias), or null.
   * Search order: canonical key first, then each alias; for every candidate
   * the encrypted secret_settings table is consulted before the legacy plain
   * settings table. A hit found under a non-canonical key or stored in
   * plaintext is migrated to an encrypted canonical row as a side effect.
   */
  public async get(key: string): Promise<string | null> {
    const canonicalKey = this.resolveCanonicalKey(key);
    if (!canonicalKey) {
      return null;
    }
    for (const candidateKey of this.getCandidateKeys(canonicalKey)) {
      const secretValue = this.authRepo.getSecretSetting(candidateKey);
      if (secretValue !== null) {
        const decryptedValue = await this.decodeStoredValue(secretValue);
        // Re-home/re-encrypt when the hit was an alias row or plaintext.
        await this.normalizeStoredSecret(canonicalKey, candidateKey, secretValue, decryptedValue);
        return decryptedValue;
      }
      // Fall back to the plain settings table (pre-secret-store releases).
      const legacyValue = this.authRepo.getSetting(candidateKey);
      if (legacyValue !== null) {
        // Migrate: encrypt under the canonical key, then drop the plain rows.
        await this.set(canonicalKey, legacyValue);
        if (candidateKey !== canonicalKey) {
          this.authRepo.deleteSetting(candidateKey);
        }
        this.authRepo.deleteSetting(canonicalKey);
        return legacyValue;
      }
    }
    return null;
  }

  /**
   * Encrypts and stores `value` under the canonical key; a null/empty value
   * clears the secret instead. Alias copies (secret and plain tables) and any
   * plain canonical row are deleted so exactly one encrypted copy remains.
   * @throws Error when `key` is not a known secret setting key.
   */
  public async set(key: string, value: string | null): Promise<void> {
    const canonicalKey = this.resolveCanonicalKey(key);
    if (!canonicalKey) {
      throw new Error(`Unsupported secret setting key: ${key}`);
    }
    if (!value) {
      this.clear(canonicalKey);
      return;
    }
    const encryptedValue = await credentialEncryption.encrypt({ value });
    this.authRepo.setSecretSetting(canonicalKey, `${encryptedSecretPrefix}${encryptedValue}`);
    for (const aliasKey of secretSettingAliases[canonicalKey]) {
      this.authRepo.deleteSecretSetting(aliasKey);
      this.authRepo.deleteSetting(aliasKey);
    }
    this.authRepo.deleteSetting(canonicalKey);
  }

  // True when a secret exists. Delegates to get(), so it shares get()'s
  // lazy-migration side effects.
  public async has(key: string): Promise<boolean> {
    return (await this.get(key)) !== null;
  }

  // Removes the secret and every alias/plain copy for a canonical or alias
  // key; unknown keys are silently ignored.
  public clear(key: string): void {
    const canonicalKey = this.resolveCanonicalKey(key);
    if (!canonicalKey) {
      return;
    }
    this.authRepo.deleteSecretSetting(canonicalKey);
    this.authRepo.deleteSetting(canonicalKey);
    for (const aliasKey of secretSettingAliases[canonicalKey]) {
      this.authRepo.deleteSecretSetting(aliasKey);
      this.authRepo.deleteSetting(aliasKey);
    }
  }

  // Maps a canonical or alias key to its canonical form; null when unknown.
  private resolveCanonicalKey(key: string): TCanonicalSecretSettingKey | null {
    if (key in secretSettingAliases) {
      return key as TCanonicalSecretSettingKey;
    }
    for (const [canonicalKey, aliases] of Object.entries(secretSettingAliases)) {
      if ((aliases as readonly string[]).includes(key)) {
        return canonicalKey as TCanonicalSecretSettingKey;
      }
    }
    return null;
  }

  // Lookup order used by get(): canonical key first, then its aliases.
  private getCandidateKeys(canonicalKey: TCanonicalSecretSettingKey): string[] {
    return [canonicalKey, ...secretSettingAliases[canonicalKey]];
  }

  // Decrypts an 'enc:v1:'-prefixed value; passes through anything else.
  private async decodeStoredValue(value: string): Promise<string> {
    if (value.startsWith(encryptedSecretPrefix)) {
      const decrypted = await credentialEncryption.decrypt<{ value: string }>(
        value.slice(encryptedSecretPrefix.length),
      );
      return decrypted.value;
    }
    // Compatibility for any earlier secret_settings rows stored without encryption.
    return value;
  }

  /**
   * Post-read normalization: when the hit came from an alias row or was
   * stored unencrypted, re-set()s the secret (encrypted, canonical key) and
   * drops the alias secret row; always clears plain-settings copies of the
   * canonical key and all aliases.
   */
  private async normalizeStoredSecret(
    canonicalKey: TCanonicalSecretSettingKey,
    sourceKey: string,
    storedValue: string,
    decryptedValue: string,
  ): Promise<void> {
    if (sourceKey !== canonicalKey || !storedValue.startsWith(encryptedSecretPrefix)) {
      await this.set(canonicalKey, decryptedValue);
      if (sourceKey !== canonicalKey) {
        this.authRepo.deleteSecretSetting(sourceKey);
      }
    }
    this.authRepo.deleteSetting(canonicalKey);
    for (const aliasKey of secretSettingAliases[canonicalKey]) {
      this.authRepo.deleteSetting(aliasKey);
    }
  }
}
-2
View File
@@ -13,8 +13,6 @@ export { OneboxDnsManager } from './classes/dns.ts';
export { OneboxSslManager } from './classes/ssl.ts'; export { OneboxSslManager } from './classes/ssl.ts';
export { OneboxDaemon } from './classes/daemon.ts'; export { OneboxDaemon } from './classes/daemon.ts';
export { OneboxSystemd } from './classes/systemd.ts'; export { OneboxSystemd } from './classes/systemd.ts';
export { OneboxHttpServer } from './classes/httpserver.ts';
export { OneboxApiClient } from './classes/apiclient.ts';
// Types // Types
export * from './types.ts'; export * from './types.ts';
+110
View File
@@ -1,6 +1,7 @@
import * as plugins from '../plugins.ts'; import * as plugins from '../plugins.ts';
import { logger } from '../logging.ts'; import { logger } from '../logging.ts';
import type { Onebox } from '../classes/onebox.ts'; import type { Onebox } from '../classes/onebox.ts';
import * as interfaces from '../../ts_interfaces/index.ts';
import * as handlers from './handlers/index.ts'; import * as handlers from './handlers/index.ts';
import { files as bundledFiles } from '../../ts_bundled/bundle.ts'; import { files as bundledFiles } from '../../ts_bundled/bundle.ts';
@@ -24,6 +25,7 @@ export class OpsServer {
public settingsHandler!: handlers.SettingsHandler; public settingsHandler!: handlers.SettingsHandler;
public logsHandler!: handlers.LogsHandler; public logsHandler!: handlers.LogsHandler;
public workspaceHandler!: handlers.WorkspaceHandler; public workspaceHandler!: handlers.WorkspaceHandler;
public appStoreHandler!: handlers.AppStoreHandler;
constructor(oneboxRef: Onebox) { constructor(oneboxRef: Onebox) {
this.oneboxRef = oneboxRef; this.oneboxRef = oneboxRef;
@@ -34,6 +36,7 @@ export class OpsServer {
domain: 'localhost', domain: 'localhost',
feedMetadata: undefined, feedMetadata: undefined,
bundledContent: bundledFiles, bundledContent: bundledFiles,
addCustomRoutes: async (typedserver) => this.registerCustomRoutes(typedserver),
}); });
// Chain typedrouters: server -> opsServer -> individual handlers // Chain typedrouters: server -> opsServer -> individual handlers
@@ -65,14 +68,121 @@ export class OpsServer {
this.settingsHandler = new handlers.SettingsHandler(this); this.settingsHandler = new handlers.SettingsHandler(this);
this.logsHandler = new handlers.LogsHandler(this); this.logsHandler = new handlers.LogsHandler(this);
this.workspaceHandler = new handlers.WorkspaceHandler(this); this.workspaceHandler = new handlers.WorkspaceHandler(this);
this.appStoreHandler = new handlers.AppStoreHandler(this);
logger.success('OpsServer TypedRequest handlers initialized'); logger.success('OpsServer TypedRequest handlers initialized');
} }
/**
 * Wires non-TypedRequest HTTP routes onto the TypedServer:
 *  - `/v2` and `/v2/*`: Docker registry API, proxied to the registry manager.
 *  - `/backups/:backupId/download`: admin-authenticated download of a backup
 *    archive; temporary exports are removed after the response is built.
 */
private registerCustomRoutes(typedserver: plugins.typedserver.TypedServer): void {
  typedserver.addRoute(
    '/v2',
    'ALL',
    async (ctx) => this.oneboxRef.registry.handleRequest(ctx.request),
  );
  typedserver.addRoute(
    '/v2/*',
    'ALL',
    async (ctx) => this.oneboxRef.registry.handleRequest(ctx.request),
  );
  typedserver.addRoute(
    '/backups/:backupId/download',
    'GET',
    async (ctx) => {
      // Plain browser downloads cannot carry an Authorization header, so the
      // JWT travels as a query parameter and is verified explicitly here.
      const jwt = ctx.query.jwt;
      if (!jwt) {
        return new Response('Missing JWT', { status: 401 });
      }
      try {
        // Only the jwt field matters; the rest of the identity shape is
        // filled with placeholders and re-derived during verification.
        await this.adminHandler.getVerifiedAdminIdentity({
          jwt,
          userId: '',
          username: '',
          expiresAt: 0,
          role: 'user',
        });
      } catch {
        return new Response('Unauthorized', { status: 401 });
      }
      const backupId = Number(ctx.params.backupId);
      if (!Number.isInteger(backupId) || backupId < 1) {
        return new Response('Invalid backup id', { status: 400 });
      }
      const backup = this.oneboxRef.database.getBackupById(backupId);
      if (!backup) {
        return new Response('Backup not found', { status: 404 });
      }
      const filename = this.sanitizeDownloadFilename(
        backup.filename || `${backup.serviceName}-${backup.createdAt}.tar.enc`,
      );
      // Prefer an already-materialized backup file; otherwise produce a
      // temporary export that must be cleaned up afterwards.
      let filePath = this.oneboxRef.backupManager.getBackupFilePath(backupId);
      let shouldCleanup = false;
      if (!filePath) {
        filePath = await this.oneboxRef.backupManager.getBackupExportPath(backupId);
        shouldCleanup = !!filePath;
      }
      if (!filePath) {
        return new Response('Backup export unavailable', { status: 404 });
      }
      try {
        const fileData = await Deno.readFile(filePath);
        return new Response(fileData, {
          status: 200,
          headers: {
            'content-type': 'application/octet-stream',
            // Bug fix: interpolate the sanitized filename — the header
            // previously emitted a literal placeholder instead of the
            // computed name, so `filename` was never used.
            'content-disposition': `attachment; filename="${filename}"`,
            'content-length': String(fileData.byteLength),
            'cache-control': 'no-store',
          },
        });
      } finally {
        if (shouldCleanup) {
          // Best-effort removal of the temporary export.
          await Deno.remove(filePath).catch(() => {});
        }
      }
    },
  );
}
/**
 * Replaces characters that could break or smuggle content into the
 * Content-Disposition header (double quotes, backslashes, CR/LF) with '_'.
 */
private sanitizeDownloadFilename(filename: string): string {
  const forbidden = /["\\\r\n]/g;
  return filename.replace(forbidden, '_');
}
public async stop() { public async stop() {
if (this.server) { if (this.server) {
await this.server.stop(); await this.server.stop();
logger.success('OpsServer stopped'); logger.success('OpsServer stopped');
} }
} }
/**
 * Fires a typed event at every connected dashboard websocket client
 * (connections tagged role=ops_dashboard). Individual delivery failures are
 * tolerated via Promise.allSettled. No-op when the socket layer is absent.
 */
public async pushDashboardEvent(method: string, payload: unknown): Promise<void> {
  const socketLayer = (this.server as any)?.typedserver?.typedsocket;
  if (!socketLayer) {
    return;
  }
  const targets = await socketLayer.findAllTargetConnectionsByTag('role', 'ops_dashboard');
  const deliveries = targets.map((target: any) =>
    socketLayer.createTypedRequest(method, target).fire(payload),
  );
  await Promise.allSettled(deliveries);
}
/**
 * Convenience wrapper that pushes a `pushServiceUpdate` dashboard event
 * describing a service lifecycle change.
 */
public async broadcastServiceUpdate(
  serviceName: string,
  action: interfaces.requests.IReq_PushServiceUpdate['request']['action'],
  service?: interfaces.data.IService | null,
): Promise<void> {
  const payload = {
    action,
    serviceName,
    // Normalize null to undefined for the wire payload.
    service: service || undefined,
  };
  await this.pushDashboardEvent('pushServiceUpdate', payload);
}
} }
+95 -55
View File
@@ -2,9 +2,12 @@ import * as plugins from '../../plugins.ts';
import { logger } from '../../logging.ts'; import { logger } from '../../logging.ts';
import type { OpsServer } from '../classes.opsserver.ts'; import type { OpsServer } from '../classes.opsserver.ts';
import * as interfaces from '../../../ts_interfaces/index.ts'; import * as interfaces from '../../../ts_interfaces/index.ts';
import { hashPassword, verifyPassword } from '../../utils/auth.ts';
export interface IJwtData { export interface IJwtData {
userId: string; userId: string;
username: string;
role: 'admin' | 'user';
status: 'loggedIn' | 'loggedOut'; status: 'loggedIn' | 'loggedOut';
expiresAt: number; expiresAt: number;
} }
@@ -18,12 +21,80 @@ export class AdminHandler {
} }
public async initialize(): Promise<void> { public async initialize(): Promise<void> {
this.smartjwtInstance = new plugins.smartjwt.SmartJwt(); this.smartjwtInstance = new plugins.smartjwt.SmartJwt<IJwtData>();
await this.smartjwtInstance.init(); await this.smartjwtInstance.init();
await this.smartjwtInstance.createNewKeyPair();
this.registerHandlers(); this.registerHandlers();
} }
/**
 * Issues a signed JWT for the given user and wraps it in the identity shape
 * clients expect. The numeric DB id is preferred as userId, falling back to
 * the username for rows without one.
 */
private async createIdentityForUser(
  user: interfaces.data.IUser & { id?: number },
  expiresAt: number,
): Promise<interfaces.data.IIdentity> {
  const resolvedUserId = String(user.id || user.username);
  const tokenPayload = {
    userId: resolvedUserId,
    username: user.username,
    role: user.role,
    status: 'loggedIn' as const,
    expiresAt,
  };
  const jwt = await this.smartjwtInstance.createJWT(tokenPayload);
  return {
    jwt,
    userId: resolvedUserId,
    username: user.username,
    expiresAt,
    role: user.role,
  };
}
/**
 * Verifies a client-supplied identity end to end: JWT signature, expiry,
 * logged-in status, user existence, and userId consistency. Returns a
 * rebuilt identity on success; throws TypedResponseError otherwise.
 * The returned role comes from the current DB row, not the client payload.
 */
public async getVerifiedIdentity(
  identityArg: interfaces.data.IIdentity | null | undefined,
): Promise<interfaces.data.IIdentity> {
  if (!identityArg?.jwt) {
    throw new plugins.typedrequest.TypedResponseError('No identity provided');
  }
  let jwtData: IJwtData;
  try {
    jwtData = await this.smartjwtInstance.verifyJWTAndGetData(identityArg.jwt);
  } catch {
    // Signature/format failures use the same message as the other auth
    // failures below, so callers cannot distinguish why verification failed.
    throw new plugins.typedrequest.TypedResponseError('Valid identity required');
  }
  if (jwtData.expiresAt < Date.now() || jwtData.status !== 'loggedIn') {
    throw new plugins.typedrequest.TypedResponseError('Valid identity required');
  }
  // The user must still exist: deleting an account invalidates its JWTs.
  const user = this.opsServerRef.oneboxRef.database.getUserByUsername(jwtData.username);
  if (!user) {
    throw new plugins.typedrequest.TypedResponseError('Valid identity required');
  }
  // The token's userId must match the current DB row (id, or username for
  // legacy rows without an id).
  const userId = String(user.id || user.username);
  if (jwtData.userId !== userId) {
    throw new plugins.typedrequest.TypedResponseError('Valid identity required');
  }
  // Rebuild the identity from current DB state; only the JWT is echoed back.
  return {
    jwt: identityArg.jwt,
    userId,
    username: user.username,
    expiresAt: jwtData.expiresAt,
    role: user.role,
  };
}
/** Same as getVerifiedIdentity, but additionally requires the admin role. */
public async getVerifiedAdminIdentity(
  identityArg: interfaces.data.IIdentity | null | undefined,
): Promise<interfaces.data.IIdentity> {
  const verified = await this.getVerifiedIdentity(identityArg);
  if (verified.role === 'admin') {
    return verified;
  }
  throw new plugins.typedrequest.TypedResponseError('Admin access required');
}
private registerHandlers(): void { private registerHandlers(): void {
// Login // Login
this.typedrouter.addTypedHandler( this.typedrouter.addTypedHandler(
@@ -36,30 +107,19 @@ export class AdminHandler {
throw new plugins.typedrequest.TypedResponseError('Invalid credentials'); throw new plugins.typedrequest.TypedResponseError('Invalid credentials');
} }
// Verify password (base64 comparison to match existing DB scheme) const passwordMatches = await verifyPassword(dataArg.password, user.passwordHash);
const passwordHash = btoa(dataArg.password); if (!passwordMatches) {
if (passwordHash !== user.passwordHash) {
throw new plugins.typedrequest.TypedResponseError('Invalid credentials'); throw new plugins.typedrequest.TypedResponseError('Invalid credentials');
} }
const expiresAt = Date.now() + 24 * 3600 * 1000; const expiresAt = Date.now() + 24 * 3600 * 1000;
const userId = String(user.id || user.username); const freshUser = this.opsServerRef.oneboxRef.database.getUserByUsername(user.username) || user;
const jwt = await this.smartjwtInstance.createJWT({ const identity = await this.createIdentityForUser(freshUser, expiresAt);
userId,
status: 'loggedIn',
expiresAt,
});
logger.info(`User logged in: ${user.username}`); logger.info(`User logged in: ${user.username}`);
return { return {
identity: { identity,
jwt,
userId,
username: user.username,
expiresAt,
role: user.role,
},
}; };
} catch (error) { } catch (error) {
if (error instanceof plugins.typedrequest.TypedResponseError) throw error; if (error instanceof plugins.typedrequest.TypedResponseError) throw error;
@@ -84,22 +144,11 @@ export class AdminHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_VerifyIdentity>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_VerifyIdentity>(
'verifyIdentity', 'verifyIdentity',
async (dataArg) => { async (dataArg) => {
if (!dataArg.identity?.jwt) {
return { valid: false };
}
try { try {
const jwtData = await this.smartjwtInstance.verifyJWTAndGetData(dataArg.identity.jwt); const identity = await this.getVerifiedIdentity(dataArg.identity);
if (jwtData.expiresAt < Date.now()) return { valid: false };
if (jwtData.status !== 'loggedIn') return { valid: false };
return { return {
valid: true, valid: true,
identity: { identity,
jwt: dataArg.identity.jwt,
userId: jwtData.userId,
username: dataArg.identity.username,
expiresAt: jwtData.expiresAt,
role: dataArg.identity.role,
},
}; };
} catch { } catch {
return { valid: false }; return { valid: false };
@@ -110,21 +159,21 @@ export class AdminHandler {
// Change Password // Change Password
this.typedrouter.addTypedHandler( this.typedrouter.addTypedHandler(
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_ChangePassword>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_ChangePassword>(
'changePassword', 'changePassword',
async (dataArg) => { async (dataArg) => {
await this.requireValidIdentity(dataArg); const identity = await this.getVerifiedIdentity(dataArg.identity);
const user = this.opsServerRef.oneboxRef.database.getUserByUsername(dataArg.identity.username); const user = this.opsServerRef.oneboxRef.database.getUserByUsername(identity.username);
if (!user) { if (!user) {
throw new plugins.typedrequest.TypedResponseError('User not found'); throw new plugins.typedrequest.TypedResponseError('User not found');
} }
const currentHash = btoa(dataArg.currentPassword); const currentPasswordMatches = await verifyPassword(dataArg.currentPassword, user.passwordHash);
if (currentHash !== user.passwordHash) { if (!currentPasswordMatches) {
throw new plugins.typedrequest.TypedResponseError('Current password is incorrect'); throw new plugins.typedrequest.TypedResponseError('Current password is incorrect');
} }
const newHash = btoa(dataArg.newPassword); const newHash = await hashPassword(dataArg.newPassword);
this.opsServerRef.oneboxRef.database.updateUserPassword(user.username, newHash); this.opsServerRef.oneboxRef.database.updateUserPassword(user.username, newHash);
logger.info(`Password changed for user: ${user.username}`); logger.info(`Password changed for user: ${user.username}`);
@@ -134,25 +183,13 @@ export class AdminHandler {
); );
} }
private async requireValidIdentity(dataArg: { identity: interfaces.data.IIdentity }): Promise<void> {
const passed = await this.validIdentityGuard.exec({ identity: dataArg.identity });
if (!passed) {
throw new plugins.typedrequest.TypedResponseError('Valid identity required');
}
}
// Guard for valid identity // Guard for valid identity
public validIdentityGuard = new plugins.smartguard.Guard<{ public validIdentityGuard = new plugins.smartguard.Guard<{
identity: interfaces.data.IIdentity; identity: interfaces.data.IIdentity;
}>( }>(
async (dataArg) => { async (dataArg) => {
if (!dataArg.identity?.jwt) return false;
try { try {
const jwtData = await this.smartjwtInstance.verifyJWTAndGetData(dataArg.identity.jwt); await this.getVerifiedIdentity(dataArg.identity);
if (jwtData.expiresAt < Date.now()) return false;
if (jwtData.status !== 'loggedIn') return false;
if (dataArg.identity.expiresAt !== jwtData.expiresAt) return false;
if (dataArg.identity.userId !== jwtData.userId) return false;
return true; return true;
} catch { } catch {
return false; return false;
@@ -166,9 +203,12 @@ export class AdminHandler {
identity: interfaces.data.IIdentity; identity: interfaces.data.IIdentity;
}>( }>(
async (dataArg) => { async (dataArg) => {
const isValid = await this.validIdentityGuard.exec(dataArg); try {
if (!isValid) return false; const identity = await this.getVerifiedIdentity(dataArg.identity);
return dataArg.identity.role === 'admin'; return identity.role === 'admin';
} catch {
return false;
}
}, },
{ failedHint: 'user is not admin', name: 'adminIdentityGuard' }, { failedHint: 'user is not admin', name: 'adminIdentityGuard' },
); );
+104
View File
@@ -0,0 +1,104 @@
import * as plugins from '../../plugins.ts';
import { logger } from '../../logging.ts';
import type { OpsServer } from '../classes.opsserver.ts';
import * as interfaces from '../../../ts_interfaces/index.ts';
import { requireAdminIdentity } from '../helpers/guards.ts';
/**
 * Typed-request handler exposing the app store API: catalog listing,
 * per-version config lookup, upgrade discovery, and template-based
 * service upgrades. Every route is admin-only.
 */
export class AppStoreHandler {
  public typedrouter = new plugins.typedrequest.TypedRouter();

  constructor(private opsServerRef: OpsServer) {
    // Mount this handler's router on the shared ops-server router.
    this.opsServerRef.typedrouter.addTypedRouter(this.typedrouter);
    this.registerHandlers();
  }

  /**
   * Looks up a service by name and ensures it was deployed from an app
   * template with a tracked version, throwing a TypedResponseError otherwise.
   */
  private resolveUpgradeableService(serviceName: string) {
    const service = this.opsServerRef.oneboxRef.database.getServiceByName(serviceName);
    if (!service) {
      throw new plugins.typedrequest.TypedResponseError(`Service not found: ${serviceName}`);
    }
    if (!service.appTemplateId) {
      throw new plugins.typedrequest.TypedResponseError('Service was not deployed from an app template');
    }
    if (!service.appTemplateVersion) {
      throw new plugins.typedrequest.TypedResponseError('Service has no tracked template version');
    }
    return service;
  }

  private registerHandlers(): void {
    // Catalog: list all available app templates.
    this.typedrouter.addTypedHandler(
      new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetAppTemplates>(
        'getAppTemplates',
        async (dataArg) => {
          await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
          const catalogApps = await this.opsServerRef.oneboxRef.appStore.getApps();
          return { apps: catalogApps };
        },
      ),
    );

    // Config for one specific app version, plus the app's metadata.
    this.typedrouter.addTypedHandler(
      new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetAppConfig>(
        'getAppConfig',
        async (dataArg) => {
          await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
          const appStore = this.opsServerRef.oneboxRef.appStore;
          const config = await appStore.getAppVersionConfig(dataArg.appId, dataArg.version);
          const appMeta = await appStore.getAppMeta(dataArg.appId);
          return { config, appMeta };
        },
      ),
    );

    // Deployed services for which a newer template version exists.
    this.typedrouter.addTypedHandler(
      new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetUpgradeableServices>(
        'getUpgradeableServices',
        async (dataArg) => {
          await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
          const upgradeable = await this.opsServerRef.oneboxRef.appStore.getUpgradeableServices();
          return { services: upgradeable };
        },
      ),
    );

    // Upgrade a deployed service to a new template version.
    this.typedrouter.addTypedHandler(
      new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_UpgradeService>(
        'upgradeService',
        async (dataArg) => {
          await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
          const existingService = this.resolveUpgradeableService(dataArg.serviceName);
          logger.info(`Upgrading service '${dataArg.serviceName}' from v${existingService.appTemplateVersion} to v${dataArg.targetVersion}`);
          // Run the migration first; a failed migration aborts the upgrade.
          const migrationResult = await this.opsServerRef.oneboxRef.appStore.executeMigration(
            existingService,
            existingService.appTemplateVersion,
            dataArg.targetVersion,
          );
          if (!migrationResult.success) {
            throw new plugins.typedrequest.TypedResponseError(
              `Migration failed: ${migrationResult.warnings.join('; ')}`,
            );
          }
          // Migration succeeded — apply the upgrade to the live service.
          const updatedService = await this.opsServerRef.oneboxRef.appStore.applyUpgrade(
            dataArg.serviceName,
            migrationResult,
            dataArg.targetVersion,
          );
          return { service: updatedService, warnings: migrationResult.warnings };
        },
      ),
    );
  }
}
+10 -17
View File
@@ -1,7 +1,7 @@
import * as plugins from '../../plugins.ts'; import * as plugins from '../../plugins.ts';
import type { OpsServer } from '../classes.opsserver.ts'; import type { OpsServer } from '../classes.opsserver.ts';
import * as interfaces from '../../../ts_interfaces/index.ts'; import * as interfaces from '../../../ts_interfaces/index.ts';
import { requireValidIdentity } from '../helpers/guards.ts'; import { requireAdminIdentity } from '../helpers/guards.ts';
export class BackupsHandler { export class BackupsHandler {
public typedrouter = new plugins.typedrequest.TypedRouter(); public typedrouter = new plugins.typedrequest.TypedRouter();
@@ -16,7 +16,7 @@ export class BackupsHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetBackups>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetBackups>(
'getBackups', 'getBackups',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const backups = this.opsServerRef.oneboxRef.backupManager.listBackups(); const backups = this.opsServerRef.oneboxRef.backupManager.listBackups();
return { backups }; return { backups };
}, },
@@ -27,7 +27,7 @@ export class BackupsHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetBackup>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetBackup>(
'getBackup', 'getBackup',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const backup = this.opsServerRef.oneboxRef.database.getBackupById(dataArg.backupId); const backup = this.opsServerRef.oneboxRef.database.getBackupById(dataArg.backupId);
if (!backup) { if (!backup) {
throw new plugins.typedrequest.TypedResponseError('Backup not found'); throw new plugins.typedrequest.TypedResponseError('Backup not found');
@@ -41,7 +41,7 @@ export class BackupsHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_DeleteBackup>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_DeleteBackup>(
'deleteBackup', 'deleteBackup',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
await this.opsServerRef.oneboxRef.backupManager.deleteBackup(dataArg.backupId); await this.opsServerRef.oneboxRef.backupManager.deleteBackup(dataArg.backupId);
return { ok: true }; return { ok: true };
}, },
@@ -52,13 +52,9 @@ export class BackupsHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_RestoreBackup>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_RestoreBackup>(
'restoreBackup', 'restoreBackup',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const backupPath = this.opsServerRef.oneboxRef.backupManager.getBackupFilePath(dataArg.backupId);
if (!backupPath) {
throw new plugins.typedrequest.TypedResponseError('Backup file not found');
}
const rawResult = await this.opsServerRef.oneboxRef.backupManager.restoreBackup( const rawResult = await this.opsServerRef.oneboxRef.backupManager.restoreBackup(
backupPath, dataArg.backupId,
dataArg.options, dataArg.options,
); );
return { return {
@@ -79,19 +75,16 @@ export class BackupsHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_DownloadBackup>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_DownloadBackup>(
'downloadBackup', 'downloadBackup',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const backup = this.opsServerRef.oneboxRef.database.getBackupById(dataArg.backupId); const backup = this.opsServerRef.oneboxRef.database.getBackupById(dataArg.backupId);
if (!backup) { if (!backup) {
throw new plugins.typedrequest.TypedResponseError('Backup not found'); throw new plugins.typedrequest.TypedResponseError('Backup not found');
} }
const filePath = this.opsServerRef.oneboxRef.backupManager.getBackupFilePath(dataArg.backupId);
if (!filePath) {
throw new plugins.typedrequest.TypedResponseError('Backup file not found');
}
// Return a download URL that the client can fetch directly // Return a download URL that the client can fetch directly
const filename = backup.filename || `${backup.serviceName}-${backup.createdAt}.tar.enc`;
return { return {
downloadUrl: `/api/backups/${dataArg.backupId}/download`, downloadUrl: `/backups/${dataArg.backupId}/download?jwt=${encodeURIComponent(dataArg.identity.jwt)}`,
filename: backup.filename, filename,
}; };
}, },
), ),
+5 -5
View File
@@ -1,7 +1,7 @@
import * as plugins from '../../plugins.ts'; import * as plugins from '../../plugins.ts';
import type { OpsServer } from '../classes.opsserver.ts'; import type { OpsServer } from '../classes.opsserver.ts';
import * as interfaces from '../../../ts_interfaces/index.ts'; import * as interfaces from '../../../ts_interfaces/index.ts';
import { requireValidIdentity } from '../helpers/guards.ts'; import { requireAdminIdentity } from '../helpers/guards.ts';
export class DnsHandler { export class DnsHandler {
public typedrouter = new plugins.typedrequest.TypedRouter(); public typedrouter = new plugins.typedrequest.TypedRouter();
@@ -16,7 +16,7 @@ export class DnsHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetDnsRecords>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetDnsRecords>(
'getDnsRecords', 'getDnsRecords',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const records = this.opsServerRef.oneboxRef.dns.listDNSRecords(); const records = this.opsServerRef.oneboxRef.dns.listDNSRecords();
return { records }; return { records };
}, },
@@ -27,7 +27,7 @@ export class DnsHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_CreateDnsRecord>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_CreateDnsRecord>(
'createDnsRecord', 'createDnsRecord',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
await this.opsServerRef.oneboxRef.dns.addDNSRecord(dataArg.domain, dataArg.value); await this.opsServerRef.oneboxRef.dns.addDNSRecord(dataArg.domain, dataArg.value);
const records = this.opsServerRef.oneboxRef.dns.listDNSRecords(); const records = this.opsServerRef.oneboxRef.dns.listDNSRecords();
const record = records.find((r: any) => r.domain === dataArg.domain); const record = records.find((r: any) => r.domain === dataArg.domain);
@@ -40,7 +40,7 @@ export class DnsHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_DeleteDnsRecord>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_DeleteDnsRecord>(
'deleteDnsRecord', 'deleteDnsRecord',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
await this.opsServerRef.oneboxRef.dns.removeDNSRecord(dataArg.domain); await this.opsServerRef.oneboxRef.dns.removeDNSRecord(dataArg.domain);
return { ok: true }; return { ok: true };
}, },
@@ -51,7 +51,7 @@ export class DnsHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_SyncDns>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_SyncDns>(
'syncDns', 'syncDns',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
if (!this.opsServerRef.oneboxRef.dns.isConfigured()) { if (!this.opsServerRef.oneboxRef.dns.isConfigured()) {
throw new plugins.typedrequest.TypedResponseError('DNS manager not configured'); throw new plugins.typedrequest.TypedResponseError('DNS manager not configured');
} }
+4 -4
View File
@@ -1,7 +1,7 @@
import * as plugins from '../../plugins.ts'; import * as plugins from '../../plugins.ts';
import type { OpsServer } from '../classes.opsserver.ts'; import type { OpsServer } from '../classes.opsserver.ts';
import * as interfaces from '../../../ts_interfaces/index.ts'; import * as interfaces from '../../../ts_interfaces/index.ts';
import { requireValidIdentity } from '../helpers/guards.ts'; import { requireAdminIdentity } from '../helpers/guards.ts';
export class DomainsHandler { export class DomainsHandler {
public typedrouter = new plugins.typedrequest.TypedRouter(); public typedrouter = new plugins.typedrequest.TypedRouter();
@@ -57,7 +57,7 @@ export class DomainsHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetDomains>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetDomains>(
'getDomains', 'getDomains',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const domains = this.buildDomainViews(); const domains = this.buildDomainViews();
return { domains }; return { domains };
}, },
@@ -68,7 +68,7 @@ export class DomainsHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetDomain>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetDomain>(
'getDomain', 'getDomain',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const domain = this.opsServerRef.oneboxRef.database.getDomainByName(dataArg.domainName); const domain = this.opsServerRef.oneboxRef.database.getDomainByName(dataArg.domainName);
if (!domain) { if (!domain) {
throw new plugins.typedrequest.TypedResponseError('Domain not found'); throw new plugins.typedrequest.TypedResponseError('Domain not found');
@@ -87,7 +87,7 @@ export class DomainsHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_SyncDomains>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_SyncDomains>(
'syncDomains', 'syncDomains',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
if (!this.opsServerRef.oneboxRef.cloudflareDomainSync) { if (!this.opsServerRef.oneboxRef.cloudflareDomainSync) {
throw new plugins.typedrequest.TypedResponseError('Cloudflare domain sync not configured'); throw new plugins.typedrequest.TypedResponseError('Cloudflare domain sync not configured');
} }
+1
View File
@@ -12,3 +12,4 @@ export * from './schedules.handler.ts';
export * from './settings.handler.ts'; export * from './settings.handler.ts';
export * from './logs.handler.ts'; export * from './logs.handler.ts';
export * from './workspace.handler.ts'; export * from './workspace.handler.ts';
export * from './appstore.handler.ts';
+8 -8
View File
@@ -2,7 +2,7 @@ import * as plugins from '../../plugins.ts';
import { logger } from '../../logging.ts'; import { logger } from '../../logging.ts';
import type { OpsServer } from '../classes.opsserver.ts'; import type { OpsServer } from '../classes.opsserver.ts';
import * as interfaces from '../../../ts_interfaces/index.ts'; import * as interfaces from '../../../ts_interfaces/index.ts';
import { requireValidIdentity } from '../helpers/guards.ts'; import { requireAdminIdentity } from '../helpers/guards.ts';
export class LogsHandler { export class LogsHandler {
public typedrouter = new plugins.typedrequest.TypedRouter(); public typedrouter = new plugins.typedrequest.TypedRouter();
@@ -18,7 +18,7 @@ export class LogsHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetServiceLogStream>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetServiceLogStream>(
'getServiceLogStream', 'getServiceLogStream',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const service = this.opsServerRef.oneboxRef.database.getServiceByName(dataArg.serviceName); const service = this.opsServerRef.oneboxRef.database.getServiceByName(dataArg.serviceName);
if (!service) { if (!service) {
@@ -99,7 +99,7 @@ export class LogsHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetPlatformServiceLogStream>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetPlatformServiceLogStream>(
'getPlatformServiceLogStream', 'getPlatformServiceLogStream',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const platformService = this.opsServerRef.oneboxRef.database.getPlatformServiceByType( const platformService = this.opsServerRef.oneboxRef.database.getPlatformServiceByType(
dataArg.serviceType, dataArg.serviceType,
@@ -160,26 +160,26 @@ export class LogsHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetNetworkLogStream>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetNetworkLogStream>(
'getNetworkLogStream', 'getNetworkLogStream',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const virtualStream = new plugins.typedrequest.VirtualStream<Uint8Array>(); const virtualStream = new plugins.typedrequest.VirtualStream<Uint8Array>();
const encoder = new TextEncoder(); const encoder = new TextEncoder();
const clientId = crypto.randomUUID(); const clientId = crypto.randomUUID();
// Create a mock WebSocket-like object for the CaddyLogReceiver // Create a mock WebSocket-like object for the proxy log receiver.
const mockSocket = { const mockSocket = {
readyState: 1, // WebSocket.OPEN readyState: 1, // WebSocket.OPEN
send: (data: string) => { send: (data: string) => {
try { try {
virtualStream.sendData(encoder.encode(data)); virtualStream.sendData(encoder.encode(data));
} catch { } catch {
this.opsServerRef.oneboxRef.caddyLogReceiver.removeClient(clientId); this.opsServerRef.oneboxRef.proxyLogReceiver.removeClient(clientId);
} }
}, },
}; };
const filter = dataArg.filter || {}; const filter = dataArg.filter || {};
this.opsServerRef.oneboxRef.caddyLogReceiver.addClient( this.opsServerRef.oneboxRef.proxyLogReceiver.addClient(
clientId, clientId,
mockSocket as any, mockSocket as any,
filter, filter,
@@ -195,7 +195,7 @@ export class LogsHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetEventStream>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetEventStream>(
'getEventStream', 'getEventStream',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const virtualStream = new plugins.typedrequest.VirtualStream<Uint8Array>(); const virtualStream = new plugins.typedrequest.VirtualStream<Uint8Array>();
const encoder = new TextEncoder(); const encoder = new TextEncoder();
+8 -7
View File
@@ -1,7 +1,7 @@
import * as plugins from '../../plugins.ts'; import * as plugins from '../../plugins.ts';
import type { OpsServer } from '../classes.opsserver.ts'; import type { OpsServer } from '../classes.opsserver.ts';
import * as interfaces from '../../../ts_interfaces/index.ts'; import * as interfaces from '../../../ts_interfaces/index.ts';
import { requireValidIdentity } from '../helpers/guards.ts'; import { requireAdminIdentity } from '../helpers/guards.ts';
import type { TPlatformServiceType } from '../../types.ts'; import type { TPlatformServiceType } from '../../types.ts';
export class NetworkHandler { export class NetworkHandler {
@@ -19,8 +19,9 @@ export class NetworkHandler {
redis: 6379, redis: 6379,
postgresql: 5432, postgresql: 5432,
rabbitmq: 5672, rabbitmq: 5672,
caddy: 80, smartproxy: 80,
clickhouse: 8123, clickhouse: 8123,
mariadb: 3306,
}; };
return ports[type] || 0; return ports[type] || 0;
} }
@@ -30,7 +31,7 @@ export class NetworkHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetNetworkTargets>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetNetworkTargets>(
'getNetworkTargets', 'getNetworkTargets',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const targets: interfaces.data.INetworkTarget[] = []; const targets: interfaces.data.INetworkTarget[] = [];
// Services // Services
@@ -82,9 +83,9 @@ export class NetworkHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetNetworkStats>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetNetworkStats>(
'getNetworkStats', 'getNetworkStats',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const proxyStatus = this.opsServerRef.oneboxRef.reverseProxy.getStatus() as any; const proxyStatus = this.opsServerRef.oneboxRef.reverseProxy.getStatus() as any;
const logReceiverStats = this.opsServerRef.oneboxRef.caddyLogReceiver.getStats(); const logReceiverStats = this.opsServerRef.oneboxRef.proxyLogReceiver.getStats();
return { return {
stats: { stats: {
@@ -113,8 +114,8 @@ export class NetworkHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetTrafficStats>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetTrafficStats>(
'getTrafficStats', 'getTrafficStats',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const trafficStats = this.opsServerRef.oneboxRef.caddyLogReceiver.getTrafficStats(60); const trafficStats = this.opsServerRef.oneboxRef.proxyLogReceiver.getTrafficStats(60);
return { stats: trafficStats }; return { stats: trafficStats };
}, },
), ),
+11 -37
View File
@@ -2,7 +2,7 @@ import * as plugins from '../../plugins.ts';
import { logger } from '../../logging.ts'; import { logger } from '../../logging.ts';
import type { OpsServer } from '../classes.opsserver.ts'; import type { OpsServer } from '../classes.opsserver.ts';
import * as interfaces from '../../../ts_interfaces/index.ts'; import * as interfaces from '../../../ts_interfaces/index.ts';
import { requireValidIdentity } from '../helpers/guards.ts'; import { requireAdminIdentity } from '../helpers/guards.ts';
export class PlatformHandler { export class PlatformHandler {
public typedrouter = new plugins.typedrequest.TypedRouter(); public typedrouter = new plugins.typedrequest.TypedRouter();
@@ -91,21 +91,8 @@ export class PlatformHandler {
line: string, line: string,
isError: boolean, isError: boolean,
): void { ): void {
const typedsocket = (this.opsServerRef.server as any)?.typedserver?.typedsocket;
if (!typedsocket) return;
const entry = this.parseLogLine(line, isError); const entry = this.parseLogLine(line, isError);
void this.opsServerRef.pushDashboardEvent('pushPlatformServiceLog', { serviceType, entry });
typedsocket.findAllTargetConnectionsByTag('role', 'ops_dashboard')
.then((connections: any[]) => {
for (const conn of connections) {
typedsocket.createTypedRequest<interfaces.requests.IReq_PushPlatformServiceLog>(
'pushPlatformServiceLog',
conn,
).fire({ serviceType, entry }).catch(() => {});
}
})
.catch(() => {});
} }
private pushServiceLogToClients( private pushServiceLogToClients(
@@ -113,21 +100,8 @@ export class PlatformHandler {
line: string, line: string,
isError: boolean, isError: boolean,
): void { ): void {
const typedsocket = (this.opsServerRef.server as any)?.typedserver?.typedsocket;
if (!typedsocket) return;
const entry = this.parseLogLine(line, isError); const entry = this.parseLogLine(line, isError);
void this.opsServerRef.pushDashboardEvent('pushServiceLog', { serviceName, entry });
typedsocket.findAllTargetConnectionsByTag('role', 'ops_dashboard')
.then((connections: any[]) => {
for (const conn of connections) {
typedsocket.createTypedRequest<interfaces.requests.IReq_PushServiceLog>(
'pushServiceLog',
conn,
).fire({ serviceName, entry }).catch(() => {});
}
})
.catch(() => {});
} }
private registerHandlers(): void { private registerHandlers(): void {
@@ -136,7 +110,7 @@ export class PlatformHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetPlatformServices>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetPlatformServices>(
'getPlatformServices', 'getPlatformServices',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const platformServices = this.opsServerRef.oneboxRef.platformServices.getAllPlatformServices(); const platformServices = this.opsServerRef.oneboxRef.platformServices.getAllPlatformServices();
const providers = this.opsServerRef.oneboxRef.platformServices.getAllProviders(); const providers = this.opsServerRef.oneboxRef.platformServices.getAllProviders();
@@ -145,7 +119,7 @@ export class PlatformHandler {
const isCore = 'isCore' in provider && (provider as any).isCore === true; const isCore = 'isCore' in provider && (provider as any).isCore === true;
let status: string = service?.status || 'not-deployed'; let status: string = service?.status || 'not-deployed';
if (provider.type === 'caddy') { if (provider.type === 'smartproxy') {
const proxyStatus = this.opsServerRef.oneboxRef.reverseProxy.getStatus() as any; const proxyStatus = this.opsServerRef.oneboxRef.reverseProxy.getStatus() as any;
status = (proxyStatus.running ?? proxyStatus.http?.running) ? 'running' : 'stopped'; status = (proxyStatus.running ?? proxyStatus.http?.running) ? 'running' : 'stopped';
} }
@@ -172,7 +146,7 @@ export class PlatformHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetPlatformService>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetPlatformService>(
'getPlatformService', 'getPlatformService',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const provider = this.opsServerRef.oneboxRef.platformServices.getProvider(dataArg.serviceType); const provider = this.opsServerRef.oneboxRef.platformServices.getProvider(dataArg.serviceType);
if (!provider) { if (!provider) {
throw new plugins.typedrequest.TypedResponseError(`Unknown platform service type: ${dataArg.serviceType}`); throw new plugins.typedrequest.TypedResponseError(`Unknown platform service type: ${dataArg.serviceType}`);
@@ -182,7 +156,7 @@ export class PlatformHandler {
const isCore = 'isCore' in provider && (provider as any).isCore === true; const isCore = 'isCore' in provider && (provider as any).isCore === true;
let rawStatus: string = service?.status || 'not-deployed'; let rawStatus: string = service?.status || 'not-deployed';
if (dataArg.serviceType === 'caddy') { if (dataArg.serviceType === 'smartproxy') {
const proxyStatus = this.opsServerRef.oneboxRef.reverseProxy.getStatus() as any; const proxyStatus = this.opsServerRef.oneboxRef.reverseProxy.getStatus() as any;
rawStatus = (proxyStatus.running ?? proxyStatus.http?.running) ? 'running' : 'stopped'; rawStatus = (proxyStatus.running ?? proxyStatus.http?.running) ? 'running' : 'stopped';
} }
@@ -208,7 +182,7 @@ export class PlatformHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_StartPlatformService>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_StartPlatformService>(
'startPlatformService', 'startPlatformService',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const provider = this.opsServerRef.oneboxRef.platformServices.getProvider(dataArg.serviceType); const provider = this.opsServerRef.oneboxRef.platformServices.getProvider(dataArg.serviceType);
if (!provider) { if (!provider) {
throw new plugins.typedrequest.TypedResponseError(`Unknown platform service type: ${dataArg.serviceType}`); throw new plugins.typedrequest.TypedResponseError(`Unknown platform service type: ${dataArg.serviceType}`);
@@ -235,7 +209,7 @@ export class PlatformHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_StopPlatformService>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_StopPlatformService>(
'stopPlatformService', 'stopPlatformService',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const provider = this.opsServerRef.oneboxRef.platformServices.getProvider(dataArg.serviceType); const provider = this.opsServerRef.oneboxRef.platformServices.getProvider(dataArg.serviceType);
if (!provider) { if (!provider) {
throw new plugins.typedrequest.TypedResponseError(`Unknown platform service type: ${dataArg.serviceType}`); throw new plugins.typedrequest.TypedResponseError(`Unknown platform service type: ${dataArg.serviceType}`);
@@ -268,7 +242,7 @@ export class PlatformHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetPlatformServiceStats>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetPlatformServiceStats>(
'getPlatformServiceStats', 'getPlatformServiceStats',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const service = this.opsServerRef.oneboxRef.database.getPlatformServiceByType(dataArg.serviceType); const service = this.opsServerRef.oneboxRef.database.getPlatformServiceByType(dataArg.serviceType);
if (!service || !service.containerId) { if (!service || !service.containerId) {
throw new plugins.typedrequest.TypedResponseError('Platform service has no container'); throw new plugins.typedrequest.TypedResponseError('Platform service has no container');
@@ -289,7 +263,7 @@ export class PlatformHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetPlatformServiceLogs>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetPlatformServiceLogs>(
'getPlatformServiceLogs', 'getPlatformServiceLogs',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const service = this.opsServerRef.oneboxRef.database.getPlatformServiceByType(dataArg.serviceType); const service = this.opsServerRef.oneboxRef.database.getPlatformServiceByType(dataArg.serviceType);
if (!service || !service.containerId) { if (!service || !service.containerId) {
throw new plugins.typedrequest.TypedResponseError('Platform service has no container'); throw new plugins.typedrequest.TypedResponseError('Platform service has no container');
+6 -6
View File
@@ -1,7 +1,7 @@
import * as plugins from '../../plugins.ts'; import * as plugins from '../../plugins.ts';
import type { OpsServer } from '../classes.opsserver.ts'; import type { OpsServer } from '../classes.opsserver.ts';
import * as interfaces from '../../../ts_interfaces/index.ts'; import * as interfaces from '../../../ts_interfaces/index.ts';
import { requireValidIdentity } from '../helpers/guards.ts'; import { requireAdminIdentity } from '../helpers/guards.ts';
export class RegistryHandler { export class RegistryHandler {
public typedrouter = new plugins.typedrequest.TypedRouter(); public typedrouter = new plugins.typedrequest.TypedRouter();
@@ -17,7 +17,7 @@ export class RegistryHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetRegistryTags>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetRegistryTags>(
'getRegistryTags', 'getRegistryTags',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const tags = await this.opsServerRef.oneboxRef.registry.getImageTags(dataArg.serviceName); const tags = await this.opsServerRef.oneboxRef.registry.getImageTags(dataArg.serviceName);
return { tags }; return { tags };
}, },
@@ -29,7 +29,7 @@ export class RegistryHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetRegistryTokens>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetRegistryTokens>(
'getRegistryTokens', 'getRegistryTokens',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const rawTokens = this.opsServerRef.oneboxRef.database.getAllRegistryTokens(); const rawTokens = this.opsServerRef.oneboxRef.database.getAllRegistryTokens();
const now = Date.now(); const now = Date.now();
@@ -68,7 +68,7 @@ export class RegistryHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_CreateRegistryToken>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_CreateRegistryToken>(
'createRegistryToken', 'createRegistryToken',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); const identity = await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const config = dataArg.tokenConfig; const config = dataArg.tokenConfig;
// Calculate expiration // Calculate expiration
@@ -95,7 +95,7 @@ export class RegistryHandler {
expiresAt, expiresAt,
createdAt: now, createdAt: now,
lastUsedAt: null, lastUsedAt: null,
createdBy: dataArg.identity.username, createdBy: identity.username,
}); });
let scopeDisplay: string; let scopeDisplay: string;
@@ -133,7 +133,7 @@ export class RegistryHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_DeleteRegistryToken>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_DeleteRegistryToken>(
'deleteRegistryToken', 'deleteRegistryToken',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const token = this.opsServerRef.oneboxRef.database.getRegistryTokenById(dataArg.tokenId); const token = this.opsServerRef.oneboxRef.database.getRegistryTokenById(dataArg.tokenId);
if (!token) { if (!token) {
throw new plugins.typedrequest.TypedResponseError('Token not found'); throw new plugins.typedrequest.TypedResponseError('Token not found');
+7 -7
View File
@@ -1,7 +1,7 @@
import * as plugins from '../../plugins.ts'; import * as plugins from '../../plugins.ts';
import type { OpsServer } from '../classes.opsserver.ts'; import type { OpsServer } from '../classes.opsserver.ts';
import * as interfaces from '../../../ts_interfaces/index.ts'; import * as interfaces from '../../../ts_interfaces/index.ts';
import { requireValidIdentity } from '../helpers/guards.ts'; import { requireAdminIdentity } from '../helpers/guards.ts';
export class SchedulesHandler { export class SchedulesHandler {
public typedrouter = new plugins.typedrequest.TypedRouter(); public typedrouter = new plugins.typedrequest.TypedRouter();
@@ -16,7 +16,7 @@ export class SchedulesHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetBackupSchedules>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetBackupSchedules>(
'getBackupSchedules', 'getBackupSchedules',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const schedules = this.opsServerRef.oneboxRef.backupScheduler.getAllSchedules(); const schedules = this.opsServerRef.oneboxRef.backupScheduler.getAllSchedules();
return { schedules }; return { schedules };
}, },
@@ -27,7 +27,7 @@ export class SchedulesHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_CreateBackupSchedule>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_CreateBackupSchedule>(
'createBackupSchedule', 'createBackupSchedule',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const schedule = await this.opsServerRef.oneboxRef.backupScheduler.createSchedule( const schedule = await this.opsServerRef.oneboxRef.backupScheduler.createSchedule(
dataArg.scheduleConfig, dataArg.scheduleConfig,
); );
@@ -40,7 +40,7 @@ export class SchedulesHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetBackupSchedule>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetBackupSchedule>(
'getBackupSchedule', 'getBackupSchedule',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const schedule = this.opsServerRef.oneboxRef.backupScheduler.getScheduleById(dataArg.scheduleId); const schedule = this.opsServerRef.oneboxRef.backupScheduler.getScheduleById(dataArg.scheduleId);
if (!schedule) { if (!schedule) {
throw new plugins.typedrequest.TypedResponseError('Schedule not found'); throw new plugins.typedrequest.TypedResponseError('Schedule not found');
@@ -54,7 +54,7 @@ export class SchedulesHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_UpdateBackupSchedule>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_UpdateBackupSchedule>(
'updateBackupSchedule', 'updateBackupSchedule',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const schedule = await this.opsServerRef.oneboxRef.backupScheduler.updateSchedule( const schedule = await this.opsServerRef.oneboxRef.backupScheduler.updateSchedule(
dataArg.scheduleId, dataArg.scheduleId,
dataArg.updates, dataArg.updates,
@@ -68,7 +68,7 @@ export class SchedulesHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_DeleteBackupSchedule>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_DeleteBackupSchedule>(
'deleteBackupSchedule', 'deleteBackupSchedule',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
await this.opsServerRef.oneboxRef.backupScheduler.deleteSchedule(dataArg.scheduleId); await this.opsServerRef.oneboxRef.backupScheduler.deleteSchedule(dataArg.scheduleId);
return { ok: true }; return { ok: true };
}, },
@@ -79,7 +79,7 @@ export class SchedulesHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_TriggerBackupSchedule>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_TriggerBackupSchedule>(
'triggerBackupSchedule', 'triggerBackupSchedule',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
await this.opsServerRef.oneboxRef.backupScheduler.triggerBackup(dataArg.scheduleId); await this.opsServerRef.oneboxRef.backupScheduler.triggerBackup(dataArg.scheduleId);
// triggerBackup is void; the backup is created async by the scheduler // triggerBackup is void; the backup is created async by the scheduler
// Return the most recent backup for the schedule // Return the most recent backup for the schedule
+16 -16
View File
@@ -2,7 +2,7 @@ import * as plugins from '../../plugins.ts';
import { logger } from '../../logging.ts'; import { logger } from '../../logging.ts';
import type { OpsServer } from '../classes.opsserver.ts'; import type { OpsServer } from '../classes.opsserver.ts';
import * as interfaces from '../../../ts_interfaces/index.ts'; import * as interfaces from '../../../ts_interfaces/index.ts';
import { requireValidIdentity } from '../helpers/guards.ts'; import { requireAdminIdentity } from '../helpers/guards.ts';
export class ServicesHandler { export class ServicesHandler {
public typedrouter = new plugins.typedrequest.TypedRouter(); public typedrouter = new plugins.typedrequest.TypedRouter();
@@ -18,7 +18,7 @@ export class ServicesHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetServices>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetServices>(
'getServices', 'getServices',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const services = this.opsServerRef.oneboxRef.services.listServices(); const services = this.opsServerRef.oneboxRef.services.listServices();
return { services }; return { services };
}, },
@@ -30,7 +30,7 @@ export class ServicesHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetService>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetService>(
'getService', 'getService',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const service = this.opsServerRef.oneboxRef.services.getService(dataArg.serviceName); const service = this.opsServerRef.oneboxRef.services.getService(dataArg.serviceName);
if (!service) { if (!service) {
throw new plugins.typedrequest.TypedResponseError('Service not found'); throw new plugins.typedrequest.TypedResponseError('Service not found');
@@ -45,7 +45,7 @@ export class ServicesHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_CreateService>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_CreateService>(
'createService', 'createService',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const service = await this.opsServerRef.oneboxRef.services.deployService(dataArg.serviceConfig); const service = await this.opsServerRef.oneboxRef.services.deployService(dataArg.serviceConfig);
return { service }; return { service };
}, },
@@ -57,7 +57,7 @@ export class ServicesHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_UpdateService>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_UpdateService>(
'updateService', 'updateService',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const service = await this.opsServerRef.oneboxRef.services.updateService( const service = await this.opsServerRef.oneboxRef.services.updateService(
dataArg.serviceName, dataArg.serviceName,
dataArg.updates, dataArg.updates,
@@ -72,7 +72,7 @@ export class ServicesHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_DeleteService>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_DeleteService>(
'deleteService', 'deleteService',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
await this.opsServerRef.oneboxRef.services.removeService(dataArg.serviceName); await this.opsServerRef.oneboxRef.services.removeService(dataArg.serviceName);
return { ok: true }; return { ok: true };
}, },
@@ -84,7 +84,7 @@ export class ServicesHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_StartService>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_StartService>(
'startService', 'startService',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
await this.opsServerRef.oneboxRef.services.startService(dataArg.serviceName); await this.opsServerRef.oneboxRef.services.startService(dataArg.serviceName);
const service = this.opsServerRef.oneboxRef.services.getService(dataArg.serviceName); const service = this.opsServerRef.oneboxRef.services.getService(dataArg.serviceName);
return { service: service! }; return { service: service! };
@@ -97,7 +97,7 @@ export class ServicesHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_StopService>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_StopService>(
'stopService', 'stopService',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
await this.opsServerRef.oneboxRef.services.stopService(dataArg.serviceName); await this.opsServerRef.oneboxRef.services.stopService(dataArg.serviceName);
const service = this.opsServerRef.oneboxRef.services.getService(dataArg.serviceName); const service = this.opsServerRef.oneboxRef.services.getService(dataArg.serviceName);
return { service: service! }; return { service: service! };
@@ -110,7 +110,7 @@ export class ServicesHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_RestartService>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_RestartService>(
'restartService', 'restartService',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
await this.opsServerRef.oneboxRef.services.restartService(dataArg.serviceName); await this.opsServerRef.oneboxRef.services.restartService(dataArg.serviceName);
const service = this.opsServerRef.oneboxRef.services.getService(dataArg.serviceName); const service = this.opsServerRef.oneboxRef.services.getService(dataArg.serviceName);
return { service: service! }; return { service: service! };
@@ -123,7 +123,7 @@ export class ServicesHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetServiceLogs>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetServiceLogs>(
'getServiceLogs', 'getServiceLogs',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const logs = await this.opsServerRef.oneboxRef.services.getServiceLogs(dataArg.serviceName); const logs = await this.opsServerRef.oneboxRef.services.getServiceLogs(dataArg.serviceName);
return { logs: String(logs) }; return { logs: String(logs) };
}, },
@@ -135,7 +135,7 @@ export class ServicesHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetServiceStats>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetServiceStats>(
'getServiceStats', 'getServiceStats',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const service = this.opsServerRef.oneboxRef.services.getService(dataArg.serviceName); const service = this.opsServerRef.oneboxRef.services.getService(dataArg.serviceName);
if (!service || !service.containerID) { if (!service || !service.containerID) {
throw new plugins.typedrequest.TypedResponseError('Service has no container'); throw new plugins.typedrequest.TypedResponseError('Service has no container');
@@ -154,7 +154,7 @@ export class ServicesHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetServiceMetrics>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetServiceMetrics>(
'getServiceMetrics', 'getServiceMetrics',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const service = this.opsServerRef.oneboxRef.services.getService(dataArg.serviceName); const service = this.opsServerRef.oneboxRef.services.getService(dataArg.serviceName);
if (!service || !service.id) { if (!service || !service.id) {
throw new plugins.typedrequest.TypedResponseError('Service not found'); throw new plugins.typedrequest.TypedResponseError('Service not found');
@@ -170,7 +170,7 @@ export class ServicesHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetServicePlatformResources>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetServicePlatformResources>(
'getServicePlatformResources', 'getServicePlatformResources',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const rawResources = await this.opsServerRef.oneboxRef.services.getServicePlatformResources( const rawResources = await this.opsServerRef.oneboxRef.services.getServicePlatformResources(
dataArg.serviceName, dataArg.serviceName,
); );
@@ -204,7 +204,7 @@ export class ServicesHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetServiceBackups>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetServiceBackups>(
'getServiceBackups', 'getServiceBackups',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const backups = this.opsServerRef.oneboxRef.backupManager.listBackups(dataArg.serviceName); const backups = this.opsServerRef.oneboxRef.backupManager.listBackups(dataArg.serviceName);
return { backups }; return { backups };
}, },
@@ -216,7 +216,7 @@ export class ServicesHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_CreateServiceBackup>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_CreateServiceBackup>(
'createServiceBackup', 'createServiceBackup',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const result = await this.opsServerRef.oneboxRef.backupManager.createBackup(dataArg.serviceName); const result = await this.opsServerRef.oneboxRef.backupManager.createBackup(dataArg.serviceName);
return { backup: result.backup }; return { backup: result.backup };
}, },
@@ -228,7 +228,7 @@ export class ServicesHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetServiceBackupSchedules>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetServiceBackupSchedules>(
'getServiceBackupSchedules', 'getServiceBackupSchedules',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const service = this.opsServerRef.oneboxRef.services.getService(dataArg.serviceName); const service = this.opsServerRef.oneboxRef.services.getService(dataArg.serviceName);
if (!service) { if (!service) {
throw new plugins.typedrequest.TypedResponseError('Service not found'); throw new plugins.typedrequest.TypedResponseError('Service not found');
+56 -14
View File
@@ -1,7 +1,9 @@
import * as plugins from '../../plugins.ts'; import * as plugins from '../../plugins.ts';
import type { OpsServer } from '../classes.opsserver.ts'; import type { OpsServer } from '../classes.opsserver.ts';
import * as interfaces from '../../../ts_interfaces/index.ts'; import * as interfaces from '../../../ts_interfaces/index.ts';
import { requireValidIdentity } from '../helpers/guards.ts'; import { requireAdminIdentity } from '../helpers/guards.ts';
import { logger } from '../../logging.ts';
import { getErrorMessage } from '../../utils/error.ts';
export class SettingsHandler { export class SettingsHandler {
public typedrouter = new plugins.typedrequest.TypedRouter(); public typedrouter = new plugins.typedrequest.TypedRouter();
@@ -11,13 +13,20 @@ export class SettingsHandler {
this.registerHandlers(); this.registerHandlers();
} }
private getSettingsObject(): interfaces.data.ISettings { private async getSettingsObject(): Promise<interfaces.data.ISettings> {
const db = this.opsServerRef.oneboxRef.database; const db = this.opsServerRef.oneboxRef.database;
const settingsMap = db.getAllSettings(); // Returns Record<string, string> const cloudflareToken = await db.getSecretSetting('cloudflareToken');
const dcrouterGatewayApiToken = await db.getSecretSetting('dcrouterGatewayApiToken');
const settingsMap = db.getAllSettings();
return { return {
cloudflareToken: settingsMap['cloudflareToken'] || '', cloudflareToken: cloudflareToken || '',
cloudflareZoneId: settingsMap['cloudflareZoneId'] || '', cloudflareZoneId: settingsMap['cloudflareZoneId'] || '',
dcrouterGatewayUrl: settingsMap['dcrouterGatewayUrl'] || '',
dcrouterGatewayApiToken: dcrouterGatewayApiToken || '',
dcrouterWorkHosterId: settingsMap['dcrouterWorkHosterId'] || '',
dcrouterTargetHost: settingsMap['dcrouterTargetHost'] || '',
dcrouterTargetPort: parseInt(settingsMap['dcrouterTargetPort'] || '0', 10),
autoRenewCerts: settingsMap['autoRenewCerts'] === 'true', autoRenewCerts: settingsMap['autoRenewCerts'] === 'true',
renewalThreshold: parseInt(settingsMap['renewalThreshold'] || '30', 10), renewalThreshold: parseInt(settingsMap['renewalThreshold'] || '30', 10),
acmeEmail: settingsMap['acmeEmail'] || '', acmeEmail: settingsMap['acmeEmail'] || '',
@@ -32,8 +41,8 @@ export class SettingsHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetSettings>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetSettings>(
'getSettings', 'getSettings',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const settings = this.getSettingsObject(); const settings = await this.getSettingsObject();
return { settings }; return { settings };
}, },
), ),
@@ -43,18 +52,28 @@ export class SettingsHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_UpdateSettings>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_UpdateSettings>(
'updateSettings', 'updateSettings',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const db = this.opsServerRef.oneboxRef.database; const db = this.opsServerRef.oneboxRef.database;
const updates = dataArg.settings; const updates = dataArg.settings;
// Store each setting as key-value pair // Store each setting as key-value pair
for (const [key, value] of Object.entries(updates)) { for (const [key, value] of Object.entries(updates)) {
if (value !== undefined) { if (value !== undefined) {
db.setSetting(key, String(value)); if (db.isSecretSettingKey(key)) {
await db.setSecretSetting(key, String(value));
} else {
db.setSetting(key, String(value));
}
} }
} }
const settings = this.getSettingsObject(); if (this.hasExternalGatewaySetting(updates)) {
this.refreshExternalGateway().catch((error) => {
logger.warn(`External gateway settings refresh failed: ${getErrorMessage(error)}`);
});
}
const settings = await this.getSettingsObject();
return { settings }; return { settings };
}, },
), ),
@@ -64,8 +83,8 @@ export class SettingsHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_SetBackupPassword>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_SetBackupPassword>(
'setBackupPassword', 'setBackupPassword',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
this.opsServerRef.oneboxRef.database.setSetting('backupPassword', dataArg.password); await this.opsServerRef.oneboxRef.database.setSecretSetting('backupPassword', dataArg.password);
return { ok: true }; return { ok: true };
}, },
), ),
@@ -75,12 +94,35 @@ export class SettingsHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetBackupPasswordStatus>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetBackupPasswordStatus>(
'getBackupPasswordStatus', 'getBackupPasswordStatus',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const backupPassword = this.opsServerRef.oneboxRef.database.getSetting('backupPassword'); const isConfigured = await this.opsServerRef.oneboxRef.database.hasSecretSetting('backupPassword');
const isConfigured = !!backupPassword;
return { status: { isConfigured } }; return { status: { isConfigured } };
}, },
), ),
); );
} }
private hasExternalGatewaySetting(settings: Partial<interfaces.data.ISettings>): boolean {
return [
'dcrouterGatewayUrl',
'dcrouterGatewayApiToken',
'dcrouterWorkHosterId',
'dcrouterTargetHost',
'dcrouterTargetPort',
].some((key) => Object.prototype.hasOwnProperty.call(settings, key));
}
private async refreshExternalGateway(): Promise<void> {
const onebox = this.opsServerRef.oneboxRef;
await onebox.externalGateway.syncDomains();
const services = onebox.database.getAllServices().filter((service) => service.domain);
await Promise.all(services.map(async (service) => {
try {
await onebox.externalGateway.syncServiceRoute(service);
} catch (error) {
logger.warn(`Failed to sync external gateway route for ${service.domain}: ${getErrorMessage(error)}`);
}
}));
}
} }
+5 -5
View File
@@ -1,7 +1,7 @@
import * as plugins from '../../plugins.ts'; import * as plugins from '../../plugins.ts';
import type { OpsServer } from '../classes.opsserver.ts'; import type { OpsServer } from '../classes.opsserver.ts';
import * as interfaces from '../../../ts_interfaces/index.ts'; import * as interfaces from '../../../ts_interfaces/index.ts';
import { requireValidIdentity } from '../helpers/guards.ts'; import { requireAdminIdentity } from '../helpers/guards.ts';
export class SslHandler { export class SslHandler {
public typedrouter = new plugins.typedrequest.TypedRouter(); public typedrouter = new plugins.typedrequest.TypedRouter();
@@ -16,7 +16,7 @@ export class SslHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_ObtainCertificate>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_ObtainCertificate>(
'obtainCertificate', 'obtainCertificate',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
await this.opsServerRef.oneboxRef.ssl.obtainCertificate(dataArg.domain, false); await this.opsServerRef.oneboxRef.ssl.obtainCertificate(dataArg.domain, false);
const certificate = this.opsServerRef.oneboxRef.ssl.getCertificate(dataArg.domain); const certificate = this.opsServerRef.oneboxRef.ssl.getCertificate(dataArg.domain);
return { certificate: certificate as unknown as interfaces.data.ICertificate }; return { certificate: certificate as unknown as interfaces.data.ICertificate };
@@ -28,7 +28,7 @@ export class SslHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_ListCertificates>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_ListCertificates>(
'listCertificates', 'listCertificates',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const certificates = this.opsServerRef.oneboxRef.ssl.listCertificates(); const certificates = this.opsServerRef.oneboxRef.ssl.listCertificates();
return { certificates: certificates as unknown as interfaces.data.ICertificate[] }; return { certificates: certificates as unknown as interfaces.data.ICertificate[] };
}, },
@@ -39,7 +39,7 @@ export class SslHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetCertificate>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetCertificate>(
'getCertificate', 'getCertificate',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const certificate = this.opsServerRef.oneboxRef.ssl.getCertificate(dataArg.domain); const certificate = this.opsServerRef.oneboxRef.ssl.getCertificate(dataArg.domain);
if (!certificate) { if (!certificate) {
throw new plugins.typedrequest.TypedResponseError('Certificate not found'); throw new plugins.typedrequest.TypedResponseError('Certificate not found');
@@ -53,7 +53,7 @@ export class SslHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_RenewCertificate>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_RenewCertificate>(
'renewCertificate', 'renewCertificate',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
await this.opsServerRef.oneboxRef.ssl.renewCertificate(dataArg.domain); await this.opsServerRef.oneboxRef.ssl.renewCertificate(dataArg.domain);
const certificate = this.opsServerRef.oneboxRef.ssl.getCertificate(dataArg.domain); const certificate = this.opsServerRef.oneboxRef.ssl.getCertificate(dataArg.domain);
return { certificate: certificate as unknown as interfaces.data.ICertificate }; return { certificate: certificate as unknown as interfaces.data.ICertificate };
+2 -2
View File
@@ -1,7 +1,7 @@
import * as plugins from '../../plugins.ts'; import * as plugins from '../../plugins.ts';
import type { OpsServer } from '../classes.opsserver.ts'; import type { OpsServer } from '../classes.opsserver.ts';
import * as interfaces from '../../../ts_interfaces/index.ts'; import * as interfaces from '../../../ts_interfaces/index.ts';
import { requireValidIdentity } from '../helpers/guards.ts'; import { requireAdminIdentity } from '../helpers/guards.ts';
export class StatusHandler { export class StatusHandler {
public typedrouter = new plugins.typedrequest.TypedRouter(); public typedrouter = new plugins.typedrequest.TypedRouter();
@@ -16,7 +16,7 @@ export class StatusHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetSystemStatus>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_GetSystemStatus>(
'getSystemStatus', 'getSystemStatus',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const status = await this.opsServerRef.oneboxRef.getSystemStatus(); const status = await this.opsServerRef.oneboxRef.getSystemStatus();
return { status: status as unknown as interfaces.data.ISystemStatus }; return { status: status as unknown as interfaces.data.ISystemStatus };
}, },
+8 -8
View File
@@ -2,7 +2,7 @@ import * as plugins from '../../plugins.ts';
import { logger } from '../../logging.ts'; import { logger } from '../../logging.ts';
import type { OpsServer } from '../classes.opsserver.ts'; import type { OpsServer } from '../classes.opsserver.ts';
import * as interfaces from '../../../ts_interfaces/index.ts'; import * as interfaces from '../../../ts_interfaces/index.ts';
import { requireValidIdentity } from '../helpers/guards.ts'; import { requireAdminIdentity } from '../helpers/guards.ts';
import { getErrorMessage } from '../../utils/error.ts'; import { getErrorMessage } from '../../utils/error.ts';
export class WorkspaceHandler { export class WorkspaceHandler {
@@ -30,7 +30,7 @@ export class WorkspaceHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_WorkspaceReadFile>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_WorkspaceReadFile>(
'workspaceReadFile', 'workspaceReadFile',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const containerId = await this.resolveContainerId(dataArg.serviceName); const containerId = await this.resolveContainerId(dataArg.serviceName);
const result = await this.opsServerRef.oneboxRef.docker.execInContainer( const result = await this.opsServerRef.oneboxRef.docker.execInContainer(
containerId, containerId,
@@ -49,7 +49,7 @@ export class WorkspaceHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_WorkspaceWriteFile>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_WorkspaceWriteFile>(
'workspaceWriteFile', 'workspaceWriteFile',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const containerId = await this.resolveContainerId(dataArg.serviceName); const containerId = await this.resolveContainerId(dataArg.serviceName);
// Use sh -c with printf to write content (handles special characters) // Use sh -c with printf to write content (handles special characters)
const escaped = dataArg.content.replace(/'/g, "'\\''"); const escaped = dataArg.content.replace(/'/g, "'\\''");
@@ -70,7 +70,7 @@ export class WorkspaceHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_WorkspaceReadDir>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_WorkspaceReadDir>(
'workspaceReadDir', 'workspaceReadDir',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const containerId = await this.resolveContainerId(dataArg.serviceName); const containerId = await this.resolveContainerId(dataArg.serviceName);
// Use ls with -1 -F to get entries with type indicators (/ for dirs) // Use ls with -1 -F to get entries with type indicators (/ for dirs)
const result = await this.opsServerRef.oneboxRef.docker.execInContainer( const result = await this.opsServerRef.oneboxRef.docker.execInContainer(
@@ -103,7 +103,7 @@ export class WorkspaceHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_WorkspaceMkdir>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_WorkspaceMkdir>(
'workspaceMkdir', 'workspaceMkdir',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const containerId = await this.resolveContainerId(dataArg.serviceName); const containerId = await this.resolveContainerId(dataArg.serviceName);
const result = await this.opsServerRef.oneboxRef.docker.execInContainer( const result = await this.opsServerRef.oneboxRef.docker.execInContainer(
containerId, containerId,
@@ -122,7 +122,7 @@ export class WorkspaceHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_WorkspaceRm>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_WorkspaceRm>(
'workspaceRm', 'workspaceRm',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const containerId = await this.resolveContainerId(dataArg.serviceName); const containerId = await this.resolveContainerId(dataArg.serviceName);
const args = dataArg.recursive ? ['rm', '-rf', dataArg.path] : ['rm', '-f', dataArg.path]; const args = dataArg.recursive ? ['rm', '-rf', dataArg.path] : ['rm', '-f', dataArg.path];
const result = await this.opsServerRef.oneboxRef.docker.execInContainer( const result = await this.opsServerRef.oneboxRef.docker.execInContainer(
@@ -142,7 +142,7 @@ export class WorkspaceHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_WorkspaceExists>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_WorkspaceExists>(
'workspaceExists', 'workspaceExists',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const containerId = await this.resolveContainerId(dataArg.serviceName); const containerId = await this.resolveContainerId(dataArg.serviceName);
const result = await this.opsServerRef.oneboxRef.docker.execInContainer( const result = await this.opsServerRef.oneboxRef.docker.execInContainer(
containerId, containerId,
@@ -158,7 +158,7 @@ export class WorkspaceHandler {
new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_WorkspaceExec>( new plugins.typedrequest.TypedHandler<interfaces.requests.IReq_WorkspaceExec>(
'workspaceExec', 'workspaceExec',
async (dataArg) => { async (dataArg) => {
await requireValidIdentity(this.opsServerRef.adminHandler, dataArg); await requireAdminIdentity(this.opsServerRef.adminHandler, dataArg);
const containerId = await this.resolveContainerId(dataArg.serviceName); const containerId = await this.resolveContainerId(dataArg.serviceName);
const cmd = dataArg.args const cmd = dataArg.args
? [dataArg.command, ...dataArg.args] ? [dataArg.command, ...dataArg.args]
+4 -16
View File
@@ -5,25 +5,13 @@ import * as interfaces from '../../../ts_interfaces/index.ts';
export async function requireValidIdentity<T extends { identity?: interfaces.data.IIdentity }>( export async function requireValidIdentity<T extends { identity?: interfaces.data.IIdentity }>(
adminHandler: AdminHandler, adminHandler: AdminHandler,
dataArg: T, dataArg: T,
): Promise<void> { ): Promise<interfaces.data.IIdentity> {
if (!dataArg.identity) { return await adminHandler.getVerifiedIdentity(dataArg.identity);
throw new plugins.typedrequest.TypedResponseError('No identity provided');
}
const passed = await adminHandler.validIdentityGuard.exec({ identity: dataArg.identity });
if (!passed) {
throw new plugins.typedrequest.TypedResponseError('Valid identity required');
}
} }
export async function requireAdminIdentity<T extends { identity?: interfaces.data.IIdentity }>( export async function requireAdminIdentity<T extends { identity?: interfaces.data.IIdentity }>(
adminHandler: AdminHandler, adminHandler: AdminHandler,
dataArg: T, dataArg: T,
): Promise<void> { ): Promise<interfaces.data.IIdentity> {
if (!dataArg.identity) { return await adminHandler.getVerifiedAdminIdentity(dataArg.identity);
throw new plugins.typedrequest.TypedResponseError('No identity provided');
}
const passed = await adminHandler.adminIdentityGuard.exec({ identity: dataArg.identity });
if (!passed) {
throw new plugins.typedrequest.TypedResponseError('Admin access required');
}
} }
+25 -6
View File
@@ -34,17 +34,27 @@ import * as smartregistry from '@push.rocks/smartregistry';
export { smartregistry }; export { smartregistry };
// S3-compatible storage server // S3-compatible storage server
import * as smarts3 from '@push.rocks/smarts3'; import * as smartstorage from '@push.rocks/smartstorage';
export { smarts3 }; export { smartstorage };
// AWS S3 client for S3-compatible object operations
import {
S3Client,
ListObjectsV2Command,
GetObjectCommand,
PutObjectCommand,
} from 'npm:@aws-sdk/client-s3@3.1009.0';
export const awsS3 = {
S3Client,
ListObjectsV2Command,
GetObjectCommand,
PutObjectCommand,
};
// Task scheduling and cron jobs // Task scheduling and cron jobs
import * as taskbuffer from '@push.rocks/taskbuffer'; import * as taskbuffer from '@push.rocks/taskbuffer';
export { taskbuffer }; export { taskbuffer };
// Crypto utilities (for password hashing, encryption)
import * as bcrypt from 'https://deno.land/x/bcrypt@v0.4.1/mod.ts';
export { bcrypt };
// JWT for authentication // JWT for authentication
import * as jwt from 'https://deno.land/x/djwt@v3.0.2/mod.ts'; import * as jwt from 'https://deno.land/x/djwt@v3.0.2/mod.ts';
export { jwt}; export { jwt};
@@ -67,3 +77,12 @@ export { typedrequest, typedserver };
import * as smartguard from '@push.rocks/smartguard'; import * as smartguard from '@push.rocks/smartguard';
import * as smartjwt from '@push.rocks/smartjwt'; import * as smartjwt from '@push.rocks/smartjwt';
export { smartguard, smartjwt }; export { smartguard, smartjwt };
// Backup archive (content-addressed dedup storage)
import { ContainerArchive } from '@serve.zone/containerarchive';
export { ContainerArchive };
// Node.js compat for streaming
import * as nodeFs from 'node:fs';
import * as nodeStream from 'node:stream';
export { nodeFs, nodeStream };
+27 -7
View File
@@ -25,6 +25,9 @@ export interface IService {
platformRequirements?: IPlatformRequirements; platformRequirements?: IPlatformRequirements;
// Backup settings // Backup settings
includeImageInBackup?: boolean; includeImageInBackup?: boolean;
// App Store template tracking
appTemplateId?: string;
appTemplateVersion?: string;
} }
// Registry types // Registry types
@@ -75,7 +78,7 @@ export interface ITokenCreatedResponse {
} }
// Platform service types // Platform service types
export type TPlatformServiceType = 'mongodb' | 'minio' | 'redis' | 'postgresql' | 'rabbitmq' | 'caddy' | 'clickhouse'; export type TPlatformServiceType = 'mongodb' | 'minio' | 'redis' | 'postgresql' | 'rabbitmq' | 'smartproxy' | 'clickhouse' | 'mariadb';
export type TPlatformResourceType = 'database' | 'bucket' | 'cache' | 'queue'; export type TPlatformResourceType = 'database' | 'bucket' | 'cache' | 'queue';
export type TPlatformServiceStatus = 'stopped' | 'starting' | 'running' | 'stopping' | 'failed'; export type TPlatformServiceStatus = 'stopped' | 'starting' | 'running' | 'stopping' | 'failed';
@@ -113,6 +116,8 @@ export interface IPlatformRequirements {
mongodb?: boolean; mongodb?: boolean;
s3?: boolean; s3?: boolean;
clickhouse?: boolean; clickhouse?: boolean;
redis?: boolean;
mariadb?: boolean;
} }
export interface IProvisionedResource { export interface IProvisionedResource {
@@ -143,7 +148,7 @@ export interface INginxConfig {
export interface IDomain { export interface IDomain {
id?: number; id?: number;
domain: string; domain: string;
dnsProvider: 'cloudflare' | 'manual' | null; dnsProvider: 'cloudflare' | 'manual' | 'dcrouter' | null;
cloudflareZoneId?: string; cloudflareZoneId?: string;
isObsolete: boolean; isObsolete: boolean;
defaultWildcard: boolean; defaultWildcard: boolean;
@@ -252,14 +257,21 @@ export interface ISetting {
// Application settings // Application settings
export interface IAppSettings { export interface IAppSettings {
serverIP?: string; serverIP?: string;
cloudflareAPIKey?: string; cloudflareToken?: string;
cloudflareEmail?: string; cloudflareZoneId?: string;
cloudflareZoneID?: string; dcrouterGatewayUrl?: string;
dcrouterGatewayApiToken?: string;
dcrouterWorkHosterId?: string;
dcrouterTargetHost?: string;
dcrouterTargetPort?: number;
acmeEmail?: string; acmeEmail?: string;
nginxConfigDir?: string;
dataDir?: string; dataDir?: string;
httpPort?: number; httpPort?: number;
httpsPort?: number;
metricsInterval?: number; metricsInterval?: number;
autoRenewCerts?: boolean;
renewalThreshold?: number;
forceHttps?: boolean;
logRetentionDays?: number; logRetentionDays?: number;
} }
@@ -291,6 +303,11 @@ export interface IServiceDeployOptions {
enableMongoDB?: boolean; enableMongoDB?: boolean;
enableS3?: boolean; enableS3?: boolean;
enableClickHouse?: boolean; enableClickHouse?: boolean;
enableRedis?: boolean;
enableMariaDB?: boolean;
// App Store template tracking
appTemplateId?: string;
appTemplateVersion?: string;
} }
// HTTP API request/response types // HTTP API request/response types
@@ -346,7 +363,9 @@ export interface IBackup {
serviceId: number; serviceId: number;
serviceName: string; // Denormalized for display serviceName: string; // Denormalized for display
filename: string; filename: string;
snapshotId?: string; // ContainerArchive snapshot ID (new backups)
sizeBytes: number; sizeBytes: number;
storedSizeBytes?: number; // Actual stored size after dedup+compression
createdAt: number; createdAt: number;
includesImage: boolean; includesImage: boolean;
platformResources: TPlatformServiceType[]; // Which platform types were backed up platformResources: TPlatformServiceType[]; // Which platform types were backed up
@@ -389,7 +408,8 @@ export interface IBackupPlatformResource {
export interface IBackupResult { export interface IBackupResult {
backup: IBackup; backup: IBackup;
filePath: string; filePath?: string; // Legacy file-based backups only
snapshotId?: string; // ContainerArchive snapshot ID
} }
export interface IRestoreOptions { export interface IRestoreOptions {
+94
View File
@@ -0,0 +1,94 @@
// Password hashing helpers based on PBKDF2-SHA256 via the Web Crypto API.
// Web Crypto is used instead of a worker-backed library so compiled binaries
// do not depend on external worker files.

// Stored hash format: pbkdf2-sha256$<iterations>$<salt b64>$<derived key b64>
const pbkdf2HashPattern = /^pbkdf2-sha256\$(\d+)\$([A-Za-z0-9+/=]+)\$([A-Za-z0-9+/=]+)$/;
// 210k iterations follows current OWASP guidance for PBKDF2-HMAC-SHA256.
const pbkdf2Iterations = 210_000;
const pbkdf2KeyLengthBits = 256;

// Encode raw bytes as standard base64.
const bytesToBase64 = (bytesArg: Uint8Array): string => {
  let binary = '';
  for (const byte of bytesArg) {
    binary += String.fromCharCode(byte);
  }
  return btoa(binary);
};

// Decode standard base64 to raw bytes. Note: atob throws on input that uses
// valid base64 characters but has an invalid length/padding.
const base64ToBytes = (base64Arg: string): Uint8Array => {
  const binary = atob(base64Arg);
  const bytes = new Uint8Array(binary.length);
  for (let i = 0; i < binary.length; i++) {
    bytes[i] = binary.charCodeAt(i);
  }
  return bytes;
};

// Constant-time byte comparison so the mismatch position is not leaked via
// timing. (The early length check is fine: digest lengths are fixed and public.)
const timingSafeEqual = (aArg: Uint8Array, bArg: Uint8Array): boolean => {
  if (aArg.length !== bArg.length) {
    return false;
  }
  let diff = 0;
  for (let i = 0; i < aArg.length; i++) {
    diff |= aArg[i] ^ bArg[i];
  }
  return diff === 0;
};

// Copy a Uint8Array's view into a standalone ArrayBuffer — crypto.subtle
// expects a plain buffer and the view may be offset into a larger one.
const toArrayBuffer = (bytesArg: Uint8Array): ArrayBuffer => {
  return bytesArg.buffer.slice(
    bytesArg.byteOffset,
    bytesArg.byteOffset + bytesArg.byteLength,
  ) as ArrayBuffer;
};

// Derive a 256-bit key from password + salt using PBKDF2-SHA256.
const derivePasswordHash = async (
  passwordArg: string,
  saltArg: Uint8Array,
  iterationsArg: number,
): Promise<Uint8Array> => {
  const key = await crypto.subtle.importKey(
    'raw',
    new TextEncoder().encode(passwordArg),
    'PBKDF2',
    false,
    ['deriveBits'],
  );
  const bits = await crypto.subtle.deriveBits(
    {
      name: 'PBKDF2',
      hash: 'SHA-256',
      salt: toArrayBuffer(saltArg),
      iterations: iterationsArg,
    },
    key,
    pbkdf2KeyLengthBits,
  );
  return new Uint8Array(bits);
};

/** Whether a stored hash uses the pbkdf2-sha256 format produced by hashPassword. */
export function isPbkdf2Hash(passwordHash: string): boolean {
  return pbkdf2HashPattern.test(passwordHash);
}

/**
 * Hash a password with PBKDF2-SHA256 and a fresh random 16-byte salt.
 *
 * @returns a self-describing string: `pbkdf2-sha256$<iterations>$<salt>$<hash>`
 */
export async function hashPassword(password: string): Promise<string> {
  // Use Web Crypto only so compiled binaries do not depend on external worker files.
  const salt = crypto.getRandomValues(new Uint8Array(16));
  const hash = await derivePasswordHash(password, salt, pbkdf2Iterations);
  return `pbkdf2-sha256$${pbkdf2Iterations}$${bytesToBase64(salt)}$${bytesToBase64(hash)}`;
}

/**
 * Verify a password against a stored hash.
 *
 * Returns false — never throws — for empty, unrecognized, or malformed stored
 * hashes, so legacy or corrupt database values simply fail authentication
 * instead of crashing the login path.
 */
export async function verifyPassword(password: string, passwordHash: string): Promise<boolean> {
  if (!passwordHash) {
    return false;
  }
  const pbkdf2Match = passwordHash.match(pbkdf2HashPattern);
  if (!pbkdf2Match) {
    // Not a pbkdf2-sha256 record (e.g. a legacy bcrypt hash): reject.
    return false;
  }
  const iterations = Number(pbkdf2Match[1]);
  // Guard against a nonsensical iteration count in a tampered/corrupt record.
  if (!Number.isSafeInteger(iterations) || iterations <= 0) {
    return false;
  }
  try {
    const salt = base64ToBytes(pbkdf2Match[2]);
    const expectedHash = base64ToBytes(pbkdf2Match[3]);
    const actualHash = await derivePasswordHash(password, salt, iterations);
    return timingSafeEqual(actualHash, expectedHash);
  } catch {
    // atob throws on base64 that matches the charset but has an invalid
    // length/padding; treat such records as failed verification.
    return false;
  }
}
+1 -1
View File
File diff suppressed because one or more lines are too long
+2
View File
@@ -28,7 +28,9 @@ export interface IBackup {
serviceId: number; serviceId: number;
serviceName: string; serviceName: string;
filename: string; filename: string;
snapshotId?: string;
sizeBytes: number; sizeBytes: number;
storedSizeBytes?: number;
createdAt: number; createdAt: number;
includesImage: boolean; includesImage: boolean;
platformResources: TPlatformServiceType[]; platformResources: TPlatformServiceType[];
+1 -1
View File
@@ -5,7 +5,7 @@
export interface IDomain { export interface IDomain {
id?: number; id?: number;
domain: string; domain: string;
dnsProvider: 'cloudflare' | 'manual' | null; dnsProvider: 'cloudflare' | 'manual' | 'dcrouter' | null;
cloudflareZoneId?: string; cloudflareZoneId?: string;
isObsolete: boolean; isObsolete: boolean;
defaultWildcard: boolean; defaultWildcard: boolean;
+2 -2
View File
@@ -41,7 +41,7 @@ export interface ITrafficStats {
errorRate: number; errorRate: number;
} }
export interface ICaddyAccessLog { export interface IProxyAccessLog {
ts: number; ts: number;
request: { request: {
remote_ip: string; remote_ip: string;
@@ -59,6 +59,6 @@ export interface INetworkLogMessage {
type: 'connected' | 'access_log' | 'filter_updated'; type: 'connected' | 'access_log' | 'filter_updated';
clientId?: string; clientId?: string;
filter?: { domain?: string; sampleRate?: number }; filter?: { domain?: string; sampleRate?: number };
data?: ICaddyAccessLog; data?: IProxyAccessLog;
timestamp: number; timestamp: number;
} }
+3 -1
View File
@@ -2,7 +2,7 @@
* Platform service data shapes for Onebox * Platform service data shapes for Onebox
*/ */
export type TPlatformServiceType = 'mongodb' | 'minio' | 'redis' | 'postgresql' | 'rabbitmq' | 'caddy' | 'clickhouse'; export type TPlatformServiceType = 'mongodb' | 'minio' | 'redis' | 'postgresql' | 'rabbitmq' | 'smartproxy' | 'clickhouse' | 'mariadb';
export type TPlatformServiceStatus = 'not-deployed' | 'stopped' | 'starting' | 'running' | 'stopping' | 'failed'; export type TPlatformServiceStatus = 'not-deployed' | 'stopped' | 'starting' | 'running' | 'stopping' | 'failed';
export type TPlatformResourceType = 'database' | 'bucket' | 'cache' | 'queue'; export type TPlatformResourceType = 'database' | 'bucket' | 'cache' | 'queue';
@@ -10,6 +10,8 @@ export interface IPlatformRequirements {
mongodb?: boolean; mongodb?: boolean;
s3?: boolean; s3?: boolean;
clickhouse?: boolean; clickhouse?: boolean;
redis?: boolean;
mariadb?: boolean;
} }
export interface IPlatformService { export interface IPlatformService {
+7
View File
@@ -28,6 +28,9 @@ export interface IService {
platformRequirements?: IPlatformRequirements; platformRequirements?: IPlatformRequirements;
// Backup settings // Backup settings
includeImageInBackup?: boolean; includeImageInBackup?: boolean;
// App Store template tracking
appTemplateId?: string;
appTemplateVersion?: string;
} }
export interface IServiceCreate { export interface IServiceCreate {
@@ -42,6 +45,10 @@ export interface IServiceCreate {
enableMongoDB?: boolean; enableMongoDB?: boolean;
enableS3?: boolean; enableS3?: boolean;
enableClickHouse?: boolean; enableClickHouse?: boolean;
enableRedis?: boolean;
enableMariaDB?: boolean;
appTemplateId?: string;
appTemplateVersion?: string;
} }
export interface IServiceUpdate { export interface IServiceUpdate {
+5
View File
@@ -5,6 +5,11 @@
export interface ISettings { export interface ISettings {
cloudflareToken: string; cloudflareToken: string;
cloudflareZoneId: string; cloudflareZoneId: string;
dcrouterGatewayUrl: string;
dcrouterGatewayApiToken: string;
dcrouterWorkHosterId: string;
dcrouterTargetHost: string;
dcrouterTargetPort: number;
autoRenewCerts: boolean; autoRenewCerts: boolean;
renewalThreshold: number; renewalThreshold: number;
acmeEmail: string; acmeEmail: string;
+106
View File
@@ -0,0 +1,106 @@
import * as plugins from '../plugins.ts';
import * as data from '../data/index.ts';

/**
 * Summary entry for an app as shown in the App Store catalog listing.
 */
export interface ICatalogApp {
  id: string;
  name: string;
  description: string;
  category: string;
  iconName?: string;
  iconUrl?: string;
  latestVersion: string;
  tags?: string[];
}

/**
 * Deployable configuration for one specific version of an app template.
 */
export interface IAppVersionConfig {
  // Container image reference for this version.
  image: string;
  // Primary port the app listens on.
  port: number;
  // Environment variables the template expects; `required` marks values the
  // user must supply before install.
  envVars?: Array<{ key: string; value: string; description: string; required?: boolean }>;
  volumes?: string[];
  // Platform services this app depends on (provisioned by Onebox).
  platformRequirements?: {
    mongodb?: boolean;
    s3?: boolean;
    clickhouse?: boolean;
    redis?: boolean;
    mariadb?: boolean;
  };
  // Minimum Onebox version this template version supports, if constrained.
  minOneboxVersion?: string;
}

/**
 * Full metadata for an app template, including all published versions.
 */
export interface IAppMeta {
  id: string;
  name: string;
  description: string;
  category: string;
  iconName?: string;
  latestVersion: string;
  versions: string[];
  maintainer?: string;
  links?: Record<string, string>;
}

/**
 * A deployed service whose app template has a newer version available.
 */
export interface IUpgradeableService {
  serviceName: string;
  appTemplateId: string;
  currentVersion: string;
  latestVersion: string;
  // Whether upgrading involves a migration step — TODO confirm exact semantics
  // against the server-side upgrade implementation.
  hasMigration: boolean;
}

/**
 * List all app templates available in the App Store catalog.
 */
export interface IReq_GetAppTemplates extends plugins.typedrequestInterfaces.implementsTR<
  plugins.typedrequestInterfaces.ITypedRequest,
  IReq_GetAppTemplates
> {
  method: 'getAppTemplates';
  request: {
    identity: data.IIdentity;
  };
  response: {
    apps: ICatalogApp[];
  };
}

/**
 * Fetch the deploy configuration and metadata for one app at a given version.
 */
export interface IReq_GetAppConfig extends plugins.typedrequestInterfaces.implementsTR<
  plugins.typedrequestInterfaces.ITypedRequest,
  IReq_GetAppConfig
> {
  method: 'getAppConfig';
  request: {
    identity: data.IIdentity;
    appId: string;
    version: string;
  };
  response: {
    config: IAppVersionConfig;
    appMeta: IAppMeta;
  };
}

/**
 * List deployed services that can be upgraded to a newer template version.
 */
export interface IReq_GetUpgradeableServices extends plugins.typedrequestInterfaces.implementsTR<
  plugins.typedrequestInterfaces.ITypedRequest,
  IReq_GetUpgradeableServices
> {
  method: 'getUpgradeableServices';
  request: {
    identity: data.IIdentity;
  };
  response: {
    services: IUpgradeableService[];
  };
}

/**
 * Upgrade a deployed service to the given template version.
 * Returns the updated service plus any non-fatal warnings from the upgrade.
 */
export interface IReq_UpgradeService extends plugins.typedrequestInterfaces.implementsTR<
  plugins.typedrequestInterfaces.ITypedRequest,
  IReq_UpgradeService
> {
  method: 'upgradeService';
  request: {
    identity: data.IIdentity;
    serviceName: string;
    targetVersion: string;
  };
  response: {
    service: data.IService;
    warnings: string[];
  };
}
+1
View File
@@ -12,3 +12,4 @@ export * from './backup-schedules.ts';
export * from './settings.ts'; export * from './settings.ts';
export * from './logs.ts'; export * from './logs.ts';
export * from './workspace.ts'; export * from './workspace.ts';
export * from './appstore.ts';
+13
View File
@@ -228,3 +228,16 @@ export interface IReq_PushServiceLog extends plugins.typedrequestInterfaces.impl
}; };
response: {}; response: {};
} }
/**
 * Server→client push notifying the UI that a service changed on the server.
 * Sent over the typed socket so the client can update its local service list
 * without polling.
 */
export interface IReq_PushServiceUpdate extends plugins.typedrequestInterfaces.implementsTR<
  plugins.typedrequestInterfaces.ITypedRequest,
  IReq_PushServiceUpdate
> {
  method: 'pushServiceUpdate';
  request: {
    // Lifecycle event that occurred for the named service.
    action: 'created' | 'updated' | 'deleted' | 'started' | 'stopped';
    serviceName: string;
    // Updated service record when available; presumably omitted for
    // 'deleted' (clients can remove by serviceName) — confirm against sender.
    service?: data.IService;
  };
  response: {};
}
+1 -1
View File
@@ -3,6 +3,6 @@
*/ */
export const commitinfo = { export const commitinfo = {
name: '@serve.zone/onebox', name: '@serve.zone/onebox',
version: '1.22.2', version: '1.24.2',
description: 'Self-hosted container platform with automatic SSL and DNS - a mini Heroku for single servers' description: 'Self-hosted container platform with automatic SSL and DNS - a mini Heroku for single servers'
} }
+128 -1
View File
@@ -54,6 +54,11 @@ export interface ISettingsState {
backupPasswordConfigured: boolean; backupPasswordConfigured: boolean;
} }
export interface IAppStoreState {
apps: interfaces.requests.ICatalogApp[];
upgradeableServices: interfaces.requests.IUpgradeableService[];
}
export interface IUiState { export interface IUiState {
activeView: string; activeView: string;
autoRefresh: boolean; autoRefresh: boolean;
@@ -137,6 +142,15 @@ export const settingsStatePart = await appState.getStatePart<ISettingsState>(
'soft', 'soft',
); );
export const appStoreStatePart = await appState.getStatePart<IAppStoreState>(
'appStore',
{
apps: [],
upgradeableServices: [],
},
'soft',
);
export const uiStatePart = await appState.getStatePart<IUiState>( export const uiStatePart = await appState.getStatePart<IUiState>(
'ui', 'ui',
{ {
@@ -914,7 +928,8 @@ export const setBackupPasswordAction = settingsStatePart.createAction<{ password
export const setActiveViewAction = uiStatePart.createAction<{ view: string }>( export const setActiveViewAction = uiStatePart.createAction<{ view: string }>(
async (statePartArg, dataArg) => { async (statePartArg, dataArg) => {
return { ...statePartArg.getState(), activeView: dataArg.view }; const normalizedView = dataArg.view.toLowerCase().replace(/\s+/g, '-');
return { ...statePartArg.getState(), activeView: normalizedView };
}, },
); );
@@ -970,6 +985,56 @@ startAutoRefresh();
let socketClient: InstanceType<typeof plugins.typedsocket.TypedSocket> | null = null; let socketClient: InstanceType<typeof plugins.typedsocket.TypedSocket> | null = null;
const socketRouter = new plugins.domtools.plugins.typedrequest.TypedRouter(); const socketRouter = new plugins.domtools.plugins.typedrequest.TypedRouter();
// Return a copy of the service list with `service` replacing the entry of the
// same name, or appended when no entry matches. Never mutates the input array.
const upsertService = (
  services: interfaces.data.IService[],
  service: interfaces.data.IService,
): interfaces.data.IService[] => {
  const matchIndex = services.findIndex((candidate) => candidate.name === service.name);
  if (matchIndex < 0) {
    return services.concat(service);
  }
  return services.map((candidate, position) => (position === matchIndex ? service : candidate));
};
// Handle server-pushed service updates: keep the services list (and the
// currently-selected service) in sync without a full refetch.
socketRouter.addTypedHandler(
  new plugins.domtools.plugins.typedrequest.TypedHandler<interfaces.requests.IReq_PushServiceUpdate>(
    'pushServiceUpdate',
    async (dataArg) => {
      const state = servicesStatePart.getState();
      let services = state.services;
      let currentService = state.currentService;
      let currentServiceLogs = state.currentServiceLogs;
      let currentServiceStats = state.currentServiceStats;
      if (dataArg.action === 'deleted') {
        // Remove by name; if it was the selected service, clear the detail view.
        services = services.filter((service) => service.name !== dataArg.serviceName);
        if (currentService?.name === dataArg.serviceName) {
          currentService = null;
          currentServiceLogs = [];
          currentServiceStats = null;
        }
      } else if (dataArg.service) {
        // created/updated/started/stopped: insert or replace the record, and
        // refresh the selected service if it is the one that changed.
        services = upsertService(services, dataArg.service);
        if (currentService?.name === dataArg.service.name) {
          currentService = dataArg.service;
        }
      }
      servicesStatePart.setState({
        ...state,
        services,
        currentService,
        currentServiceLogs,
        currentServiceStats,
      });
      return {};
    },
  ),
);
// Handle server-pushed platform service log entries // Handle server-pushed platform service log entries
socketRouter.addTypedHandler( socketRouter.addTypedHandler(
new plugins.domtools.plugins.typedrequest.TypedHandler<interfaces.requests.IReq_PushPlatformServiceLog>( new plugins.domtools.plugins.typedrequest.TypedHandler<interfaces.requests.IReq_PushPlatformServiceLog>(
@@ -1055,6 +1120,68 @@ async function disconnectSocket() {
} }
} }
// ============================================================================
// App Store Actions
// ============================================================================

// Fetch the App Store catalog and store it in appStore state.
// On failure, logs and leaves the current state untouched.
export const fetchAppTemplatesAction = appStoreStatePart.createAction(
  async (statePartArg) => {
    const context = getActionContext();
    try {
      const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
        interfaces.requests.IReq_GetAppTemplates
      >('/typedrequest', 'getAppTemplates');
      const response = await typedRequest.fire({ identity: context.identity! });
      return { ...statePartArg.getState(), apps: response.apps };
    } catch (err) {
      console.error('Failed to fetch app templates:', err);
      return statePartArg.getState();
    }
  },
);

// Fetch services with a newer template version available.
// On failure, logs and leaves the current state untouched.
export const fetchUpgradeableServicesAction = appStoreStatePart.createAction(
  async (statePartArg) => {
    const context = getActionContext();
    try {
      const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
        interfaces.requests.IReq_GetUpgradeableServices
      >('/typedrequest', 'getUpgradeableServices');
      const response = await typedRequest.fire({ identity: context.identity! });
      return { ...statePartArg.getState(), upgradeableServices: response.services };
    } catch (err) {
      console.error('Failed to fetch upgradeable services:', err);
      return statePartArg.getState();
    }
  },
);

// Upgrade one service to a target template version, then refresh the
// upgradeable-services list so the UI reflects the new state.
// On failure (including a failed upgrade call), logs and keeps current state.
export const upgradeServiceAction = appStoreStatePart.createAction<{
  serviceName: string;
  targetVersion: string;
}>(async (statePartArg, dataArg) => {
  const context = getActionContext();
  try {
    const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
      interfaces.requests.IReq_UpgradeService
    >('/typedrequest', 'upgradeService');
    await typedRequest.fire({
      identity: context.identity!,
      serviceName: dataArg.serviceName,
      targetVersion: dataArg.targetVersion,
    });
    // Re-fetch upgradeable services and services list
    const upgradeReq = new plugins.domtools.plugins.typedrequest.TypedRequest<
      interfaces.requests.IReq_GetUpgradeableServices
    >('/typedrequest', 'getUpgradeableServices');
    const upgradeResp = await upgradeReq.fire({ identity: context.identity! });
    return { ...statePartArg.getState(), upgradeableServices: upgradeResp.services };
  } catch (err) {
    console.error('Failed to upgrade service:', err);
    return statePartArg.getState();
  }
});
// Connect socket when logged in, disconnect when logged out // Connect socket when logged in, disconnect when logged out
loginStatePart.select((s) => s).subscribe((loginState) => { loginStatePart.select((s) => s).subscribe((loginState) => {
if (loginState.isLoggedIn) { if (loginState.isLoggedIn) {
+24 -14
View File
@@ -1,6 +1,7 @@
import * as plugins from '../plugins.js'; import * as plugins from '../plugins.js';
import * as appstate from '../appstate.js'; import * as appstate from '../appstate.js';
import * as interfaces from '../../ts_interfaces/index.js'; import * as interfaces from '../../ts_interfaces/index.js';
import { appRouter } from '../router.js';
import { import {
DeesElement, DeesElement,
customElement, customElement,
@@ -93,6 +94,9 @@ export class ObAppShell extends DeesElement {
<dees-simple-appdash <dees-simple-appdash
name="Onebox" name="Onebox"
.viewTabs=${this.resolvedViewTabs} .viewTabs=${this.resolvedViewTabs}
.selectedView=${this.resolvedViewTabs.find(
(t) => t.name.toLowerCase().replace(/\s+/g, '-') === this.uiState.activeView
) || this.resolvedViewTabs[0]}
> >
</dees-simple-appdash> </dees-simple-appdash>
</dees-simple-login> </dees-simple-login>
@@ -122,8 +126,8 @@ export class ObAppShell extends DeesElement {
const appDash = this.shadowRoot!.querySelector('dees-simple-appdash') as any; const appDash = this.shadowRoot!.querySelector('dees-simple-appdash') as any;
if (appDash) { if (appDash) {
appDash.addEventListener('view-select', (e: CustomEvent) => { appDash.addEventListener('view-select', (e: CustomEvent) => {
const viewName = e.detail.view.name.toLowerCase(); const viewName = e.detail.view.name.toLowerCase().replace(/\s+/g, '-');
appstate.uiStatePart.dispatchAction(appstate.setActiveViewAction, { view: viewName }); appRouter.navigateToView(viewName);
}); });
appDash.addEventListener('logout', async () => { appDash.addEventListener('logout', async () => {
await appstate.loginStatePart.dispatchAction(appstate.logoutAction, null); await appstate.loginStatePart.dispatchAction(appstate.logoutAction, null);
@@ -131,10 +135,11 @@ export class ObAppShell extends DeesElement {
} }
// Load the initial view on the appdash now that tabs are resolved // Load the initial view on the appdash now that tabs are resolved
// (appdash's own firstUpdated already fired when viewTabs was still empty) // Read activeView directly from state (not this.uiState which may be stale)
if (appDash && this.resolvedViewTabs.length > 0) { if (appDash && this.resolvedViewTabs.length > 0) {
const currentActiveView = appstate.uiStatePart.getState().activeView;
const initialView = this.resolvedViewTabs.find( const initialView = this.resolvedViewTabs.find(
(t) => t.name.toLowerCase() === this.uiState.activeView, (t) => t.name.toLowerCase().replace(/\s+/g, '-') === currentActiveView,
) || this.resolvedViewTabs[0]; ) || this.resolvedViewTabs[0];
await appDash.loadView(initialView); await appDash.loadView(initialView);
} }
@@ -143,23 +148,26 @@ export class ObAppShell extends DeesElement {
const loginState = appstate.loginStatePart.getState(); const loginState = appstate.loginStatePart.getState();
if (loginState.identity?.jwt) { if (loginState.identity?.jwt) {
if (loginState.identity.expiresAt > Date.now()) { if (loginState.identity.expiresAt > Date.now()) {
// Validate token with server before switching to dashboard // Switch to dashboard immediately (no flash of login form)
// (server may have restarted with a new JWT secret) this.loginState = loginState;
if (simpleLogin) {
await simpleLogin.switchToSlottedContent();
}
// Validate token with server in the background
try { try {
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest< const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
interfaces.requests.IReq_GetSystemStatus interfaces.requests.IReq_GetSystemStatus
>('/typedrequest', 'getSystemStatus'); >('/typedrequest', 'getSystemStatus');
const response = await typedRequest.fire({ identity: loginState.identity }); const response = await typedRequest.fire({ identity: loginState.identity });
// Token is valid - switch to dashboard
appstate.systemStatePart.setState({ status: response.status }); appstate.systemStatePart.setState({ status: response.status });
this.loginState = loginState;
if (simpleLogin) {
await simpleLogin.switchToSlottedContent();
}
} catch (err) { } catch (err) {
// Token rejected by server - clear session // Token rejected by server - switch back to login
console.warn('Stored session invalid, returning to login:', err); console.warn('Stored session invalid, returning to login:', err);
await appstate.loginStatePart.dispatchAction(appstate.logoutAction, null); await appstate.loginStatePart.dispatchAction(appstate.logoutAction, null);
if (simpleLogin) {
// Force page reload to show login properly
window.location.reload();
}
} }
} else { } else {
await appstate.loginStatePart.dispatchAction(appstate.logoutAction, null); await appstate.loginStatePart.dispatchAction(appstate.logoutAction, null);
@@ -201,9 +209,11 @@ export class ObAppShell extends DeesElement {
private syncAppdashView(viewName: string): void { private syncAppdashView(viewName: string): void {
const appDash = this.shadowRoot?.querySelector('dees-simple-appdash') as any; const appDash = this.shadowRoot?.querySelector('dees-simple-appdash') as any;
if (!appDash || this.resolvedViewTabs.length === 0) return; if (!appDash || this.resolvedViewTabs.length === 0) return;
const targetTab = this.resolvedViewTabs.find((t) => t.name.toLowerCase() === viewName); // Match kebab-case view name (e.g., 'app-store') to tab name (e.g., 'App Store')
const targetTab = this.resolvedViewTabs.find(
(t) => t.name.toLowerCase().replace(/\s+/g, '-') === viewName
);
if (!targetTab) return; if (!targetTab) return;
// Use appdash's own loadView method for proper view management
appDash.loadView(targetTab); appDash.loadView(targetTab);
} }
} }
+618 -187
View File
@@ -2,6 +2,7 @@ import * as plugins from '../plugins.js';
import * as shared from './shared/index.js'; import * as shared from './shared/index.js';
import * as appstate from '../appstate.js'; import * as appstate from '../appstate.js';
import * as interfaces from '../../ts_interfaces/index.js'; import * as interfaces from '../../ts_interfaces/index.js';
import { appRouter } from '../router.js';
import { import {
DeesElement, DeesElement,
customElement, customElement,
@@ -12,213 +13,643 @@ import {
type TemplateResult, type TemplateResult,
} from '@design.estate/dees-element'; } from '@design.estate/dees-element';
// App template definitions — curated Docker apps
const appTemplates = [
{
id: 'nginx',
name: 'Nginx',
description: 'High-performance web server and reverse proxy. Lightweight, fast, and battle-tested.',
category: 'Web Server',
iconName: 'globe',
image: 'nginx:alpine',
port: 80,
},
{
id: 'wordpress',
name: 'WordPress',
description: 'The world\'s most popular content management system. Powers over 40% of the web.',
category: 'CMS',
iconName: 'file-text',
image: 'wordpress:latest',
port: 80,
enableMongoDB: false,
envVars: [
{ key: 'WORDPRESS_DB_HOST', value: '', description: 'Database host', required: true },
{ key: 'WORDPRESS_DB_USER', value: 'wordpress', description: 'Database user' },
{ key: 'WORDPRESS_DB_PASSWORD', value: '', description: 'Database password', required: true },
{ key: 'WORDPRESS_DB_NAME', value: 'wordpress', description: 'Database name' },
],
},
{
id: 'ghost',
name: 'Ghost',
description: 'Modern publishing platform for creating professional blogs and newsletters.',
category: 'CMS',
iconName: 'book-open',
image: 'ghost:latest',
port: 2368,
envVars: [
{ key: 'database__client', value: 'sqlite3', description: 'Database client (sqlite3 for standalone)' },
{ key: 'database__connection__filename', value: '/var/lib/ghost/content/data/ghost.db', description: 'SQLite database path' },
{ key: 'url', value: 'http://localhost:2368', description: 'Public URL of the blog' },
],
},
{
id: 'gitea',
name: 'Gitea',
description: 'Lightweight self-hosted Git service. Easy to install and maintain.',
category: 'Dev Tools',
iconName: 'git-branch',
image: 'gitea/gitea:latest',
port: 3000,
},
{
id: 'nextcloud',
name: 'Nextcloud',
description: 'Self-hosted file sync and share platform. Your own private cloud.',
category: 'Storage',
iconName: 'package',
image: 'nextcloud:latest',
port: 80,
},
{
id: 'grafana',
name: 'Grafana',
description: 'Open-source observability platform for metrics, logs, and traces visualization.',
category: 'Monitoring',
iconName: 'monitor',
image: 'grafana/grafana:latest',
port: 3000,
envVars: [
{ key: 'GF_SECURITY_ADMIN_PASSWORD', value: 'admin', description: 'Admin password' },
],
},
{
id: 'uptime-kuma',
name: 'Uptime Kuma',
description: 'Self-hosted monitoring tool. Beautiful UI for tracking uptime of services.',
category: 'Monitoring',
iconName: 'monitor',
image: 'louislam/uptime-kuma:latest',
port: 3001,
},
{
id: 'plausible',
name: 'Plausible Analytics',
description: 'Privacy-friendly web analytics. No cookies, GDPR compliant by design.',
category: 'Analytics',
iconName: 'monitor',
image: 'plausible/analytics:latest',
port: 8000,
enableClickHouse: true,
},
{
id: 'vaultwarden',
name: 'Vaultwarden',
description: 'Lightweight Bitwarden-compatible password manager server.',
category: 'Security',
iconName: 'shield',
image: 'vaultwarden/server:latest',
port: 80,
},
{
id: 'n8n',
name: 'N8N',
description: 'Workflow automation tool. Connect anything to everything with a visual editor.',
category: 'Automation',
iconName: 'server',
image: 'n8nio/n8n:latest',
port: 5678,
},
{
id: 'mattermost',
name: 'Mattermost',
description: 'Open-source Slack alternative for team communication and collaboration.',
category: 'Communication',
iconName: 'mail',
image: 'mattermost/mattermost-team-edition:latest',
port: 8065,
},
{
id: 'portainer',
name: 'Portainer',
description: 'Docker management UI. Monitor and manage containers from a web interface.',
category: 'Dev Tools',
iconName: 'package',
image: 'portainer/portainer-ce:latest',
port: 9000,
},
{
id: 'redis',
name: 'Redis',
description: 'In-memory data store used as database, cache, and message broker.',
category: 'Database',
iconName: 'database',
image: 'redis:alpine',
port: 6379,
},
{
id: 'postgres',
name: 'PostgreSQL',
description: 'Advanced open-source relational database. Reliable and feature-rich.',
category: 'Database',
iconName: 'database',
image: 'postgres:16-alpine',
port: 5432,
envVars: [
{ key: 'POSTGRES_PASSWORD', value: '', description: 'Superuser password', required: true },
{ key: 'POSTGRES_USER', value: 'postgres', description: 'Superuser name' },
{ key: 'POSTGRES_DB', value: 'postgres', description: 'Default database name' },
],
},
{
id: 'mariadb',
name: 'MariaDB',
description: 'Community-developed fork of MySQL. Drop-in replacement with enhanced features.',
category: 'Database',
iconName: 'database',
image: 'mariadb:latest',
port: 3306,
envVars: [
{ key: 'MARIADB_ROOT_PASSWORD', value: '', description: 'Root password', required: true },
],
},
{
id: 'adminer',
name: 'Adminer',
description: 'Database management tool in a single PHP file. Supports MySQL, PostgreSQL, SQLite.',
category: 'Dev Tools',
iconName: 'database',
image: 'adminer:latest',
port: 8080,
},
];
@customElement('ob-view-appstore') @customElement('ob-view-appstore')
export class ObViewAppStore extends DeesElement { export class ObViewAppStore extends DeesElement {
@state()
accessor appStoreState: appstate.IAppStoreState = {
apps: [],
upgradeableServices: [],
};
@state()
accessor currentView: 'grid' | 'detail' = 'grid';
@state()
accessor selectedApp: interfaces.requests.ICatalogApp | null = null;
@state()
accessor selectedAppMeta: interfaces.requests.IAppMeta | null = null;
@state()
accessor selectedAppConfig: interfaces.requests.IAppVersionConfig | null = null;
@state()
accessor selectedVersion: string = '';
@state()
accessor editableEnvVars: Array<{ key: string; value: string; description: string; required?: boolean; platformInjected?: boolean }> = [];
@state()
accessor serviceName: string = '';
@state()
accessor serviceDomain: string = '';
@state()
accessor loading: boolean = false;
@state()
accessor deployMode: boolean = false;
public static styles = [ public static styles = [
cssManager.defaultStyles, cssManager.defaultStyles,
shared.viewHostCss, shared.viewHostCss,
css``, css`
.detail-card {
background: var(--ci-shade-1, #09090b);
border: 1px solid var(--ci-shade-2, #27272a);
border-radius: 8px;
padding: 24px;
margin-bottom: 16px;
}
.detail-header {
display: flex;
align-items: flex-start;
gap: 16px;
margin-bottom: 24px;
}
.detail-icon {
width: 64px;
height: 64px;
border-radius: 12px;
background: var(--ci-shade-2, #27272a);
display: flex;
align-items: center;
justify-content: center;
font-size: 28px;
font-weight: 700;
color: var(--ci-shade-5, #a1a1aa);
flex-shrink: 0;
}
.detail-title {
font-size: 24px;
font-weight: 700;
color: var(--ci-shade-7, #e4e4e7);
margin: 0 0 4px 0;
}
.detail-category {
display: inline-block;
padding: 2px 10px;
border-radius: 9999px;
font-size: 12px;
font-weight: 500;
background: var(--ci-shade-2, #27272a);
color: var(--ci-shade-5, #a1a1aa);
margin-bottom: 8px;
}
.detail-description {
font-size: 14px;
color: var(--ci-shade-5, #a1a1aa);
line-height: 1.6;
margin: 0;
}
.detail-meta {
display: flex;
gap: 16px;
margin-top: 8px;
font-size: 13px;
color: var(--ci-shade-4, #71717a);
}
.detail-meta a {
color: var(--ci-shade-5, #a1a1aa);
text-decoration: none;
}
.detail-meta a:hover {
text-decoration: underline;
}
.section-label {
font-size: 13px;
font-weight: 600;
color: var(--ci-shade-5, #a1a1aa);
text-transform: uppercase;
letter-spacing: 0.05em;
margin-bottom: 10px;
}
.badge {
display: inline-flex;
align-items: center;
gap: 4px;
padding: 4px 10px;
border-radius: 6px;
font-size: 12px;
font-weight: 500;
background: rgba(59, 130, 246, 0.15);
color: #60a5fa;
margin-right: 6px;
margin-bottom: 6px;
}
.version-row {
display: flex;
align-items: center;
gap: 16px;
}
.version-select {
background: var(--ci-shade-2, #27272a);
border: 1px solid var(--ci-shade-3, #3f3f46);
border-radius: 6px;
padding: 8px 12px;
color: var(--ci-shade-7, #e4e4e7);
font-size: 14px;
cursor: pointer;
}
.image-tag {
font-family: monospace;
font-size: 13px;
color: var(--ci-shade-5, #a1a1aa);
background: var(--ci-shade-2, #27272a);
padding: 4px 8px;
border-radius: 4px;
}
.env-table {
width: 100%;
border-collapse: collapse;
}
.env-table th {
text-align: left;
font-size: 12px;
font-weight: 500;
color: var(--ci-shade-4, #71717a);
padding: 8px 8px 8px 0;
border-bottom: 1px solid var(--ci-shade-2, #27272a);
}
.env-table td {
padding: 6px 8px 6px 0;
vertical-align: middle;
}
.env-input {
width: 100%;
background: var(--ci-shade-2, #27272a);
border: 1px solid var(--ci-shade-3, #3f3f46);
border-radius: 4px;
padding: 6px 8px;
color: var(--ci-shade-7, #e4e4e7);
font-size: 13px;
font-family: monospace;
box-sizing: border-box;
}
.env-input:disabled {
opacity: 0.5;
cursor: not-allowed;
}
.env-key {
font-family: monospace;
font-size: 13px;
color: var(--ci-shade-6, #d4d4d8);
white-space: nowrap;
}
.env-desc {
font-size: 12px;
color: var(--ci-shade-4, #71717a);
}
.env-badge {
font-size: 10px;
padding: 1px 6px;
border-radius: 3px;
margin-left: 6px;
}
.env-badge.required {
background: rgba(239, 68, 68, 0.15);
color: #f87171;
}
.env-badge.auto {
background: rgba(34, 197, 94, 0.15);
color: #4ade80;
}
.name-input {
background: var(--ci-shade-2, #27272a);
border: 1px solid var(--ci-shade-3, #3f3f46);
border-radius: 6px;
padding: 10px 14px;
color: var(--ci-shade-7, #e4e4e7);
font-size: 14px;
width: 300px;
box-sizing: border-box;
}
.actions-row {
display: flex;
justify-content: flex-end;
gap: 12px;
margin-top: 24px;
}
.btn {
display: inline-flex;
align-items: center;
gap: 8px;
padding: 10px 20px;
border: none;
border-radius: 6px;
font-size: 14px;
font-weight: 500;
cursor: pointer;
transition: opacity 200ms ease;
}
.btn:hover { opacity: 0.9; }
.btn-primary {
background: var(--ci-shade-7, #e4e4e7);
color: var(--ci-shade-0, #09090b);
}
.btn-secondary {
background: transparent;
border: 1px solid var(--ci-shade-2, #27272a);
color: var(--ci-shade-6, #d4d4d8);
}
.loading-spinner {
padding: 32px;
text-align: center;
color: var(--ci-shade-4, #71717a);
}
`,
]; ];
constructor() {
super();
const sub = appstate.appStoreStatePart
.select((s) => s)
.subscribe((newState) => {
this.appStoreState = newState;
});
this.rxSubscriptions.push(sub);
}
async connectedCallback() { async connectedCallback() {
super.connectedCallback(); super.connectedCallback();
await appstate.appStoreStatePart.dispatchAction(appstate.fetchAppTemplatesAction, null);
} }
public render(): TemplateResult { public render(): TemplateResult {
switch (this.currentView) {
case 'detail':
return this.renderDetailView();
default:
return this.renderGridView();
}
}
private renderGridView(): TemplateResult {
const appTemplates = this.appStoreState.apps.map((app) => ({
id: app.id,
name: app.name,
description: app.description,
category: app.category,
iconName: app.iconName,
iconUrl: app.iconUrl,
image: '',
port: 0,
}));
return html` return html`
<ob-sectionheading>App Store</ob-sectionheading> <ob-sectionheading>App Store</ob-sectionheading>
<sz-app-store-view ${appTemplates.length === 0
.apps=${appTemplates} ? html`<div class="loading-spinner">Loading app templates...</div>`
@deploy-app=${(e: CustomEvent) => this.handleDeployApp(e)} : html`
></sz-app-store-view> <sz-app-store-view
.apps=${appTemplates}
@view-app=${(e: CustomEvent) => this.handleViewDetails(e)}
@deploy-app=${(e: CustomEvent) => this.handleAppClick(e)}
></sz-app-store-view>
`}
`; `;
} }
private handleDeployApp(e: CustomEvent) { private renderDetailView(): TemplateResult {
if (this.loading) {
return html`
<ob-sectionheading>App Store</ob-sectionheading>
<div class="loading-spinner">Loading app details...</div>
`;
}
const app = this.selectedApp;
const meta = this.selectedAppMeta;
const config = this.selectedAppConfig;
if (!app || !config) {
return html`
<ob-sectionheading>App Store</ob-sectionheading>
<div class="loading-spinner">App not found.</div>
`;
}
const platformReqs = config.platformRequirements || {};
const hasPlatformReqs = Object.values(platformReqs).some(Boolean);
const platformLabels: Record<string, string> = {
mongodb: 'MongoDB',
s3: 'S3 (MinIO)',
clickhouse: 'ClickHouse',
redis: 'Redis',
mariadb: 'MariaDB',
};
return html`
<ob-sectionheading>App Store</ob-sectionheading>
<button class="btn btn-secondary" style="margin-bottom: 16px;" @click=${() => { this.currentView = 'grid'; }}>
&larr; Back to App Store
</button>
<!-- Header -->
<div class="detail-card">
<div class="detail-header">
<div class="detail-icon">${(app.name || '?')[0].toUpperCase()}</div>
<div style="flex: 1;">
<h2 class="detail-title">${app.name}</h2>
<span class="detail-category">${app.category}</span>
<p class="detail-description">${app.description}</p>
<div class="detail-meta">
${meta?.maintainer ? html`<span>Maintainer: <strong>${meta.maintainer}</strong></span>` : ''}
${meta?.links ? Object.entries(meta.links).map(([label, url]) =>
html`<a href="${url}" target="_blank" rel="noopener">${label}</a>`
) : ''}
${app.tags?.length ? html`<span>Tags: ${app.tags.join(', ')}</span>` : ''}
</div>
</div>
</div>
</div>
<!-- Platform Services -->
${hasPlatformReqs ? html`
<div class="detail-card">
<div class="section-label">Platform Services</div>
<div>
${Object.entries(platformReqs)
.filter(([_, enabled]) => enabled)
.map(([key]) => html`<span class="badge">${platformLabels[key] || key}</span>`)}
</div>
<div style="font-size: 12px; color: var(--ci-shade-4, #71717a); margin-top: 8px;">
These platform services will be automatically provisioned when you deploy.
</div>
</div>
` : ''}
<!-- Version & Image -->
<div class="detail-card">
<div class="section-label">Version</div>
<div class="version-row">
<select class="version-select" @change=${(e: Event) => this.handleVersionChange((e.target as HTMLSelectElement).value)}>
${(meta?.versions || [this.selectedVersion]).map((v) =>
html`<option value="${v}" ?selected=${v === this.selectedVersion}>${v}${v === app.latestVersion ? ' (latest)' : ''}</option>`
)}
</select>
<span class="image-tag">${config.image}</span>
${config.minOneboxVersion ? html`<span style="font-size: 12px; color: var(--ci-shade-4, #71717a);">Requires onebox &ge; ${config.minOneboxVersion}</span>` : ''}
</div>
</div>
<!-- Environment Variables -->
${this.editableEnvVars.length > 0 ? html`
<div class="detail-card">
<div class="section-label">Environment Variables</div>
<table class="env-table">
<thead>
<tr>
<th style="width: 30%;">Variable</th>
<th style="width: 40%;">Value</th>
<th>Description</th>
</tr>
</thead>
<tbody>
${this.editableEnvVars.map((ev, index) => html`
<tr>
<td>
<span class="env-key">${ev.key}</span>
${ev.required ? html`<span class="env-badge required">required</span>` : ''}
${ev.platformInjected ? html`<span class="env-badge auto">auto</span>` : ''}
</td>
<td>
<input
class="env-input"
type="text"
.value=${ev.value}
?disabled=${ev.platformInjected || !this.deployMode}
placeholder=${ev.platformInjected ? 'Auto-injected by platform' : 'Enter value...'}
@input=${(e: Event) => this.handleEnvVarChange(index, (e.target as HTMLInputElement).value)}
/>
</td>
<td><span class="env-desc">${ev.description || ''}</span></td>
</tr>
`)}
</tbody>
</table>
</div>
` : ''}
<!-- Deploy section (only in deploy mode) or action button (view mode) -->
${this.deployMode ? html`
<div class="detail-card">
<div class="section-label">Service Name</div>
<input
class="name-input"
type="text"
.value=${this.serviceName}
placeholder="e.g. my-ghost-blog"
@input=${(e: Event) => { this.serviceName = (e.target as HTMLInputElement).value; }}
/>
<div style="font-size: 12px; color: var(--ci-shade-4, #71717a); margin-top: 6px;">
Lowercase letters, numbers, and hyphens only.
</div>
<div class="section-label" style="margin-top: 18px;">Domain</div>
<input
class="name-input"
type="text"
.value=${this.serviceDomain}
placeholder="e.g. cloudly.example.com"
@input=${(e: Event) => this.handleServiceDomainChange((e.target as HTMLInputElement).value)}
/>
<div style="font-size: 12px; color: var(--ci-shade-4, #71717a); margin-top: 6px;">
Onebox routes this domain to the deployed app. Required when the app uses SERVICE_DOMAIN.
</div>
<div class="actions-row">
<button class="btn btn-secondary" @click=${() => { this.currentView = 'grid'; }}>Cancel</button>
<button class="btn btn-primary" @click=${() => this.handleDeploy()}>
Deploy v${this.selectedVersion}
</button>
</div>
</div>
` : html`
<div class="actions-row" style="margin-top: 8px;">
<button class="btn btn-secondary" @click=${() => { this.currentView = 'grid'; }}>
&larr; Back
</button>
<button class="btn btn-primary" @click=${() => { this.deployMode = true; }}>
Deploy this App
</button>
</div>
`}
`;
}
private async handleViewDetails(e: CustomEvent) {
const app = e.detail?.app; const app = e.detail?.app;
if (!app) return; if (!app) return;
// Store the template and navigate on next microtask to avoid const catalogApp = this.appStoreState.apps.find((a) => a.id === app.id);
// destroying the current view while the event handler is still on the call stack if (!catalogApp) return;
setTimeout(() => {
// Set both pendingAppTemplate and activeView atomically this.deployMode = false;
appstate.uiStatePart.setState({ this.selectedApp = catalogApp;
...appstate.uiStatePart.getState(), this.selectedVersion = catalogApp.latestVersion;
pendingAppTemplate: app, this.serviceName = catalogApp.id;
activeView: 'services', this.loading = true;
this.currentView = 'detail';
await this.fetchVersionConfig(catalogApp.id, catalogApp.latestVersion);
this.loading = false;
}
private async handleAppClick(e: CustomEvent) {
const app = e.detail?.app;
if (!app) return;
const catalogApp = this.appStoreState.apps.find((a) => a.id === app.id);
if (!catalogApp) return;
this.deployMode = true;
this.selectedApp = catalogApp;
this.selectedVersion = catalogApp.latestVersion;
this.serviceName = catalogApp.id;
this.loading = true;
this.currentView = 'detail';
await this.fetchVersionConfig(catalogApp.id, catalogApp.latestVersion);
this.loading = false;
}
private async handleVersionChange(version: string) {
if (!this.selectedApp || version === this.selectedVersion) return;
this.selectedVersion = version;
this.loading = true;
await this.fetchVersionConfig(this.selectedApp.id, version);
this.loading = false;
}
private async fetchVersionConfig(appId: string, version: string) {
try {
const identity = appstate.loginStatePart.getState().identity;
if (!identity) return;
const typedRequest = new plugins.domtools.plugins.typedrequest.TypedRequest<
interfaces.requests.IReq_GetAppConfig
>('/typedrequest', 'getAppConfig');
const response = await typedRequest.fire({ identity, appId, version });
this.selectedAppMeta = response.appMeta;
this.selectedAppConfig = response.config;
// Build editable env vars
this.editableEnvVars = (response.config.envVars || []).map((ev) => ({
key: ev.key,
value: ev.value || '',
description: ev.description || '',
required: ev.required,
platformInjected: ev.value?.includes('${') || false,
}));
this.serviceDomain = '';
} catch (err) {
console.error('Failed to fetch app config:', err);
}
}
private handleEnvVarChange(index: number, value: string) {
const updated = [...this.editableEnvVars];
updated[index] = { ...updated[index], value };
this.editableEnvVars = updated;
}
private handleServiceDomainChange(valueArg: string) {
this.serviceDomain = this.normalizeDomain(valueArg);
}
private normalizeDomain(valueArg: string) {
return valueArg.trim().replace(/^https?:\/\//, '').replace(/\/$/, '');
}
private async handleDeploy() {
const app = this.selectedApp;
const config = this.selectedAppConfig;
if (!app || !config) return;
const missingRequiredEnvVars = this.editableEnvVars.filter((envVarArg) => {
return envVarArg.required && !envVarArg.platformInjected && !envVarArg.value.trim();
});
if (missingRequiredEnvVars.length > 0) {
console.error(
`Missing required environment variables: ${missingRequiredEnvVars
.map((envVarArg) => envVarArg.key)
.join(', ')}`,
);
return;
}
const needsServiceDomain = (config.envVars || []).some((envVarArg) => {
return envVarArg.value?.includes('${SERVICE_DOMAIN}');
});
if (needsServiceDomain && !this.serviceDomain) {
console.error('A domain is required for this app.');
return;
}
const envVars: Record<string, string> = {};
for (const ev of this.editableEnvVars) {
if (ev.key && ev.value) {
envVars[ev.key] = ev.value;
}
}
const platformReqs = config.platformRequirements || {};
const serviceConfig: interfaces.data.IServiceCreate = {
name: this.serviceName || app.id,
image: config.image,
port: config.port || 80,
domain: this.serviceDomain || undefined,
envVars,
enableMongoDB: platformReqs.mongodb || false,
enableS3: platformReqs.s3 || false,
enableClickHouse: platformReqs.clickhouse || false,
enableRedis: platformReqs.redis || false,
enableMariaDB: platformReqs.mariadb || false,
appTemplateId: app.id,
appTemplateVersion: this.selectedVersion,
};
try {
await appstate.servicesStatePart.dispatchAction(appstate.createServiceAction, {
config: serviceConfig,
}); });
}, 0); setTimeout(() => {
appRouter.navigateToView('services');
}, 0);
} catch (err) {
console.error('Failed to deploy from App Store:', err);
}
} }
} }
+11 -8
View File
@@ -1,6 +1,7 @@
import * as plugins from '../plugins.js'; import * as plugins from '../plugins.js';
import * as shared from './shared/index.js'; import * as shared from './shared/index.js';
import * as appstate from '../appstate.js'; import * as appstate from '../appstate.js';
import { appRouter } from '../router.js';
import { import {
DeesElement, DeesElement,
customElement, customElement,
@@ -114,11 +115,13 @@ export class ObViewDashboard extends DeesElement {
networkOut: status?.docker?.networkOut || 0, networkOut: status?.docker?.networkOut || 0,
topConsumers: [], topConsumers: [],
}, },
platformServices: platformServices.map((ps) => ({ platformServices: platformServices
name: ps.displayName, .filter((ps) => ps.status === 'running' || ps.status === 'starting' || ps.status === 'stopping' || ps.isCore)
status: ps.status === 'running' ? 'running' : 'stopped', .map((ps) => ({
running: ps.status === 'running', name: ps.displayName,
})), status: ps.status === 'running' ? 'Running' : ps.status === 'starting' ? 'Starting...' : ps.status === 'stopping' ? 'Stopping...' : 'Stopped',
running: ps.status === 'running',
})),
traffic: { traffic: {
requests: 0, requests: 0,
errors: 0, errors: 0,
@@ -159,9 +162,9 @@ export class ObViewDashboard extends DeesElement {
private handleQuickAction(e: CustomEvent) { private handleQuickAction(e: CustomEvent) {
const action = e.detail?.action || e.detail?.label; const action = e.detail?.action || e.detail?.label;
if (action === 'Deploy Service') { if (action === 'Deploy Service') {
appstate.uiStatePart.dispatchAction(appstate.setActiveViewAction, { view: 'services' }); appRouter.navigateToView('services');
} else if (action === 'Add Domain') { } else if (action === 'Add Domain') {
appstate.uiStatePart.dispatchAction(appstate.setActiveViewAction, { view: 'network' }); appRouter.navigateToView('network');
} }
} }
@@ -178,7 +181,7 @@ export class ObViewDashboard extends DeesElement {
...appstate.servicesStatePart.getState(), ...appstate.servicesStatePart.getState(),
currentPlatformService: ps, currentPlatformService: ps,
}); });
appstate.uiStatePart.dispatchAction(appstate.setActiveViewAction, { view: 'services' }); appRouter.navigateToView('services');
} }
} }
} }
+2 -1
View File
@@ -1,6 +1,7 @@
import * as plugins from '../plugins.js'; import * as plugins from '../plugins.js';
import * as shared from './shared/index.js'; import * as shared from './shared/index.js';
import * as appstate from '../appstate.js'; import * as appstate from '../appstate.js';
import { appRouter } from '../router.js';
import { import {
DeesElement, DeesElement,
customElement, customElement,
@@ -64,7 +65,7 @@ export class ObViewRegistries extends DeesElement {
.registryUrl=${'localhost:5000'} .registryUrl=${'localhost:5000'}
@manage-tokens=${() => { @manage-tokens=${() => {
// tokens are managed via the tokens view // tokens are managed via the tokens view
appstate.uiStatePart.dispatchAction(appstate.setActiveViewAction, { view: 'tokens' }); appRouter.navigateToView('tokens');
}} }}
></sz-registry-advertisement> ></sz-registry-advertisement>
`; `;
+112 -30
View File
@@ -142,6 +142,12 @@ export class ObViewServices extends DeesElement {
@state() @state()
accessor pendingTemplate: any = null; accessor pendingTemplate: any = null;
@state()
accessor appStoreState: appstate.IAppStoreState = {
apps: [],
upgradeableServices: [],
};
constructor() { constructor() {
super(); super();
@@ -159,7 +165,12 @@ export class ObViewServices extends DeesElement {
}); });
this.rxSubscriptions.push(backupsSub); this.rxSubscriptions.push(backupsSub);
// No subscription needed — pendingAppTemplate is checked in render() const appStoreSub = appstate.appStoreStatePart
.select((s) => s)
.subscribe((newState) => {
this.appStoreState = newState;
});
this.rxSubscriptions.push(appStoreSub);
} }
public static styles = [ public static styles = [
@@ -215,6 +226,7 @@ export class ObViewServices extends DeesElement {
await Promise.all([ await Promise.all([
appstate.servicesStatePart.dispatchAction(appstate.fetchServicesAction, null), appstate.servicesStatePart.dispatchAction(appstate.fetchServicesAction, null),
appstate.servicesStatePart.dispatchAction(appstate.fetchPlatformServicesAction, null), appstate.servicesStatePart.dispatchAction(appstate.fetchPlatformServicesAction, null),
appstate.appStoreStatePart.dispatchAction(appstate.fetchUpgradeableServicesAction, null),
]); ]);
// If a platform service was selected from the dashboard, navigate to its detail // If a platform service was selected from the dashboard, navigate to its detail
@@ -230,20 +242,6 @@ export class ObViewServices extends DeesElement {
} }
updated(changedProperties: Map<string, any>) {
super.updated(changedProperties);
// Check for pending app template from the App Store after each update
const uiState = appstate.uiStatePart.getState();
if (uiState.pendingAppTemplate && !this.pendingTemplate) {
this.pendingTemplate = uiState.pendingAppTemplate;
appstate.uiStatePart.setState({
...appstate.uiStatePart.getState(),
pendingAppTemplate: undefined,
});
this.currentView = 'create';
}
}
public render(): TemplateResult { public render(): TemplateResult {
switch (this.currentView) { switch (this.currentView) {
case 'create': case 'create':
@@ -277,7 +275,14 @@ export class ObViewServices extends DeesElement {
default: return status; default: return status;
} }
}; };
const mappedPlatformServices = this.servicesState.platformServices.map((ps) => ({ // Split platform services into active (running or core) and inactive (not in use)
const activePlatformServices = this.servicesState.platformServices.filter(
(ps) => ps.status === 'running' || ps.status === 'starting' || ps.status === 'stopping' || ps.isCore,
);
const inactivePlatformServices = this.servicesState.platformServices.filter(
(ps) => !ps.isCore && (ps.status === 'not-deployed' || ps.status === 'stopped' || ps.status === 'failed'),
);
const mappedActivePlatformServices = activePlatformServices.map((ps) => ({
name: ps.displayName, name: ps.displayName,
status: displayStatus(ps.status), status: displayStatus(ps.status),
running: ps.status === 'running', running: ps.status === 'running',
@@ -313,17 +318,45 @@ export class ObViewServices extends DeesElement {
></sz-services-list-view> ></sz-services-list-view>
<ob-sectionheading style="margin-top: 32px;">Platform Services</ob-sectionheading> <ob-sectionheading style="margin-top: 32px;">Platform Services</ob-sectionheading>
<div style="max-width: 500px;"> <div style="max-width: 500px;">
<sz-platform-services-card ${mappedActivePlatformServices.length > 0 ? html`
.services=${mappedPlatformServices} <sz-platform-services-card
@service-click=${(e: CustomEvent) => { .services=${mappedActivePlatformServices}
const type = e.detail.type || this.servicesState.platformServices.find( @service-click=${(e: CustomEvent) => {
(ps) => ps.displayName === e.detail.name, const type = e.detail.type || this.servicesState.platformServices.find(
)?.type; (ps) => ps.displayName === e.detail.name,
if (type) { )?.type;
this.navigateToPlatformDetail(type); if (type) {
} this.navigateToPlatformDetail(type);
}} }
></sz-platform-services-card> }}
></sz-platform-services-card>
` : ''}
${inactivePlatformServices.length > 0 ? html`
<div style="
background: var(--ci-shade-1, #09090b);
border: 1px solid var(--ci-shade-2, #27272a);
border-radius: 8px;
padding: 20px;
margin-top: ${mappedActivePlatformServices.length > 0 ? '12px' : '0'};
opacity: 0.5;
">
<div style="font-size: 13px; color: var(--ci-shade-4, #71717a); margin-bottom: 12px;">Available — not in use</div>
<div style="display: flex; flex-direction: column; gap: 12px;">
${inactivePlatformServices.map((ps) => html`
<div
style="display: flex; justify-content: space-between; align-items: center; padding: 8px 0; cursor: pointer; transition: opacity 200ms ease;"
@click=${() => this.navigateToPlatformDetail(ps.type)}
>
<div style="display: flex; align-items: center; gap: 10px;">
<div style="width: 8px; height: 8px; border-radius: 50%; background: var(--ci-shade-3, #3f3f46); flex-shrink: 0;"></div>
<span style="font-size: 14px; font-weight: 500; color: var(--ci-shade-4, #71717a);">${ps.displayName}</span>
</div>
<span style="font-size: 13px; color: var(--ci-shade-3, #3f3f46);">${displayStatus(ps.status)}</span>
</div>
`)}
</div>
</div>
` : ''}
</div> </div>
`; `;
} }
@@ -344,6 +377,8 @@ export class ObViewServices extends DeesElement {
enableMongoDB: template.enableMongoDB || false, enableMongoDB: template.enableMongoDB || false,
enableS3: template.enableS3 || false, enableS3: template.enableS3 || false,
enableClickHouse: template.enableClickHouse || false, enableClickHouse: template.enableClickHouse || false,
enableRedis: template.enableRedis || false,
enableMariaDB: template.enableMariaDB || false,
}; };
await appstate.servicesStatePart.dispatchAction(appstate.createServiceAction, { await appstate.servicesStatePart.dispatchAction(appstate.createServiceAction, {
config: serviceConfig, config: serviceConfig,
@@ -368,12 +403,14 @@ export class ObViewServices extends DeesElement {
<div><span style="color: var(--ci-shade-5, #a1a1aa);">Service Name:</span> <strong>${t.id}</strong></div> <div><span style="color: var(--ci-shade-5, #a1a1aa);">Service Name:</span> <strong>${t.id}</strong></div>
<div><span style="color: var(--ci-shade-5, #a1a1aa);">Category:</span> <strong>${t.category}</strong></div> <div><span style="color: var(--ci-shade-5, #a1a1aa);">Category:</span> <strong>${t.category}</strong></div>
</div> </div>
${t.enableMongoDB || t.enableS3 || t.enableClickHouse ? html` ${t.enableMongoDB || t.enableS3 || t.enableClickHouse || t.enableRedis || t.enableMariaDB ? html`
<div style="margin-top: 12px; font-size: 13px; color: var(--ci-shade-5, #a1a1aa);"> <div style="margin-top: 12px; font-size: 13px; color: var(--ci-shade-5, #a1a1aa);">
Platform Services: Platform Services:
${t.enableMongoDB ? html`<span style="margin-right: 8px;">MongoDB</span>` : ''} ${t.enableMongoDB ? html`<span style="margin-right: 8px;">MongoDB</span>` : ''}
${t.enableS3 ? html`<span style="margin-right: 8px;">S3</span>` : ''} ${t.enableS3 ? html`<span style="margin-right: 8px;">S3</span>` : ''}
${t.enableClickHouse ? html`<span>ClickHouse</span>` : ''} ${t.enableClickHouse ? html`<span style="margin-right: 8px;">ClickHouse</span>` : ''}
${t.enableRedis ? html`<span style="margin-right: 8px;">Redis</span>` : ''}
${t.enableMariaDB ? html`<span style="margin-right: 8px;">MariaDB</span>` : ''}
</div> </div>
` : ''} ` : ''}
</div> </div>
@@ -407,6 +444,8 @@ export class ObViewServices extends DeesElement {
enableMongoDB: formConfig.enableMongoDB || false, enableMongoDB: formConfig.enableMongoDB || false,
enableS3: formConfig.enableS3 || false, enableS3: formConfig.enableS3 || false,
enableClickHouse: formConfig.enableClickHouse || false, enableClickHouse: formConfig.enableClickHouse || false,
enableRedis: formConfig.enableRedis || false,
enableMariaDB: formConfig.enableMariaDB || false,
}; };
await appstate.servicesStatePart.dispatchAction(appstate.createServiceAction, { await appstate.servicesStatePart.dispatchAction(appstate.createServiceAction, {
config: serviceConfig, config: serviceConfig,
@@ -428,8 +467,49 @@ export class ObViewServices extends DeesElement {
: defaultStats; : defaultStats;
const transformedLogs = parseLogs(this.servicesState.currentServiceLogs); const transformedLogs = parseLogs(this.servicesState.currentServiceLogs);
// Check if this service has an available upgrade
const upgradeInfo = service
? this.appStoreState.upgradeableServices.find((u) => u.serviceName === service.name)
: null;
return html` return html`
<ob-sectionheading>Service Details</ob-sectionheading> <ob-sectionheading>Service Details</ob-sectionheading>
${upgradeInfo ? html`
<div style="
background: linear-gradient(135deg, rgba(59, 130, 246, 0.1), rgba(139, 92, 246, 0.1));
border: 1px solid rgba(59, 130, 246, 0.3);
border-radius: 8px;
padding: 16px;
margin-bottom: 16px;
display: flex;
justify-content: space-between;
align-items: center;
">
<div>
<div style="font-size: 14px; font-weight: 600; color: var(--ci-shade-7, #e4e4e7);">
Update available: v${upgradeInfo.currentVersion} &rarr; v${upgradeInfo.latestVersion}
</div>
<div style="font-size: 12px; color: var(--ci-shade-4, #71717a); margin-top: 4px;">
${upgradeInfo.hasMigration ? 'Migration script available' : 'Config-only upgrade'}
</div>
</div>
<button
class="deploy-button"
style="padding: 8px 16px; font-size: 13px;"
@click=${async () => {
await appstate.appStoreStatePart.dispatchAction(appstate.upgradeServiceAction, {
serviceName: upgradeInfo.serviceName,
targetVersion: upgradeInfo.latestVersion,
});
// Refresh service data
appstate.servicesStatePart.dispatchAction(appstate.fetchServiceAction, {
name: upgradeInfo.serviceName,
});
appstate.servicesStatePart.dispatchAction(appstate.fetchServicesAction, null);
}}
>Upgrade</button>
</div>
` : ''}
<sz-service-detail-view <sz-service-detail-view
.service=${transformedService} .service=${transformedService}
.logs=${transformedLogs} .logs=${transformedLogs}
@@ -529,7 +609,9 @@ export class ObViewServices extends DeesElement {
mongodb: { host: 'onebox-mongodb', port: 27017, version: '4.4', config: { engine: 'WiredTiger', authEnabled: true } }, mongodb: { host: 'onebox-mongodb', port: 27017, version: '4.4', config: { engine: 'WiredTiger', authEnabled: true } },
minio: { host: 'onebox-minio', port: 9000, version: 'latest', config: { consolePort: 9001, region: 'us-east-1' } }, minio: { host: 'onebox-minio', port: 9000, version: 'latest', config: { consolePort: 9001, region: 'us-east-1' } },
clickhouse: { host: 'onebox-clickhouse', port: 8123, version: 'latest', config: { nativePort: 9000, httpPort: 8123 } }, clickhouse: { host: 'onebox-clickhouse', port: 8123, version: 'latest', config: { nativePort: 9000, httpPort: 8123 } },
caddy: { host: 'onebox-caddy', port: 80, version: '2-alpine', config: { httpsPort: 443, adminApi: 2019 } }, smartproxy: { host: 'onebox-smartproxy', port: 80, version: 'latest', config: { httpsPort: 443, adminApi: 2019 } },
mariadb: { host: 'onebox-mariadb', port: 3306, version: '11', config: { engine: 'InnoDB', authEnabled: true } },
redis: { host: 'onebox-redis', port: 6379, version: '7-alpine', config: { appendonly: true, maxDatabases: 16 } },
}; };
const info = platformService const info = platformService
? serviceInfo[platformService.type] || { host: 'unknown', port: 0, version: '', config: {} } ? serviceInfo[platformService.type] || { host: 'unknown', port: 0, version: '', config: {} }
+134 -1
View File
@@ -46,7 +46,61 @@ export class ObViewSettings extends DeesElement {
public static styles = [ public static styles = [
cssManager.defaultStyles, cssManager.defaultStyles,
shared.viewHostCss, shared.viewHostCss,
css``, css`
.gateway-card {
margin-bottom: 24px;
border: 1px solid ${cssManager.bdTheme('#e4e4e7', '#27272a')};
border-radius: 12px;
background: ${cssManager.bdTheme('#ffffff', '#09090b')};
overflow: hidden;
box-shadow: 0 1px 2px ${cssManager.bdTheme('rgba(0,0,0,0.04)', 'rgba(0,0,0,0.2)')};
}
.gateway-header {
padding: 16px 20px;
border-bottom: 1px solid ${cssManager.bdTheme('#f4f4f5', '#27272a')};
background: ${cssManager.bdTheme('#fafafa', '#101013')};
}
.gateway-title {
font-size: 15px;
font-weight: 600;
color: ${cssManager.bdTheme('#18181b', '#fafafa')};
}
.gateway-subtitle {
margin-top: 4px;
font-size: 13px;
color: ${cssManager.bdTheme('#71717a', '#a1a1aa')};
}
.gateway-content {
padding: 20px;
display: grid;
grid-template-columns: repeat(2, minmax(0, 1fr));
gap: 16px;
}
.gateway-field.full {
grid-column: 1 / -1;
}
dees-input-text {
width: 100%;
}
.gateway-footer {
display: flex;
justify-content: flex-end;
padding: 0 20px 20px;
}
@media (max-width: 700px) {
.gateway-content {
grid-template-columns: 1fr;
}
}
`,
]; ];
async connectedCallback() { async connectedCallback() {
@@ -57,6 +111,7 @@ export class ObViewSettings extends DeesElement {
public render(): TemplateResult { public render(): TemplateResult {
return html` return html`
<ob-sectionheading>Settings</ob-sectionheading> <ob-sectionheading>Settings</ob-sectionheading>
${this.renderExternalGatewaySettings()}
<sz-settings-view <sz-settings-view
.settings=${this.settingsState.settings || { .settings=${this.settingsState.settings || {
darkMode: true, darkMode: true,
@@ -90,4 +145,82 @@ export class ObViewSettings extends DeesElement {
></sz-settings-view> ></sz-settings-view>
`; `;
} }
private renderExternalGatewaySettings(): TemplateResult {
const settings = this.settingsState.settings;
return html`
<section class="gateway-card">
<div class="gateway-header">
<div class="gateway-title">Delegate Routing</div>
<div class="gateway-subtitle">Delegate public WorkApp routing, DNS, and certificates to a dcrouter edge authority.</div>
</div>
<div class="gateway-content">
${this.renderGatewayInput('dcrouterGatewayUrl', 'Gateway URL', settings?.dcrouterGatewayUrl || '', 'Base URL of the dcrouter OpsServer.')}
${this.renderGatewayInput('dcrouterGatewayApiToken', 'API Token', settings?.dcrouterGatewayApiToken || '', 'Requires workhosters and certificates scopes.', true)}
${this.renderGatewayInput('dcrouterWorkHosterId', 'WorkHoster ID', settings?.dcrouterWorkHosterId || '', 'Leave empty to let Onebox create a stable ID.')}
${this.renderGatewayInput('dcrouterTargetHost', 'Target Host', settings?.dcrouterTargetHost || '', 'Defaults to the configured server IP when empty.')}
${this.renderGatewayInput('dcrouterTargetPort', 'Target Port', String(settings?.dcrouterTargetPort || 80), 'Internal HTTP port dcrouter forwards to.')}
</div>
<div class="gateway-footer">
<dees-button
.text=${'Save Gateway Settings'}
.type=${'default'}
.icon=${'lucide:Save'}
@click=${() => this.saveExternalGatewaySettings()}
></dees-button>
</div>
</section>
`;
}
private renderGatewayInput(
key: keyof NonNullable<appstate.ISettingsState['settings']>,
label: string,
value: string,
hint: string,
isPassword = false,
): TemplateResult {
return html`
<div class="gateway-field ${key === 'dcrouterGatewayUrl' ? 'full' : ''}">
<dees-input-text
.key=${key}
.label=${label}
.value=${value}
.description=${hint}
.isPasswordBool=${isPassword}
@input=${(event: Event) => this.updateGatewayDraft(key, (event.target as HTMLInputElement).value)}
></dees-input-text>
</div>
`;
}
private updateGatewayDraft(
key: keyof NonNullable<appstate.ISettingsState['settings']>,
value: string,
): void {
const currentSettings = this.settingsState.settings || {} as NonNullable<appstate.ISettingsState['settings']>;
const nextValue = key === 'dcrouterTargetPort' ? Number(value) || 0 : value;
this.settingsState = {
...this.settingsState,
settings: {
...currentSettings,
[key]: nextValue,
},
};
}
private async saveExternalGatewaySettings(): Promise<void> {
const settings = this.settingsState.settings;
if (!settings) return;
await appstate.settingsStatePart.dispatchAction(appstate.updateSettingsAction, {
settings: {
dcrouterGatewayUrl: settings.dcrouterGatewayUrl || '',
dcrouterGatewayApiToken: settings.dcrouterGatewayApiToken || '',
dcrouterWorkHosterId: settings.dcrouterWorkHosterId || '',
dcrouterTargetHost: settings.dcrouterTargetHost || '',
dcrouterTargetPort: Number(settings.dcrouterTargetPort) || 80,
},
});
}
} }
+4
View File
@@ -1,6 +1,10 @@
import * as plugins from './plugins.js'; import * as plugins from './plugins.js';
import { html } from '@design.estate/dees-element'; import { html } from '@design.estate/dees-element';
import './elements/index.js'; import './elements/index.js';
import { appRouter } from './router.js';
// Initialize router before rendering (handles initial URL → state)
appRouter.init();
plugins.deesElement.render(html` plugins.deesElement.render(html`
<ob-app-shell></ob-app-shell> <ob-app-shell></ob-app-shell>
+110
View File
@@ -0,0 +1,110 @@
import * as plugins from './plugins.js';
import * as appstate from './appstate.js';
const SmartRouter = plugins.domtools.plugins.smartrouter.SmartRouter;
/** Every view name the router recognizes as a top-level route segment. */
export const validViews = [
  'dashboard',
  'app-store',
  'services',
  'network',
  'registries',
  'tokens',
  'settings',
] as const;

/** Union of valid view names, derived from the tuple above. */
export type TValidView = (typeof validViews)[number];
/**
 * Keeps the browser URL and `appstate.uiStatePart.activeView` in sync, in both
 * directions: URL changes (deep links, back/forward) update the state, and
 * state changes push the matching path.
 */
class AppRouter {
  private router: InstanceType<typeof SmartRouter>;
  private initialized = false;
  // Guards the URL→state→URL feedback loop while one side is mid-update.
  private suppressStateUpdate = false;
  // Retained so destroy() can detach the state listener (previously leaked:
  // setupStateSync() subscribed but nothing ever unsubscribed, so the callback
  // outlived destroy() and re-init() stacked a second subscription).
  // NOTE(review): assumes select().subscribe() returns an rxjs-style
  // subscription object — confirm against the appstate implementation.
  private stateSubscription: { unsubscribe: () => void } | null = null;

  constructor() {
    this.router = new SmartRouter({ debug: false });
  }

  /**
   * Registers routes, wires state sync, and resolves the initial URL.
   * Idempotent: subsequent calls are no-ops until destroy() is called.
   */
  public init(): void {
    if (this.initialized) return;
    this.setupRoutes();
    this.setupStateSync();
    this.handleInitialRoute();
    this.initialized = true;
  }

  // One route per valid view, plus a root redirect to the dashboard.
  private setupRoutes(): void {
    for (const view of validViews) {
      this.router.on(`/${view}`, async () => {
        this.updateViewState(view);
      });
    }
    this.router.on('/', async () => {
      this.navigateTo('/dashboard');
    });
  }

  // State → URL: when activeView changes elsewhere, push the matching path.
  private setupStateSync(): void {
    this.stateSubscription = appstate.uiStatePart
      .select((s) => s.activeView)
      .subscribe((activeView) => {
        if (this.suppressStateUpdate) return;
        const expectedPath = `/${activeView}`;
        if (window.location.pathname !== expectedPath) {
          this.suppressStateUpdate = true;
          this.router.pushUrl(expectedPath);
          this.suppressStateUpdate = false;
        }
      });
  }

  // URL → state on first load; empty or unknown paths fall back to /dashboard.
  private handleInitialRoute(): void {
    const path = window.location.pathname;
    if (!path || path === '/') {
      this.router.pushUrl('/dashboard');
      return;
    }
    // Only the first segment selects a view; deeper segments are view-internal.
    const view = path.split('/').filter(Boolean)[0];
    if (this.isValidView(view)) {
      this.updateViewState(view);
    } else {
      this.router.pushUrl('/dashboard');
    }
  }

  // Type guard replacing the previous unchecked `as TValidView` casts.
  private isValidView(view: string | undefined): view is TValidView {
    return view !== undefined && (validViews as readonly string[]).includes(view);
  }

  // URL → state: record the active view without echoing back to the URL.
  private updateViewState(view: TValidView): void {
    this.suppressStateUpdate = true;
    const currentState = appstate.uiStatePart.getState();
    if (currentState.activeView !== view) {
      appstate.uiStatePart.setState({
        ...currentState,
        activeView: view,
      });
    }
    this.suppressStateUpdate = false;
  }

  /** Pushes an arbitrary path onto the router/history. */
  public navigateTo(path: string): void {
    this.router.pushUrl(path);
  }

  /**
   * Navigates to a view by name (case/whitespace normalized, e.g.
   * "App Store" → "app-store"); unknown names fall back to the dashboard.
   */
  public navigateToView(view: string): void {
    const normalized = view.toLowerCase().replace(/\s+/g, '-');
    this.navigateTo(this.isValidView(normalized) ? `/${normalized}` : '/dashboard');
  }

  /** Returns the view currently recorded in UI state. */
  public getCurrentView(): string {
    return appstate.uiStatePart.getState().activeView;
  }

  /** Tears down the router and the state subscription so init() can run again. */
  public destroy(): void {
    this.stateSubscription?.unsubscribe();
    this.stateSubscription = null;
    this.router.destroy();
    this.initialized = false;
  }
}
// Module-level singleton; callers import this and invoke appRouter.init() once at startup.
export const appRouter = new AppRouter();